file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
aim_test.go | package nightwatcher
import (
"testing"
"net" |
func TestAim_FromIP(t *testing.T) {
aim := &Aim{}
localIp := aim.FromIP()
found := false
addrs, _ := net.InterfaceAddrs()
for _, addr := range addrs {
if ipnet, ok := addr.(*net.IPNet); ok && !ipnet.IP.IsLoopback() {
if ipnet.IP.To4() != nil {
if localIp == ipnet.IP.String() {
found = true
}
println(ipnet.IP.String())
}
}
}
if !found {
t.Fatalf("FromIp failed, got %v", localIp)
}
} | ) |
test_github.py | from common_fixtures import * # NOQA
from selenium import webdriver
from selenium.webdriver.phantomjs.service import Service as PhantomJSService
from requests.auth import AuthBase
# test the github auth workflow
USER_SCOPE = 'github_user'
TEAM_SCOPE = 'github_team'
ORG_SCOPE = 'github_org'
class NewService(PhantomJSService):
def __init__(self, *args, **kwargs):
super(NewService, self).__init__(*args, **kwargs)
webdriver.phantomjs.webdriver.Service = NewService
if_github = pytest.mark.skipif(not os.environ.get('API_AUTH_GITHUB'
'_CLIENT_SECRET'),
reason='API_AUTH_GITHUB'
'_CLIENT_SECRET is not set')
BASE_URL = cattle_url() + '/v1/'
URL = BASE_URL + 'schemas'
@pytest.fixture(scope='session')
def config():
needed_vars = [
'API_AUTH_GITHUB_TEST_USER',
'API_AUTH_GITHUB_TEST_PASS',
'API_AUTH_GITHUB_CLIENT_ID',
'API_AUTH_GITHUB_CLIENT_SECRET',
'API_AUTH_RANCHER_TEST_PASS',
]
for a in needed_vars:
if os.getenv(a, None) is None:
raise Exception('Please set ' + a + ' in the environment')
config = {}
config['username'] = os.getenv('API_AUTH_GITHUB_TEST_USER', None)
config['password'] = os.getenv('API_AUTH_GITHUB_TEST_PASS', None)
config['phantomjs_port'] = int(os.getenv('PHANTOMJS_WEBDRIVER_PORT', 4444))
config['phantomjs_bin'] = os.getenv('PHANTOMJS_BIN',
'/usr/local/bin/phantomjs')
assert config['phantomjs_bin'] is not None
config['client_id'] = os.getenv('API_AUTH_GITHUB_CLIENT_ID', None)
config['client_secret'] = os.getenv('API_AUTH_GITHUB_CLIENT_SECRET', None)
config['users'] = {}
config['users']['1'] = {
'password': os.getenv('API_AUTH_RANCHER_TEST_PASS', None),
'username': os.getenv('API_AUTH_RANCHER_TEST_USER_1', 'ranchertest01')
}
config['users']['2'] = {
'password': os.getenv('API_AUTH_RANCHER_TEST_PASS_2', None),
'username': os.getenv('API_AUTH_RANCHER_TEST_USER_2', 'ranchertest02')
}
return config
@pytest.fixture(scope='module')
def github_request_code(config, cattle_url, admin_client, request, user=None):
def fin():
admin_client.create_githubconfig(enabled=False,
accessMode='restricted')
request.addfinalizer(fin)
username = config['username']
password = config['password']
enabled = False
if user is not None:
username = user['username']
password = user['password']
enabled = True
driver = webdriver.PhantomJS(config['phantomjs_bin'],
port=config['phantomjs_port'])
max_wait = 60
driver.set_page_load_timeout(max_wait)
driver.set_script_timeout(max_wait)
driver.implicitly_wait(10)
# undo monkey patching
webdriver.phantomjs.webdriver.Service = PhantomJSService
driver.set_window_size(1120, 550)
admin_client.create_githubconfig(enabled=enabled,
accessMode='unrestricted',
clientId=config['client_id'],
clientSecret=config['client_secret'])
urlx = "https://github.com/login/oauth/authorize?response_type=code&client_id=" +\
config['client_id'] + "&scope=read:org"
driver.get(urlx)
driver.find_element_by_id('login_field').send_keys(username)
driver.find_element_by_id('password').send_keys(password)
driver.find_element_by_name('commit').submit()
try:
driver.find_element_by_class_name('btn-primary').click()
except:
pass
driver.get('https://github.com')
cookie_dict = dict(driver.get_cookie('_gh_sess'))
cookie_dict = {'_gh_sess': cookie_dict['value']}
cookie_dict['user_session'] = driver.get_cookie('user_session')['value']
r = requests.get(urlx, cookies=cookie_dict, allow_redirects=False)
redirect_url = r.headers['location']
code = redirect_url.rsplit('=')[1]
driver.quit()
return code
@pytest.fixture(scope='module')
def github_request_token(github_request_code):
code = github_request_code
c = requests.post(BASE_URL + 'token', {'code': code})
return c.json()['jwt']
@pytest.fixture(scope='module')
def github_client(request, cattle_url, github_request_token, admin_client):
github_client = from_env(url=cattle_url)
github_client.delete_by_id = delete_by_id
assert github_client.valid()
jwt = github_request_token
github_client._auth = GithubAuth(jwt)
return github_client
def delete_by_id(self, type, id):
url = self.schema.types[type].links.collection
if url.endswith('/'):
url = url + id
else:
url = '/'.join([url, id])
return self._delete(url)
def _create_member(name='rancherio', role='member', type=ORG_SCOPE):
|
def diff_members(members, got_members):
assert len(members) == len(got_members)
members_a = set([])
members_b = set([])
for member in members:
members_a.add(member['externalId'] + ' ' + member['externalIdType']
+ ' ' + member['role'])
for member in got_members:
members_b.add(member['externalId'] + ' ' + member['externalIdType']
+ ' ' + member['role'])
assert members_a == members_b
def get_plain_members(members):
plain_members = []
for member in members.data:
plain_members.append({
'role': member.role,
'externalId': member.externalId,
'externalIdType': member.externalIdType
})
return plain_members
class GithubAuth(AuthBase):
def __init__(self, jwt, prj_id=None):
# setup any auth-related data here
self.jwt = jwt
self.prj_id = prj_id
def __call__(self, r):
# modify and return the request
r.headers['Authorization'] = 'Bearer ' + self.jwt
if self.prj_id is not None:
r.headers['X-API-Project-Id'] = self.prj_id
return r
def switch_on_auth(admin_client, request, config):
admin_client.create_githubconfig(enabled=True, accessMode='restricted',
clientId=config['client_id'],
clientSecret=config['client_secret'])
def fin():
admin_client.create_githubconfig(enabled=False,
accessMode='restricted',
allowedUsers=[],
allowedOrganizations=[],
clientId='',
clientSecret='')
request.addfinalizer(fin)
@if_github
def test_github_auth_config_unauth_user(request, admin_client,
config):
switch_on_auth(admin_client, request, config)
# do not set any auth headers
no_auth = requests.get(URL)
# test that auth is switched on
assert no_auth.status_code == 401
@if_github
def test_github_auth_config_invalid_user(request,
admin_client, config):
switch_on_auth(admin_client, request, config)
# set invalid auth headers
bad_auth = requests.get(URL,
headers={'Authorization':
'Bearer some_random_string'})
# test that user does not have access
assert bad_auth.status_code == 401
@if_github
def test_github_auth_config_valid_user(github_request_token,
admin_client, request, config):
switch_on_auth(admin_client, request, config)
jwt = github_request_token
# set valid auth headers
schemas = requests.get(URL, headers={'Authorization': 'Bearer ' + jwt})
# test that user has access
assert schemas.status_code == 200
@if_github
def test_github_auth_config_api_whitelist_users(admin_client, request,
github_client, config):
switch_on_auth(admin_client, request, config)
github_client.create_githubconfig(allowedUsers=[
config['users']['1']['username'],
config['users']['2']['username']
],
clientId=config['client_id'],
clientSecret=config['client_secret']
)
# test that these users were whitelisted
r = github_client.list_githubconfig()
users = r[0]['allowedUsers']
assert len(users) == 2
assert config['users']['1']['username'] in users
assert config['users']['2']['username'] in users
assert 'ranchertest02' in users
@if_github
def test_github_auth_config_api_whitelist_orgs(admin_client, request,
config, github_client):
switch_on_auth(admin_client, request, config)
# set whitelisted org
github_client.create_githubconfig(allowedOrganizations=['rancherio'])
# test that these org was whitelisted
r = github_client.list_githubconfig()
orgs = r[0]['allowedOrganizations']
assert len(orgs) == 1
assert 'rancherio' in orgs
@if_github
def test_github_add_whitelisted_user(admin_client, config, github_client,
request):
switch_on_auth(admin_client, request, config)
github_client.create_githubconfig(allowedUsers=[
config['users']['1']['username']
])
# test that these users were whitelisted
r = github_client.list_githubconfig()
users = r[0]['allowedUsers']
assert config['users']['1']['username'] in users
new_token = github_request_code(config, cattle_url, admin_client, request,
user=config['users']['1'])
new_token = github_request_token(new_token)
assert new_token is not None
@if_github
def test_github_projects(cattle_url, config, request,
admin_client, github_client):
user_client = from_env(url=cattle_url)
switch_on_auth(admin_client, request, config)
github_client.create_githubconfig(allowedUsers=[
config['users']['1']['username']
])
# test that the users is whitelisted
r = github_client.list_githubconfig()
users = r[0]['allowedUsers']
assert config['users']['1']['username'] in users
new_token = github_request_code(config, cattle_url, admin_client, request,
user=config['users']['1'])
new_token = github_request_token(new_token)
user_client._auth = GithubAuth(new_token)
members = [_create_member(
name=config['users']['1']['username'],
type=USER_SCOPE,
role='owner'
), _create_member()]
project = user_client.create_project(members=members)
assert len(project.projectMembers()) == 2
diff_members(get_plain_members(project.projectMembers()), members)
project = user_client.wait_success(project)
project = user_client.wait_success(project.deactivate())
project = user_client.wait_success(project.remove())
project = user_client.wait_success(project.purge())
project = user_client.by_id('project', project.id)
assert project.state == 'purged'
@if_github
def test_github_id_name(config, cattle_url, request,
admin_client, github_client):
user_client = from_env(url=cattle_url)
switch_on_auth(admin_client, request, config)
github_client.create_githubconfig(allowedUsers=[
config['users']['1']['username']
])
new_token = github_request_code(config, cattle_url, admin_client, request,
user=config['users']['1'])
new_token = github_request_token(new_token)
user_client._auth = GithubAuth(new_token)
sent_members = [_create_member(
name=config['users']['1']['username'],
type=USER_SCOPE,
role='owner'
),
_create_member()
]
project = user_client.create_project(members=sent_members)
members = get_plain_members(project.projectMembers())
assert len(members) == 2
diff_members(members, sent_members)
| return {
'role': role,
'externalId': name,
'externalIdType': type
} |
validators.go | // Code generated by smithy-go-codegen DO NOT EDIT.
package mediaconvert
import (
"context"
"fmt"
"github.com/aws/aws-sdk-go-v2/service/mediaconvert/types"
smithy "github.com/awslabs/smithy-go"
"github.com/awslabs/smithy-go/middleware"
)
type validateOpAssociateCertificate struct {
}
func (*validateOpAssociateCertificate) ID() string {
return "OperationInputValidation"
}
func (m *validateOpAssociateCertificate) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*AssociateCertificateInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpAssociateCertificateInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpCancelJob struct {
}
func (*validateOpCancelJob) ID() string {
return "OperationInputValidation"
}
func (m *validateOpCancelJob) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*CancelJobInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpCancelJobInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpCreateJob struct {
}
func (*validateOpCreateJob) ID() string {
return "OperationInputValidation"
}
func (m *validateOpCreateJob) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*CreateJobInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpCreateJobInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpCreateJobTemplate struct {
}
func (*validateOpCreateJobTemplate) ID() string {
return "OperationInputValidation"
}
func (m *validateOpCreateJobTemplate) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*CreateJobTemplateInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpCreateJobTemplateInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpCreatePreset struct {
}
func (*validateOpCreatePreset) ID() string {
return "OperationInputValidation"
}
func (m *validateOpCreatePreset) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*CreatePresetInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpCreatePresetInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpCreateQueue struct {
}
func (*validateOpCreateQueue) ID() string {
return "OperationInputValidation"
}
func (m *validateOpCreateQueue) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*CreateQueueInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpCreateQueueInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpDeleteJobTemplate struct {
}
func (*validateOpDeleteJobTemplate) ID() string {
return "OperationInputValidation"
}
func (m *validateOpDeleteJobTemplate) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*DeleteJobTemplateInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpDeleteJobTemplateInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpDeletePreset struct {
}
func (*validateOpDeletePreset) ID() string {
return "OperationInputValidation"
}
func (m *validateOpDeletePreset) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*DeletePresetInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpDeletePresetInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpDeleteQueue struct {
}
func (*validateOpDeleteQueue) ID() string {
return "OperationInputValidation"
}
func (m *validateOpDeleteQueue) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*DeleteQueueInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpDeleteQueueInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpDisassociateCertificate struct {
}
func (*validateOpDisassociateCertificate) ID() string {
return "OperationInputValidation"
}
func (m *validateOpDisassociateCertificate) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*DisassociateCertificateInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpDisassociateCertificateInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpGetJob struct {
}
func (*validateOpGetJob) ID() string {
return "OperationInputValidation"
}
func (m *validateOpGetJob) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*GetJobInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpGetJobInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpGetJobTemplate struct {
}
func (*validateOpGetJobTemplate) ID() string {
return "OperationInputValidation"
}
func (m *validateOpGetJobTemplate) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*GetJobTemplateInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpGetJobTemplateInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpGetPreset struct {
}
func (*validateOpGetPreset) ID() string {
return "OperationInputValidation"
}
func (m *validateOpGetPreset) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*GetPresetInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpGetPresetInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpGetQueue struct {
}
func (*validateOpGetQueue) ID() string {
return "OperationInputValidation"
}
func (m *validateOpGetQueue) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*GetQueueInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpGetQueueInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpListTagsForResource struct {
}
func (*validateOpListTagsForResource) ID() string {
return "OperationInputValidation"
}
func (m *validateOpListTagsForResource) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*ListTagsForResourceInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpListTagsForResourceInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpTagResource struct {
}
func (*validateOpTagResource) ID() string {
return "OperationInputValidation"
}
func (m *validateOpTagResource) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*TagResourceInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpTagResourceInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpUntagResource struct {
}
func (*validateOpUntagResource) ID() string {
return "OperationInputValidation"
}
func (m *validateOpUntagResource) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*UntagResourceInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpUntagResourceInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpUpdateJobTemplate struct {
}
func (*validateOpUpdateJobTemplate) ID() string {
return "OperationInputValidation"
}
func (m *validateOpUpdateJobTemplate) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*UpdateJobTemplateInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpUpdateJobTemplateInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpUpdatePreset struct {
}
func (*validateOpUpdatePreset) ID() string {
return "OperationInputValidation"
}
func (m *validateOpUpdatePreset) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*UpdatePresetInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpUpdatePresetInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
type validateOpUpdateQueue struct {
}
func (*validateOpUpdateQueue) ID() string {
return "OperationInputValidation"
}
func (m *validateOpUpdateQueue) HandleInitialize(ctx context.Context, in middleware.InitializeInput, next middleware.InitializeHandler) (
out middleware.InitializeOutput, metadata middleware.Metadata, err error,
) {
input, ok := in.Parameters.(*UpdateQueueInput)
if !ok {
return out, metadata, fmt.Errorf("unknown input parameters type %T", in.Parameters)
}
if err := validateOpUpdateQueueInput(input); err != nil {
return out, metadata, err
}
return next.HandleInitialize(ctx, in)
}
func addOpAssociateCertificateValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpAssociateCertificate{}, middleware.After)
}
func addOpCancelJobValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpCancelJob{}, middleware.After)
}
func addOpCreateJobValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpCreateJob{}, middleware.After)
}
func addOpCreateJobTemplateValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpCreateJobTemplate{}, middleware.After)
}
func addOpCreatePresetValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpCreatePreset{}, middleware.After)
}
func addOpCreateQueueValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpCreateQueue{}, middleware.After)
}
func addOpDeleteJobTemplateValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpDeleteJobTemplate{}, middleware.After)
}
func addOpDeletePresetValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpDeletePreset{}, middleware.After)
}
func addOpDeleteQueueValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpDeleteQueue{}, middleware.After)
}
func addOpDisassociateCertificateValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpDisassociateCertificate{}, middleware.After)
}
func addOpGetJobValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpGetJob{}, middleware.After)
}
func addOpGetJobTemplateValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpGetJobTemplate{}, middleware.After)
}
func addOpGetPresetValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpGetPreset{}, middleware.After)
}
func addOpGetQueueValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpGetQueue{}, middleware.After)
}
func addOpListTagsForResourceValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpListTagsForResource{}, middleware.After)
}
func addOpTagResourceValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpTagResource{}, middleware.After)
}
func addOpUntagResourceValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpUntagResource{}, middleware.After)
}
func addOpUpdateJobTemplateValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpUpdateJobTemplate{}, middleware.After)
}
func addOpUpdatePresetValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpUpdatePreset{}, middleware.After)
}
func addOpUpdateQueueValidationMiddleware(stack *middleware.Stack) error {
return stack.Initialize.Add(&validateOpUpdateQueue{}, middleware.After)
}
func validateAccelerationSettings(v *types.AccelerationSettings) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "AccelerationSettings"}
if len(v.Mode) == 0 {
invalidParams.Add(smithy.NewErrParamRequired("Mode"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateReservationPlanSettings(v *types.ReservationPlanSettings) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "ReservationPlanSettings"}
if len(v.Commitment) == 0 {
invalidParams.Add(smithy.NewErrParamRequired("Commitment"))
}
if len(v.RenewalType) == 0 {
invalidParams.Add(smithy.NewErrParamRequired("RenewalType"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpAssociateCertificateInput(v *AssociateCertificateInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "AssociateCertificateInput"}
if v.Arn == nil {
invalidParams.Add(smithy.NewErrParamRequired("Arn"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
} | if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "CancelJobInput"}
if v.Id == nil {
invalidParams.Add(smithy.NewErrParamRequired("Id"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpCreateJobInput(v *CreateJobInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "CreateJobInput"}
if v.AccelerationSettings != nil {
if err := validateAccelerationSettings(v.AccelerationSettings); err != nil {
invalidParams.AddNested("AccelerationSettings", err.(smithy.InvalidParamsError))
}
}
if v.Role == nil {
invalidParams.Add(smithy.NewErrParamRequired("Role"))
}
if v.Settings == nil {
invalidParams.Add(smithy.NewErrParamRequired("Settings"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpCreateJobTemplateInput(v *CreateJobTemplateInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "CreateJobTemplateInput"}
if v.AccelerationSettings != nil {
if err := validateAccelerationSettings(v.AccelerationSettings); err != nil {
invalidParams.AddNested("AccelerationSettings", err.(smithy.InvalidParamsError))
}
}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if v.Settings == nil {
invalidParams.Add(smithy.NewErrParamRequired("Settings"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpCreatePresetInput(v *CreatePresetInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "CreatePresetInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if v.Settings == nil {
invalidParams.Add(smithy.NewErrParamRequired("Settings"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpCreateQueueInput(v *CreateQueueInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "CreateQueueInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if v.ReservationPlanSettings != nil {
if err := validateReservationPlanSettings(v.ReservationPlanSettings); err != nil {
invalidParams.AddNested("ReservationPlanSettings", err.(smithy.InvalidParamsError))
}
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpDeleteJobTemplateInput(v *DeleteJobTemplateInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "DeleteJobTemplateInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpDeletePresetInput(v *DeletePresetInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "DeletePresetInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpDeleteQueueInput(v *DeleteQueueInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "DeleteQueueInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpDisassociateCertificateInput(v *DisassociateCertificateInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "DisassociateCertificateInput"}
if v.Arn == nil {
invalidParams.Add(smithy.NewErrParamRequired("Arn"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpGetJobInput(v *GetJobInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "GetJobInput"}
if v.Id == nil {
invalidParams.Add(smithy.NewErrParamRequired("Id"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpGetJobTemplateInput(v *GetJobTemplateInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "GetJobTemplateInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpGetPresetInput(v *GetPresetInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "GetPresetInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpGetQueueInput(v *GetQueueInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "GetQueueInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpListTagsForResourceInput(v *ListTagsForResourceInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "ListTagsForResourceInput"}
if v.Arn == nil {
invalidParams.Add(smithy.NewErrParamRequired("Arn"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpTagResourceInput(v *TagResourceInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "TagResourceInput"}
if v.Arn == nil {
invalidParams.Add(smithy.NewErrParamRequired("Arn"))
}
if v.Tags == nil {
invalidParams.Add(smithy.NewErrParamRequired("Tags"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpUntagResourceInput(v *UntagResourceInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "UntagResourceInput"}
if v.Arn == nil {
invalidParams.Add(smithy.NewErrParamRequired("Arn"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpUpdateJobTemplateInput(v *UpdateJobTemplateInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "UpdateJobTemplateInput"}
if v.AccelerationSettings != nil {
if err := validateAccelerationSettings(v.AccelerationSettings); err != nil {
invalidParams.AddNested("AccelerationSettings", err.(smithy.InvalidParamsError))
}
}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpUpdatePresetInput(v *UpdatePresetInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "UpdatePresetInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
}
func validateOpUpdateQueueInput(v *UpdateQueueInput) error {
if v == nil {
return nil
}
invalidParams := smithy.InvalidParamsError{Context: "UpdateQueueInput"}
if v.Name == nil {
invalidParams.Add(smithy.NewErrParamRequired("Name"))
}
if v.ReservationPlanSettings != nil {
if err := validateReservationPlanSettings(v.ReservationPlanSettings); err != nil {
invalidParams.AddNested("ReservationPlanSettings", err.(smithy.InvalidParamsError))
}
}
if invalidParams.Len() > 0 {
return invalidParams
} else {
return nil
}
} |
func validateOpCancelJobInput(v *CancelJobInput) error { |
vk_bot.py | from pymongo import MongoClient
import random
from datetime import datetime
random.seed(datetime.now())
class | ():
def __init__(self, user_id):
print("Создан объект бота!")
self.user_id = user_id
self.send = {'lang': '', 'level': '', 'format': '', 'discus': ''}
self.steps = {1: False, 2: False, 3: False, 4: False, 5: False}
def message(self,response,keyboard):
themes = ['социальная сфера', 'политика', 'экономика', 'наркотики', 'феминизм', 'международные отношения', \
'спорт', 'сми', 'мигранты', 'религия', 'этика']
if response == "поехали" or response == "start" or response == "начать" or response == "заново":
self.send = {'lang': '', 'level': '', 'format': '', 'discus': ''}
self.steps = {1: False, 2: False, 3: False, 4: False, 5: False}
return [ 'user_id', self.user_id, 'Хочешь сам выбрать или доверишься мне?', keyboard]
elif response == "выбрать тему":
if self.steps[1] == False:
self.steps[1] = True
return( 'user_id', self.user_id,'Выбери язык', keyboard)
else:
return( 'user_id', self.user_id,'Много просить нельзя, у тебя же есть кнопочки, \
пользуйся\nЕсли хочешь начать заново, то напиши заново')
elif response == "случайная тема":
return( 'user_id', self.user_id, self.get_theme(self.send), keyboard)
elif response == 'русский':
if self.steps[1] == True and self.steps[2] == False:
self.steps[2] = True
self.send['lang'] = 'ru'
return( 'user_id', self.user_id, 'Выбери сложность', keyboard)
else:
return( 'user_id', self.user_id, 'Много просить нельзя, у тебя же есть кнопочки, \
пользуйся\nЕсли хочешь начать заново, то напиши заново')
elif response == 'английский':
if self.steps[1] == True and self.steps[2] == False:
self.steps[2] = True
self.send['lang'] = 'en'
return( 'user_id', self.user_id, 'Выбери сложность', keyboard)
else:
return( 'user_id', self.user_id, 'Много просить нельзя, у тебя же есть кнопочки, \
пользуйся\nЕсли хочешь начать заново, то напиши заново')
elif response == 'легкая':
if self.steps[1] == True and self.steps[2] == True and self.steps[3] == False:
self.steps[3] = True
self.send['level'] = 'easy'
return( 'user_id', self.user_id, 'Выбери сферу', keyboard)
else:
return( 'user_id', self.user_id, 'Много просить нельзя, у тебя же есть кнопочки, \
пользуйся\nЕсли хочешь начать заново, то напиши заново')
elif response == 'сложная':
if self.steps[1] == True and self.steps[2] == True and self.steps[3] == False:
self.steps[3] = True
self.send['level'] = 'hard'
return( 'user_id', self.user_id, 'Выбери сферу', keyboard)
else:
return( 'user_id', self.user_id, 'Много просить нельзя, у тебя же есть кнопочки, \
пользуйся\nЕсли хочешь начать заново, то напиши заново')
elif response in themes: # было "социалочка"
if self.steps[1] == True and self.steps[2] == True and self.steps[3] == True and self.steps[4] == False:
self.steps[4] = True
if response == themes[0]:
self.send['discus'] = 'soc'
elif response == themes[1]:
self.send['discus'] = 'polit'
elif response == themes[2]:
self.send['discus'] = 'econ'
elif response == themes[3]:
self.send['discus'] = 'narco'
elif response == themes[4]:
self.send['discus'] = 'fem'
elif response == themes[5]:
self.send['discus'] = 'inter'
elif response == themes[6]:
self.send['discus'] = 'sport'
elif response == themes[7]:
self.send['discus'] = 'media'
elif response == themes[8]:
self.send['discus'] = 'migr'
elif response == themes[9]:
self.send['discus'] = 'relig'
elif response == themes[10]:
self.send['discus'] = 'ethics'
# здесь будет добавление в словарь тэга "сфера дискуссии"
return( 'user_id', self.user_id, 'Выбери формат', keyboard)
else:
return( 'user_id', self.user_id, 'Много просить нельзя, у тебя же есть кнопочки, \
пользуйся\nЕсли хочешь начать заново, то напиши заново')
elif response == 'эп':
if self.steps[1] == True and self.steps[2] == True and self.steps[3] == True and \
self.steps[4] == True and self.steps[5] == False:
self.steps[5] = True
self.send['format'] = 'th'
return( 'user_id', self.user_id, self.get_theme(self.send), keyboard)
else:
return( 'user_id', self.user_id, 'Много просить нельзя, у тебя же есть кнопочки, \
пользуйся\nЕсли хочешь начать заново, то напиши заново')
elif response == 'эпсч':
if self.steps[1] == True and self.steps[2] == True and self.steps[3] == True and \
self.steps[4] == True and self.steps[5] == False:
self.steps[5] = True
self.send['format'] = 'thbt'
return( 'user_id', self.user_id, self.get_theme(self.send), keyboard)
else:
return( 'user_id', self.user_id, 'Много просить нельзя, у тебя же есть кнопочки, \
пользуйся\nЕсли хочешь начать заново, то напиши заново')
elif response == 'эп как':
if self.steps[1] == True and self.steps[2] == True and self.steps[3] == True and \
self.steps[4] == True and self.steps[5] == False:
self.steps[5] = True
self.send['format'] = 'tha'
return( 'user_id', self.user_id, self.get_theme(self.send), keyboard)
else:
return( 'user_id', self.user_id, 'Много просить нельзя, у тебя же есть кнопочки, \
пользуйся\nЕсли хочешь начать заново, то напиши заново')
# elif response == 'закрыть':
# return ('user_id', self.user_id, 'Закрыть', keyboard)
def get_theme(self, send):
client = MongoClient("mongodb://127.0.0.1:27017")
db = client.dbThemes
col = db.data
ss = {}
t_list = []
for i in send.items():
if i[1] != '':
ss[i[0]] = i[1]
print(ss)
col = col.find(ss)
for i in col:
t_list.append(i['name'])
if len(t_list) != 0:
res = random.choice(t_list)
else:
res = 'К сожалению таких тем нет'
return res
| VkBot |
0011_jobreviewcomment.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import markupfield.fields
import django.utils.timezone
from django.conf import settings
class | (migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('jobs', '0010_auto_20150416_1853'),
]
operations = [
migrations.CreateModel(
name='JobReviewComment',
fields=[
('id', models.AutoField(serialize=False, verbose_name='ID', primary_key=True, auto_created=True)),
('created', models.DateTimeField(blank=True, default=django.utils.timezone.now, db_index=True)),
('updated', models.DateTimeField(blank=True)),
('comment', markupfield.fields.MarkupField(rendered_field=True)),
('comment_markup_type', models.CharField(choices=[('', '--'), ('html', 'HTML'), ('plain', 'Plain'), ('markdown', 'Markdown'), ('restructuredtext', 'Restructured Text')], max_length=30, default='restructuredtext')),
('_comment_rendered', models.TextField(editable=False)),
('creator', models.ForeignKey(related_name='jobs_jobreviewcomment_creator', to=settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)),
('job', models.ForeignKey(related_name='review_comments', to='jobs.Job', on_delete=models.CASCADE)),
('last_modified_by', models.ForeignKey(related_name='jobs_jobreviewcomment_modified', to=settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
]
| Migration |
pipes_tests.py | from unittest.mock import Mock, call, patch
from pytest import fixture, mark, fail
from pyviews.core import XmlAttr
from wxviews.core import pipes, WxRenderingContext
from wxviews.core.pipes import apply_attributes, add_to_sizer
from wxviews.widgets import WxNode
class TestControl:
def __init__(self):
self.node_key = None
self.instance_key = None
class TestNode(WxNode):
def __init__(self, widget):
super().__init__(widget, Mock())
self.node_key = None
@fixture
def apply_attribute_fixture(request):
with patch(pipes.__name__ + '.apply_attribute') as apply_attribute_mock:
request.cls.apply_attribute = apply_attribute_mock
yield apply_attribute_mock
@mark.usefixtures('apply_attribute_fixture')
class ApplyAttributesTests:
"""apply_attributes() step tests"""
@mark.parametrize('attr', [
XmlAttr('key', 'value', 'init')
])
def test_skip_special_attributes(self, attr):
"""should skip attributes with "init" and "sizer" namespaces"""
self.apply_attribute.reset_mock()
node = Mock(xml_node=Mock(attrs=[attr]))
apply_attributes(node, WxRenderingContext())
assert not self.apply_attribute.called
@mark.parametrize('attrs', [
[XmlAttr('key', 'value')],
[XmlAttr('key', 'value', ''), XmlAttr('other_key', 'key', 'some namespace')]
])
def test_apply_attributes(self, attrs):
"""should apply passed attributes"""
self.apply_attribute.reset_mock()
node = Mock(xml_node=Mock(attrs=attrs))
apply_attributes(node, WxRenderingContext())
assert self.apply_attribute.call_args_list == [call(node, attr) for attr in attrs]
class AddToSizerTests:
"""add_to_sizer() step tests"""
@staticmethod
def | (sizer_args=None, node_globals=None):
sizer_args = sizer_args if sizer_args else {}
node = Mock(sizer_args=sizer_args, node_globals=node_globals, instace=Mock())
return node, Mock()
@mark.parametrize('sizer_args', [
{},
{'key': 'value'},
{'key': 'value', 'one': 1}
])
def test_passes_attr_args(self, sizer_args):
"""should call sizer.Add with node.sizer_args"""
node, sizer = self._get_mocks(sizer_args)
add_to_sizer(node, WxRenderingContext({'sizer': sizer}))
assert sizer.Add.call_args == call(node.instance, **sizer_args)
def test_skips_if_sizer_missed(self):
"""should skip if sizer is missed"""
node = self._get_mocks()[0]
try:
add_to_sizer(node, WxRenderingContext())
except BaseException:
fail()
| _get_mocks |
outputs_created.rs | // Copyright 2021 IOTA Stiftung
// SPDX-License-Identifier: Apache-2.0
use crate::{plugins::mqtt::handlers::spawn_static_topic_handler, storage::StorageBackend};
use bee_ledger::workers::event::OutputCreated;
use bee_rest_api::types::responses::OutputResponse;
use bee_runtime::node::Node;
use librumqttd::LinkTx;
pub(crate) fn spawn<N>(node: &mut N, outputs_created_tx: LinkTx)
where
N: Node,
N::Backend: StorageBackend,
{
spawn_static_topic_handler(
node,
outputs_created_tx,
"outputs/[outputId] created",
|event: OutputCreated| {
let output_response_json = serde_json::to_string(&OutputResponse {
message_id: event.message_id.to_string(),
transaction_id: event.output_id.transaction_id().to_string(),
output_index: event.output_id.index(),
is_spent: false,
output: (&event.output).into(), | })
.expect("error serializing to json");
(format!("outputs/{}", event.output_id.to_string()), output_response_json)
},
);
} | ledger_index: 0, // TODO: set actual ledger-index |
0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
class Migration(migrations.Migration): |
dependencies = [
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
('email', models.EmailField(unique=True, max_length=75)),
('rpwd', models.CharField(max_length=20)),
('first_name', models.CharField(max_length=100)),
('last_name', models.CharField(max_length=100)),
('gender', models.CharField(default=b'Unknown', max_length=10, verbose_name=b'Gender', choices=[(b'Male', b'Male'), (b'Female', b'Female')])),
('join_date', models.DateTimeField(auto_now_add=True)),
('mobile', models.CharField(max_length=15)),
('user_type', models.CharField(default=b'user', max_length=10, verbose_name=b'UserType', choices=[(b'user', b'user'), (b'Admin', b'Admin')])),
('is_admin', models.BooleanField(default=False)),
('is_active', models.BooleanField(default=True)),
],
options={
'abstract': False,
},
bases=(models.Model,),
),
] | |
button.tsx | import React, { ReactElement, FunctionComponent, CSSProperties, memo } from 'react';
import styled, { css } from 'styled-components';
import {
primaryButtonBackground,
primaryButtonHoverBackground,
primaryButtonActiveBackground,
primaryButtonFocusShadowColor,
dangerButtonHoverBackground,
dangerButtonBackground,
dangerButtonActiveBackground,
dangerButtonFocusShadowColor,
} from '@app/styles/variables';
const StyledButton = styled.button`
display: inline-block;
font-weight: 400;
color: #606a96;
text-align: center;
vertical-align: middle;
user-select: none;
background-color: transparent;
border: 1px solid transparent;
padding: 0.625rem 1rem;
font-size: 0.875rem;
line-height: 1;
border-radius: 0.25rem;
transition: color 0.15s ease-in-out, background-color 0.15s ease-in-out, border-color 0.15s ease-in-out,
box-shadow 0.15s ease-in-out;
cursor: pointer;
&:focus,
&:active {
outline: none;
}
${({ theme }: ButtonProps) =>
theme === 'primary' && | css`
color: #fff;
background-color: ${primaryButtonBackground};
border-color: ${primaryButtonBackground};
&:hover {
background-color: ${primaryButtonHoverBackground};
border-color: ${primaryButtonHoverBackground};
}
&:not(:disabled):not(.disabled):active {
background-color: ${primaryButtonActiveBackground};
border-color: ${primaryButtonActiveBackground};
}
&:not(:disabled):not(.disabled):active:focus {
box-shadow: 0 0 0 0.2rem ${primaryButtonFocusShadowColor};
}
`}
${({ theme }: ButtonProps) =>
theme === 'light' &&
css`
color: #212529;
background-color: #f8f9fa;
border-color: #f8f9fa;
&:hover {
background-color: #e2e6ea;
border-color: #dae0e5;
}
&:not(:disabled):not(.disabled):active {
background-color: #dae0e5;
border-color: #d3d9df;
}
&:not(:disabled):not(.disabled):active:focus {
box-shadow: 0 0 0 0.2rem rgba(216, 217, 219, 0.5);
}
`};
${({ theme }: ButtonProps) =>
theme === 'danger' &&
css`
color: white;
background-color: ${dangerButtonBackground};
border-color: ${dangerButtonBackground};
&:hover {
background-color: ${dangerButtonHoverBackground};
border-color: ${dangerButtonHoverBackground};
}
&:not(:disabled):not(.disabled):active {
background-color: ${dangerButtonActiveBackground};
border-color: ${dangerButtonActiveBackground};
}
&:not(:disabled):not(.disabled):active:focus {
box-shadow: 0 0 0 0.2rem ${dangerButtonFocusShadowColor};
}
`};
`;
interface ButtonProps {
onClick: () => void;
theme?: 'primary' | 'light' | 'danger';
style?: CSSProperties;
}
export const Button: FunctionComponent<ButtonProps> = memo(
({ children, onClick, theme, style }): ReactElement => {
return (
<StyledButton onClick={onClick} theme={theme || 'primary'} style={style}>
{children}
</StyledButton>
);
},
); | |
rectangle.rs | //! flash.geom.Rectangle
use crate::avm1::activation::Activation;
use crate::avm1::error::Error;
use crate::avm1::function::{Executable, FunctionObject};
use crate::avm1::globals::point::{construct_new_point, point_to_object, value_to_point};
use crate::avm1::property::Attribute;
use crate::avm1::{AvmString, Object, ScriptObject, TObject, Value};
use gc_arena::MutationContext;
fn constructor<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
if args.is_empty() {
this.set("x", 0.into(), activation)?;
this.set("y", 0.into(), activation)?;
this.set("width", 0.into(), activation)?;
this.set("height", 0.into(), activation)?;
} else {
this.set(
"x",
args.get(0).unwrap_or(&Value::Undefined).to_owned(),
activation,
)?;
this.set(
"y",
args.get(1).unwrap_or(&Value::Undefined).to_owned(),
activation,
)?;
this.set(
"width",
args.get(2).unwrap_or(&Value::Undefined).to_owned(),
activation,
)?;
this.set(
"height",
args.get(3).unwrap_or(&Value::Undefined).to_owned(),
activation,
)?; |
fn to_string<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
_args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let x = this.get("x", activation)?;
let y = this.get("y", activation)?;
let width = this.get("width", activation)?;
let height = this.get("height", activation)?;
Ok(AvmString::new(
activation.context.gc_context,
format!(
"(x={}, y={}, w={}, h={})",
x.coerce_to_string(activation)?,
y.coerce_to_string(activation)?,
width.coerce_to_string(activation)?,
height.coerce_to_string(activation)?
),
)
.into())
}
pub fn create_rectangle_object<'gc>(
gc_context: MutationContext<'gc, '_>,
rectangle_proto: Object<'gc>,
fn_proto: Option<Object<'gc>>,
) -> Object<'gc> {
FunctionObject::constructor(
gc_context,
Executable::Native(constructor),
constructor_to_fn!(constructor),
fn_proto,
rectangle_proto,
)
}
fn is_empty<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
_args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let width = this.get("width", activation)?.coerce_to_f64(activation)?;
let height = this.get("height", activation)?.coerce_to_f64(activation)?;
Ok((width <= 0.0 || height <= 0.0 || width.is_nan() || height.is_nan()).into())
}
fn set_empty<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
_args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
this.set("x", 0.into(), activation)?;
this.set("y", 0.into(), activation)?;
this.set("width", 0.into(), activation)?;
this.set("height", 0.into(), activation)?;
Ok(Value::Undefined)
}
fn clone<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
_args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let args = [
this.get("x", activation)?,
this.get("y", activation)?,
this.get("width", activation)?,
this.get("height", activation)?,
];
let constructor = activation.context.avm1.prototypes.rectangle_constructor;
let cloned = constructor.construct(activation, &args)?;
Ok(cloned)
}
fn contains<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
// TODO: This arbitrarily should return `false` or `undefined` for different invalid-values.
// I can't find any rhyme or reason for it.
let x = args
.get(0)
.unwrap_or(&Value::Undefined)
.to_owned()
.coerce_to_f64(activation)?;
let y = args
.get(1)
.unwrap_or(&Value::Undefined)
.to_owned()
.coerce_to_f64(activation)?;
if x.is_nan() || y.is_nan() {
return Ok(Value::Undefined);
}
let left = this.get("x", activation)?.coerce_to_f64(activation)?;
let right = left + this.get("width", activation)?.coerce_to_f64(activation)?;
let top = this.get("y", activation)?.coerce_to_f64(activation)?;
let bottom = top + this.get("height", activation)?.coerce_to_f64(activation)?;
Ok((x >= left && x < right && y >= top && y < bottom).into())
}
fn contains_point<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let (x, y) = value_to_point(
args.get(0).unwrap_or(&Value::Undefined).to_owned(),
activation,
)?;
if x.is_nan() || y.is_nan() {
return Ok(Value::Undefined);
}
let left = this.get("x", activation)?.coerce_to_f64(activation)?;
let right = left + this.get("width", activation)?.coerce_to_f64(activation)?;
let top = this.get("y", activation)?.coerce_to_f64(activation)?;
let bottom = top + this.get("height", activation)?.coerce_to_f64(activation)?;
Ok((x >= left && x < right && y >= top && y < bottom).into())
}
fn contains_rectangle<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let other = if let Some(Value::Object(other)) = args.get(0) {
other
} else {
return Ok(Value::Undefined);
};
let this_left = this.get("x", activation)?.coerce_to_f64(activation)?;
let this_top = this.get("y", activation)?.coerce_to_f64(activation)?;
let this_right = this_left + this.get("width", activation)?.coerce_to_f64(activation)?;
let this_bottom = this_top + this.get("height", activation)?.coerce_to_f64(activation)?;
let other_left = other.get("x", activation)?.coerce_to_f64(activation)?;
let other_top = other.get("y", activation)?.coerce_to_f64(activation)?;
let other_right = other_left + other.get("width", activation)?.coerce_to_f64(activation)?;
let other_bottom = other_top + other.get("height", activation)?.coerce_to_f64(activation)?;
if other_left.is_nan() || other_top.is_nan() || other_right.is_nan() || other_bottom.is_nan() {
return Ok(Value::Undefined);
}
Ok((other_left >= this_left
&& other_right <= this_right
&& other_top >= this_top
&& other_bottom <= this_bottom)
.into())
}
fn intersects<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let other = if let Some(Value::Object(other)) = args.get(0) {
other
} else {
return Ok(false.into());
};
let this_left = this.get("x", activation)?.coerce_to_f64(activation)?;
let this_top = this.get("y", activation)?.coerce_to_f64(activation)?;
let this_right = this_left + this.get("width", activation)?.coerce_to_f64(activation)?;
let this_bottom = this_top + this.get("height", activation)?.coerce_to_f64(activation)?;
let other_left = other.get("x", activation)?.coerce_to_f64(activation)?;
let other_top = other.get("y", activation)?.coerce_to_f64(activation)?;
let other_right = other_left + other.get("width", activation)?.coerce_to_f64(activation)?;
let other_bottom = other_top + other.get("height", activation)?.coerce_to_f64(activation)?;
Ok((this_left < other_right
&& this_right > other_left
&& this_top < other_bottom
&& this_bottom > other_top)
.into())
}
fn union<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let this_left = this.get("x", activation)?.coerce_to_f64(activation)?;
let this_top = this.get("y", activation)?.coerce_to_f64(activation)?;
let this_right = this_left + this.get("width", activation)?.coerce_to_f64(activation)?;
let this_bottom = this_top + this.get("height", activation)?.coerce_to_f64(activation)?;
let (other_left, other_top, other_width, other_height) =
if let Some(Value::Object(other)) = args.get(0) {
(
other.get("x", activation)?.coerce_to_f64(activation)?,
other.get("y", activation)?.coerce_to_f64(activation)?,
other.get("width", activation)?.coerce_to_f64(activation)?,
other.get("height", activation)?.coerce_to_f64(activation)?,
)
} else {
(f64::NAN, f64::NAN, f64::NAN, f64::NAN)
};
let other_right = other_left + other_width;
let other_bottom = other_top + other_height;
let left = if this_left.is_nan() {
this_left
} else if other_left.is_nan() {
other_left
} else {
this_left.min(other_left)
};
let top = if this_top.is_nan() {
this_top
} else if other_top.is_nan() {
other_top
} else {
this_top.min(other_top)
};
let width = if this_right.is_nan() {
this_right
} else if other_right.is_nan() {
other_right
} else {
this_right.max(other_right)
} - left;
let height = if this_bottom.is_nan() {
this_bottom
} else if other_bottom.is_nan() {
other_bottom
} else {
this_bottom.max(other_bottom)
} - top;
let args = [
Value::Number(left),
Value::Number(top),
Value::Number(width),
Value::Number(height),
];
let constructor = activation.context.avm1.prototypes.rectangle_constructor;
let result = constructor.construct(activation, &args)?;
Ok(result)
}
fn inflate<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let x = this.get("x", activation)?.coerce_to_f64(activation)?;
let y = this.get("y", activation)?.coerce_to_f64(activation)?;
let width = this.get("width", activation)?.coerce_to_f64(activation)?;
let height = this.get("height", activation)?.coerce_to_f64(activation)?;
let horizontal = args
.get(0)
.unwrap_or(&Value::Undefined)
.to_owned()
.coerce_to_f64(activation)?;
let vertical = args
.get(1)
.unwrap_or(&Value::Undefined)
.to_owned()
.coerce_to_f64(activation)?;
this.set("x", Value::Number(x - horizontal), activation)?;
this.set("y", Value::Number(y - vertical), activation)?;
this.set("width", Value::Number(width + horizontal * 2.0), activation)?;
this.set("height", Value::Number(height + vertical * 2.0), activation)?;
Ok(Value::Undefined)
}
fn inflate_point<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let x = this.get("x", activation)?.coerce_to_f64(activation)?;
let y = this.get("y", activation)?.coerce_to_f64(activation)?;
let width = this.get("width", activation)?.coerce_to_f64(activation)?;
let height = this.get("height", activation)?.coerce_to_f64(activation)?;
let (horizontal, vertical) = value_to_point(
args.get(0).unwrap_or(&Value::Undefined).to_owned(),
activation,
)?;
this.set("x", Value::Number(x - horizontal), activation)?;
this.set("y", Value::Number(y - vertical), activation)?;
this.set("width", Value::Number(width + horizontal * 2.0), activation)?;
this.set("height", Value::Number(height + vertical * 2.0), activation)?;
Ok(Value::Undefined)
}
fn offset<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let x = this.get("x", activation)?.coerce_to_f64(activation)?;
let y = this.get("y", activation)?.coerce_to_f64(activation)?;
let horizontal = args
.get(0)
.unwrap_or(&Value::Undefined)
.to_owned()
.coerce_to_f64(activation)?;
let vertical = args
.get(1)
.unwrap_or(&Value::Undefined)
.to_owned()
.coerce_to_f64(activation)?;
this.set("x", Value::Number(x + horizontal), activation)?;
this.set("y", Value::Number(y + vertical), activation)?;
Ok(Value::Undefined)
}
fn offset_point<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let x = this.get("x", activation)?.coerce_to_f64(activation)?;
let y = this.get("y", activation)?.coerce_to_f64(activation)?;
let (horizontal, vertical) = value_to_point(
args.get(0).unwrap_or(&Value::Undefined).to_owned(),
activation,
)?;
this.set("x", Value::Number(x + horizontal), activation)?;
this.set("y", Value::Number(y + vertical), activation)?;
Ok(Value::Undefined)
}
fn intersection<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let this_left = this.get("x", activation)?.coerce_to_f64(activation)?;
let this_top = this.get("y", activation)?.coerce_to_f64(activation)?;
let this_right = this_left + this.get("width", activation)?.coerce_to_f64(activation)?;
let this_bottom = this_top + this.get("height", activation)?.coerce_to_f64(activation)?;
let (other_left, other_top, other_width, other_height) =
if let Some(Value::Object(other)) = args.get(0) {
(
other.get("x", activation)?.coerce_to_f64(activation)?,
other.get("y", activation)?.coerce_to_f64(activation)?,
other.get("width", activation)?.coerce_to_f64(activation)?,
other.get("height", activation)?.coerce_to_f64(activation)?,
)
} else {
(f64::NAN, f64::NAN, f64::NAN, f64::NAN)
};
let other_right = other_left + other_width;
let other_bottom = other_top + other_height;
let (mut left, mut top, mut right, mut bottom) = if this_left.is_nan()
|| other_left.is_nan()
|| this_top.is_nan()
|| other_top.is_nan()
|| this_right.is_nan()
|| other_right.is_nan()
|| this_bottom.is_nan()
|| other_bottom.is_nan()
{
(0.0, 0.0, 0.0, 0.0)
} else {
(
this_left.max(other_left),
this_top.max(other_top),
this_right.min(other_right),
this_bottom.min(other_bottom),
)
};
if right <= left || bottom <= top {
right = 0.0;
left = 0.0;
bottom = 0.0;
top = 0.0;
}
let args = [
Value::Number(left),
Value::Number(top),
Value::Number(right - left),
Value::Number(bottom - top),
];
let constructor = activation.context.avm1.prototypes.rectangle_constructor;
let result = constructor.construct(activation, &args)?;
Ok(result)
}
fn equals<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
if let Some(Value::Object(other)) = args.get(0) {
let this_x = this.get("x", activation)?;
let this_y = this.get("y", activation)?;
let this_width = this.get("width", activation)?;
let this_height = this.get("height", activation)?;
let other_x = other.get("x", activation)?;
let other_y = other.get("y", activation)?;
let other_width = other.get("width", activation)?;
let other_height = other.get("height", activation)?;
let proto = activation.context.avm1.prototypes.rectangle;
let constructor = activation.context.avm1.prototypes.rectangle_constructor;
return Ok((this_x == other_x
&& this_y == other_y
&& this_width == other_width
&& this_height == other_height
&& other.is_instance_of(activation, constructor, proto)?)
.into());
}
Ok(false.into())
}
fn get_left<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
_args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
this.get("x", activation)
}
fn set_left<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let new_left = args.get(0).unwrap_or(&Value::Undefined).to_owned();
let old_left = this.get("x", activation)?.coerce_to_f64(activation)?;
let width = this.get("width", activation)?.coerce_to_f64(activation)?;
this.set("x", new_left, activation)?;
this.set(
"width",
Value::Number(width + (old_left - new_left.coerce_to_f64(activation)?)),
activation,
)?;
Ok(Value::Undefined)
}
fn get_top<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
_args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
this.get("y", activation)
}
fn set_top<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let new_top = args.get(0).unwrap_or(&Value::Undefined).to_owned();
let old_top = this.get("y", activation)?.coerce_to_f64(activation)?;
let height = this.get("height", activation)?.coerce_to_f64(activation)?;
this.set("y", new_top, activation)?;
this.set(
"height",
Value::Number(height + (old_top - new_top.coerce_to_f64(activation)?)),
activation,
)?;
Ok(Value::Undefined)
}
fn get_right<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
_args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let x = this.get("x", activation)?.coerce_to_f64(activation)?;
let width = this.get("width", activation)?.coerce_to_f64(activation)?;
Ok((x + width).into())
}
/// Setter for `Rectangle.right`: moves the right edge by adjusting
/// `width`; `x` is left untouched.
fn set_right<'gc>(
    activation: &mut Activation<'_, 'gc, '_>,
    this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    // A missing argument pins the edge at NaN instead of coercing undefined.
    let new_right = match args.get(0) {
        Some(arg) => arg.coerce_to_f64(activation)?,
        None => f64::NAN,
    };
    let left = this.get("x", activation)?.coerce_to_f64(activation)?;
    this.set("width", Value::Number(new_right - left), activation)?;
    Ok(Value::Undefined)
}
fn get_bottom<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
_args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let y = this.get("y", activation)?.coerce_to_f64(activation)?;
let height = this.get("height", activation)?.coerce_to_f64(activation)?;
Ok((y + height).into())
}
/// Setter for `Rectangle.bottom`: moves the bottom edge by adjusting
/// `height`; `y` is left untouched.
fn set_bottom<'gc>(
    activation: &mut Activation<'_, 'gc, '_>,
    this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    // A missing argument pins the edge at NaN instead of coercing undefined.
    let new_bottom = match args.get(0) {
        Some(arg) => arg.coerce_to_f64(activation)?,
        None => f64::NAN,
    };
    let top = this.get("y", activation)?.coerce_to_f64(activation)?;
    this.set("height", Value::Number(new_bottom - top), activation)?;
    Ok(Value::Undefined)
}
/// Getter for `Rectangle.size`: returns a new point whose `x`/`y` are the
/// rectangle's `width`/`height`.
fn get_size<'gc>(
    activation: &mut Activation<'_, 'gc, '_>,
    this: Object<'gc>,
    _args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let dimensions = [
        this.get("width", activation)?,
        this.get("height", activation)?,
    ];
    construct_new_point(&dimensions, activation)
}
fn set_size<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let (width, height) = if let Some(Value::Object(object)) = args.get(0) {
(object.get("x", activation)?, object.get("y", activation)?)
} else {
(Value::Undefined, Value::Undefined)
};
this.set("width", width, activation)?;
this.set("height", height, activation)?;
Ok(Value::Undefined)
}
/// Getter for `Rectangle.topLeft`: returns a new point at the rectangle's
/// `(x, y)` corner.
fn get_top_left<'gc>(
    activation: &mut Activation<'_, 'gc, '_>,
    this: Object<'gc>,
    _args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let corner = [this.get("x", activation)?, this.get("y", activation)?];
    construct_new_point(&corner, activation)
}
fn set_top_left<'gc>(
activation: &mut Activation<'_, 'gc, '_>,
this: Object<'gc>,
args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
let (new_left, new_top) = if let Some(Value::Object(object)) = args.get(0) {
(object.get("x", activation)?, object.get("y", activation)?)
} else {
(Value::Undefined, Value::Undefined)
};
let old_left = this.get("x", activation)?.coerce_to_f64(activation)?;
let width = this.get("width", activation)?.coerce_to_f64(activation)?;
let old_top = this.get("y", activation)?.coerce_to_f64(activation)?;
let height = this.get("height", activation)?.coerce_to_f64(activation)?;
this.set("x", new_left, activation)?;
this.set("y", new_top, activation)?;
this.set(
"width",
Value::Number(width + (old_left - new_left.coerce_to_f64(activation)?)),
activation,
)?;
this.set(
"height",
Value::Number(height + (old_top - new_top.coerce_to_f64(activation)?)),
activation,
)?;
Ok(Value::Undefined)
}
/// Getter for `Rectangle.bottomRight`: returns a new point at
/// `(x + width, y + height)`.
fn get_bottom_right<'gc>(
    activation: &mut Activation<'_, 'gc, '_>,
    this: Object<'gc>,
    _args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    let left = this.get("x", activation)?.coerce_to_f64(activation)?;
    let top = this.get("y", activation)?.coerce_to_f64(activation)?;
    let width = this.get("width", activation)?.coerce_to_f64(activation)?;
    let height = this.get("height", activation)?.coerce_to_f64(activation)?;
    point_to_object((left + width, top + height), activation)
}
/// Setter for `Rectangle.bottomRight`: resizes the rectangle so its
/// bottom-right corner lands on the given point; `x`/`y` stay untouched.
fn set_bottom_right<'gc>(
    activation: &mut Activation<'_, 'gc, '_>,
    this: Object<'gc>,
    args: &[Value<'gc>],
) -> Result<Value<'gc>, Error<'gc>> {
    // `value_to_point` yields the argument's (x, y) coordinate pair.
    // (The original locals were named bottom/right/top/left in a swapped,
    // misleading way; the arithmetic below is unchanged.)
    let (corner_x, corner_y) = value_to_point(
        args.get(0).unwrap_or(&Value::Undefined).to_owned(),
        activation,
    )?;
    let origin_x = this.get("x", activation)?.coerce_to_f64(activation)?;
    let origin_y = this.get("y", activation)?.coerce_to_f64(activation)?;
    this.set("width", Value::Number(corner_x - origin_x), activation)?;
    this.set("height", Value::Number(corner_y - origin_y), activation)?;
    Ok(Value::Undefined)
}
/// Builds the `Rectangle.prototype` object, installing every prototype
/// method and virtual (getter/setter-backed) property.
///
/// * `proto` — the object prototype the new prototype inherits from.
/// * `fn_proto` — `Function.prototype`, used as the prototype for each of
///   the method/accessor function objects created here.
pub fn create_proto<'gc>(
gc_context: MutationContext<'gc, '_>,
proto: Object<'gc>,
fn_proto: Object<'gc>,
) -> Object<'gc> {
let mut object = ScriptObject::object(gc_context, Some(proto));
// --- Prototype methods callable from ActionScript ---
object.force_set_function(
"toString",
to_string,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"isEmpty",
is_empty,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"setEmpty",
set_empty,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"clone",
clone,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"contains",
contains,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"containsPoint",
contains_point,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"containsRectangle",
contains_rectangle,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"intersects",
intersects,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"union",
union,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"inflate",
inflate,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"inflatePoint",
inflate_point,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"offset",
offset,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"offsetPoint",
offset_point,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"intersection",
intersection,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
object.force_set_function(
"equals",
equals,
gc_context,
Attribute::empty(),
Some(fn_proto),
);
// --- Virtual properties backed by native getter/setter pairs.
// Each is non-deletable and hidden from enumeration.
object.add_property(
gc_context,
"left",
FunctionObject::function(
gc_context,
Executable::Native(get_left),
Some(fn_proto),
fn_proto,
),
Some(FunctionObject::function(
gc_context,
Executable::Native(set_left),
Some(fn_proto),
fn_proto,
)),
Attribute::DONT_DELETE | Attribute::DONT_ENUM,
);
object.add_property(
gc_context,
"top",
FunctionObject::function(
gc_context,
Executable::Native(get_top),
Some(fn_proto),
fn_proto,
),
Some(FunctionObject::function(
gc_context,
Executable::Native(set_top),
Some(fn_proto),
fn_proto,
)),
Attribute::DONT_DELETE | Attribute::DONT_ENUM,
);
object.add_property(
gc_context,
"right",
FunctionObject::function(
gc_context,
Executable::Native(get_right),
Some(fn_proto),
fn_proto,
),
Some(FunctionObject::function(
gc_context,
Executable::Native(set_right),
Some(fn_proto),
fn_proto,
)),
Attribute::DONT_DELETE | Attribute::DONT_ENUM,
);
object.add_property(
gc_context,
"bottom",
FunctionObject::function(
gc_context,
Executable::Native(get_bottom),
Some(fn_proto),
fn_proto,
),
Some(FunctionObject::function(
gc_context,
Executable::Native(set_bottom),
Some(fn_proto),
fn_proto,
)),
Attribute::DONT_DELETE | Attribute::DONT_ENUM,
);
object.add_property(
gc_context,
"size",
FunctionObject::function(
gc_context,
Executable::Native(get_size),
Some(fn_proto),
fn_proto,
),
Some(FunctionObject::function(
gc_context,
Executable::Native(set_size),
Some(fn_proto),
fn_proto,
)),
Attribute::DONT_DELETE | Attribute::DONT_ENUM,
);
object.add_property(
gc_context,
"topLeft",
FunctionObject::function(
gc_context,
Executable::Native(get_top_left),
Some(fn_proto),
fn_proto,
),
Some(FunctionObject::function(
gc_context,
Executable::Native(set_top_left),
Some(fn_proto),
fn_proto,
)),
Attribute::DONT_DELETE | Attribute::DONT_ENUM,
);
object.add_property(
gc_context,
"bottomRight",
FunctionObject::function(
gc_context,
Executable::Native(get_bottom_right),
Some(fn_proto),
fn_proto,
),
Some(FunctionObject::function(
gc_context,
Executable::Native(set_bottom_right),
Some(fn_proto),
fn_proto,
)),
Attribute::DONT_DELETE | Attribute::DONT_ENUM,
);
object.into()
} | }
Ok(this.into())
} |
core.module.ts | import { NgModule, NO_ERRORS_SCHEMA } from "@angular/core";
// always import Shared
import { SharedModule } from "@shared/shared.module";
// next import RoutingModule
import { CoreRoutingModule } from "./core-routing.module";
// next import containers, components |
// Routed/"smart" container components declared by this module.
const CORE_CONTAINERS = [
HomeContainer,
];
// Feature module wiring: pulls in SharedModule and this feature's routing
// module, and declares the feature's components and containers.
// NOTE(review): the surrounding file appears to be core.module.ts, but the
// class declared below is named HomeModule — confirm the intended name.
@NgModule({
imports: [
SharedModule,
CoreRoutingModule,
],
declarations: [
CORE_COMPONENTS,
CORE_CONTAINERS,
],
schemas: [
// NO_ERRORS_SCHEMA disables template element/attribute checking
// (commonly used with non-HTML markup) — use with care.
NO_ERRORS_SCHEMA
]
})
export class HomeModule { } | import { HomeContainer } from "./containers";
const CORE_COMPONENTS = []; |
index.js | (function webpackUniversalModuleDefinition(root, factory) {
if(typeof exports === 'object' && typeof module === 'object')
module.exports = factory();
else if(typeof define === 'function' && define.amd)
define([], factory);
else if(typeof exports === 'object')
exports["dialog"] = factory();
else
root["cube"] = root["cube"] || {}, root["cube"]["dialog"] = factory();
})(typeof self !== 'undefined' ? self : this, function() {
return /******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId]) {
/******/ return installedModules[moduleId].exports;
/******/ }
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ i: moduleId,
/******/ l: false,
/******/ exports: {}
/******/ };
/******/
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/
/******/ // Flag the module as loaded
/******/ module.l = true;
/******/
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/
/******/
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/
/******/ // define getter function for harmony exports
/******/ __webpack_require__.d = function(exports, name, getter) {
/******/ if(!__webpack_require__.o(exports, name)) {
/******/ Object.defineProperty(exports, name, {
/******/ configurable: false,
/******/ enumerable: true,
/******/ get: getter
/******/ });
/******/ }
/******/ };
/******/
/******/ // getDefaultExport function for compatibility with non-harmony modules
/******/ __webpack_require__.n = function(module) {
/******/ var getter = module && module.__esModule ?
/******/ function getDefault() { return module['default']; } :
/******/ function getModuleExports() { return module; };
/******/ __webpack_require__.d(getter, 'a', getter);
/******/ return getter;
/******/ };
/******/
/******/ // Object.prototype.hasOwnProperty.call
/******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); };
/******/
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "./";
/******/
/******/ // Load entry module and return exports
/******/ return __webpack_require__(__webpack_require__.s = 294);
/******/ })
/************************************************************************/
/******/ ([
/* 0 */
/***/ (function(module, exports) {
var core = module.exports = { version: '2.5.7' };
if (typeof __e == 'number') __e = core; // eslint-disable-line no-undef
/***/ }),
/* 1 */
/***/ (function(module, exports) {
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
var global = module.exports = typeof window != 'undefined' && window.Math == Math
? window : typeof self != 'undefined' && self.Math == Math ? self
// eslint-disable-next-line no-new-func
: Function('return this')();
if (typeof __g == 'number') __g = global; // eslint-disable-line no-undef
/***/ }),
/* 2 */
/***/ (function(module, exports, __webpack_require__) {
var store = __webpack_require__(24)('wks');
var uid = __webpack_require__(17);
var Symbol = __webpack_require__(1).Symbol;
var USE_SYMBOL = typeof Symbol == 'function';
var $exports = module.exports = function (name) {
return store[name] || (store[name] =
USE_SYMBOL && Symbol[name] || (USE_SYMBOL ? Symbol : uid)('Symbol.' + name));
};
$exports.store = store;
/***/ }),
/* 3 */
/***/ (function(module, exports, __webpack_require__) {
// Thank's IE8 for his funny defineProperty
module.exports = !__webpack_require__(11)(function () {
return Object.defineProperty({}, 'a', { get: function () { return 7; } }).a != 7;
});
/***/ }),
/* 4 */
/***/ (function(module, exports, __webpack_require__) {
var anObject = __webpack_require__(12);
var IE8_DOM_DEFINE = __webpack_require__(33);
var toPrimitive = __webpack_require__(22);
var dP = Object.defineProperty;
exports.f = __webpack_require__(3) ? Object.defineProperty : function defineProperty(O, P, Attributes) {
anObject(O);
P = toPrimitive(P, true);
anObject(Attributes);
if (IE8_DOM_DEFINE) try {
return dP(O, P, Attributes);
} catch (e) { /* empty */ }
if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported!');
if ('value' in Attributes) O[P] = Attributes.value;
return O;
};
/***/ }),
/* 5 */
/***/ (function(module, exports) {
var hasOwnProperty = {}.hasOwnProperty;
module.exports = function (it, key) {
return hasOwnProperty.call(it, key);
};
/***/ }),
/* 6 */
/***/ (function(module, exports, __webpack_require__) {
var dP = __webpack_require__(4);
var createDesc = __webpack_require__(14);
module.exports = __webpack_require__(3) ? function (object, key, value) {
return dP.f(object, key, createDesc(1, value));
} : function (object, key, value) {
object[key] = value;
return object;
};
/***/ }),
/* 7 */
/***/ (function(module, exports, __webpack_require__) {
// to indexed object, toObject with fallback for non-array-like ES3 strings
var IObject = __webpack_require__(40);
var defined = __webpack_require__(19);
module.exports = function (it) {
return IObject(defined(it));
};
/***/ }),
/* 8 */
/***/ (function(module, exports) {
module.exports = function (it) {
return typeof it === 'object' ? it !== null : typeof it === 'function';
};
/***/ }),
/* 9 */
/***/ (function(module, exports) {
/* globals __VUE_SSR_CONTEXT__ */
// this module is a runtime utility for cleaner component module output and will
// be included in the final webpack user bundle
module.exports = function normalizeComponent (
rawScriptExports,
compiledTemplate,
injectStyles,
scopeId,
moduleIdentifier /* server only */
) {
var esModule
var scriptExports = rawScriptExports = rawScriptExports || {}
// ES6 modules interop
var type = typeof rawScriptExports.default
if (type === 'object' || type === 'function') {
esModule = rawScriptExports
scriptExports = rawScriptExports.default
}
// Vue.extend constructor export interop
var options = typeof scriptExports === 'function'
? scriptExports.options
: scriptExports
// render functions
if (compiledTemplate) {
options.render = compiledTemplate.render
options.staticRenderFns = compiledTemplate.staticRenderFns
}
// scopedId
if (scopeId) {
options._scopeId = scopeId
}
var hook
if (moduleIdentifier) { // server build
hook = function (context) {
// 2.3 injection
context =
context || // cached call
(this.$vnode && this.$vnode.ssrContext) || // stateful
(this.parent && this.parent.$vnode && this.parent.$vnode.ssrContext) // functional
// 2.2 with runInNewContext: true
if (!context && typeof __VUE_SSR_CONTEXT__ !== 'undefined') {
context = __VUE_SSR_CONTEXT__
}
// inject component styles
if (injectStyles) {
injectStyles.call(this, context)
}
// register component module identifier for async chunk inferrence
if (context && context._registeredComponents) {
context._registeredComponents.add(moduleIdentifier)
}
}
// used by ssr in case component is cached and beforeCreate
// never gets called
options._ssrRegister = hook
} else if (injectStyles) {
hook = injectStyles
}
if (hook) {
var functional = options.functional
var existing = functional
? options.render
: options.beforeCreate
if (!functional) {
// inject component registration as beforeCreate hook
options.beforeCreate = existing
? [].concat(existing, hook)
: [hook]
} else {
// register for functioal component in vue file
options.render = function renderWithStyleInjection (h, context) {
hook.call(context)
return existing(h, context)
}
}
}
return {
esModule: esModule,
exports: scriptExports,
options: options
}
}
/***/ }),
/* 10 */
/***/ (function(module, exports, __webpack_require__) {
var global = __webpack_require__(1);
var core = __webpack_require__(0);
var ctx = __webpack_require__(31);
var hide = __webpack_require__(6);
var has = __webpack_require__(5);
var PROTOTYPE = 'prototype';
var $export = function (type, name, source) {
var IS_FORCED = type & $export.F;
var IS_GLOBAL = type & $export.G;
var IS_STATIC = type & $export.S;
var IS_PROTO = type & $export.P;
var IS_BIND = type & $export.B;
var IS_WRAP = type & $export.W;
var exports = IS_GLOBAL ? core : core[name] || (core[name] = {});
var expProto = exports[PROTOTYPE];
var target = IS_GLOBAL ? global : IS_STATIC ? global[name] : (global[name] || {})[PROTOTYPE];
var key, own, out;
if (IS_GLOBAL) source = name;
for (key in source) {
// contains in native
own = !IS_FORCED && target && target[key] !== undefined;
if (own && has(exports, key)) continue;
// export native or passed
out = own ? target[key] : source[key];
// prevent global pollution for namespaces
exports[key] = IS_GLOBAL && typeof target[key] != 'function' ? source[key]
// bind timers to global for call from export context
: IS_BIND && own ? ctx(out, global)
// wrap global constructors for prevent change them in library
: IS_WRAP && target[key] == out ? (function (C) {
var F = function (a, b, c) {
if (this instanceof C) {
switch (arguments.length) {
case 0: return new C();
case 1: return new C(a);
case 2: return new C(a, b);
} return new C(a, b, c);
} return C.apply(this, arguments);
};
F[PROTOTYPE] = C[PROTOTYPE];
return F;
// make static versions for prototype methods
})(out) : IS_PROTO && typeof out == 'function' ? ctx(Function.call, out) : out;
// export proto methods to core.%CONSTRUCTOR%.methods.%NAME%
if (IS_PROTO) {
(exports.virtual || (exports.virtual = {}))[key] = out;
// export proto methods to core.%CONSTRUCTOR%.prototype.%NAME%
if (type & $export.R && expProto && !expProto[key]) hide(expProto, key, out);
}
}
};
// type bitmap
$export.F = 1; // forced
$export.G = 2; // global
$export.S = 4; // static
$export.P = 8; // proto
$export.B = 16; // bind
$export.W = 32; // wrap
$export.U = 64; // safe
$export.R = 128; // real proto method for `library`
module.exports = $export;
/***/ }),
/* 11 */
/***/ (function(module, exports) {
module.exports = function (exec) {
try {
return !!exec();
} catch (e) {
return true;
}
};
/***/ }),
/* 12 */
/***/ (function(module, exports, __webpack_require__) {
var isObject = __webpack_require__(8);
module.exports = function (it) {
if (!isObject(it)) throw TypeError(it + ' is not an object!');
return it;
};
/***/ }),
/* 13 */
/***/ (function(module, exports, __webpack_require__) {
// 19.1.2.14 / 15.2.3.14 Object.keys(O)
var $keys = __webpack_require__(36);
var enumBugKeys = __webpack_require__(25);
module.exports = Object.keys || function keys(O) {
return $keys(O, enumBugKeys);
};
/***/ }),
/* 14 */
/***/ (function(module, exports) {
module.exports = function (bitmap, value) {
return {
enumerable: !(bitmap & 1),
configurable: !(bitmap & 2),
writable: !(bitmap & 4),
value: value
};
};
/***/ }),
/* 15 */
/***/ (function(module, exports) {
module.exports = true;
/***/ }),
/* 16 */
/***/ (function(module, exports) {
module.exports = {};
/***/ }),
/* 17 */
/***/ (function(module, exports) {
var id = 0;
var px = Math.random();
module.exports = function (key) {
return 'Symbol('.concat(key === undefined ? '' : key, ')_', (++id + px).toString(36));
};
/***/ }),
/* 18 */
/***/ (function(module, exports) {
exports.f = {}.propertyIsEnumerable;
/***/ }),
/* 19 */
/***/ (function(module, exports) {
// 7.2.1 RequireObjectCoercible(argument)
module.exports = function (it) {
if (it == undefined) throw TypeError("Can't call method on " + it);
return it;
};
/***/ }),
/* 20 */
/***/ (function(module, exports) {
// 7.1.4 ToInteger
var ceil = Math.ceil;
var floor = Math.floor;
module.exports = function (it) {
return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it);
};
/***/ }),
/* 21 */
/***/ (function(module, exports) {
var toString = {}.toString;
module.exports = function (it) {
return toString.call(it).slice(8, -1);
};
/***/ }),
/* 22 */
/***/ (function(module, exports, __webpack_require__) {
// 7.1.1 ToPrimitive(input [, PreferredType])
var isObject = __webpack_require__(8);
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
// and the second argument - flag - preferred type is a string
module.exports = function (it, S) {
if (!isObject(it)) return it;
var fn, val;
if (S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it))) return val;
if (typeof (fn = it.valueOf) == 'function' && !isObject(val = fn.call(it))) return val;
if (!S && typeof (fn = it.toString) == 'function' && !isObject(val = fn.call(it))) return val;
throw TypeError("Can't convert object to primitive value");
};
/***/ }),
/* 23 */
/***/ (function(module, exports, __webpack_require__) {
var shared = __webpack_require__(24)('keys');
var uid = __webpack_require__(17);
module.exports = function (key) {
return shared[key] || (shared[key] = uid(key));
};
/***/ }),
/* 24 */
/***/ (function(module, exports, __webpack_require__) {
var core = __webpack_require__(0);
var global = __webpack_require__(1);
var SHARED = '__core-js_shared__';
var store = global[SHARED] || (global[SHARED] = {});
(module.exports = function (key, value) {
return store[key] || (store[key] = value !== undefined ? value : {});
})('versions', []).push({
version: core.version,
mode: __webpack_require__(15) ? 'pure' : 'global',
copyright: '© 2018 Denis Pushkarev (zloirock.ru)'
});
/***/ }),
/* 25 */
/***/ (function(module, exports) {
// IE 8- don't enum bug keys
module.exports = (
'constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf'
).split(',');
/***/ }),
/* 26 */
/***/ (function(module, exports, __webpack_require__) {
var def = __webpack_require__(4).f;
var has = __webpack_require__(5);
var TAG = __webpack_require__(2)('toStringTag');
module.exports = function (it, tag, stat) {
if (it && !has(it = stat ? it : it.prototype, TAG)) def(it, TAG, { configurable: true, value: tag });
};
/***/ }),
/* 27 */
/***/ (function(module, exports, __webpack_require__) {
// 7.1.13 ToObject(argument)
var defined = __webpack_require__(19);
module.exports = function (it) {
return Object(defined(it));
};
/***/ }),
/* 28 */
/***/ (function(module, exports, __webpack_require__) {
var isObject = __webpack_require__(8);
var document = __webpack_require__(1).document;
// typeof document.createElement is 'object' in old IE
var is = isObject(document) && isObject(document.createElement);
module.exports = function (it) {
return is ? document.createElement(it) : {};
};
/***/ }),
/* 29 */
/***/ (function(module, exports, __webpack_require__) {
exports.f = __webpack_require__(2);
/***/ }),
/* 30 */
/***/ (function(module, exports, __webpack_require__) {
var global = __webpack_require__(1);
var core = __webpack_require__(0);
var LIBRARY = __webpack_require__(15);
var wksExt = __webpack_require__(29);
var defineProperty = __webpack_require__(4).f;
module.exports = function (name) {
var $Symbol = core.Symbol || (core.Symbol = LIBRARY ? {} : global.Symbol || {});
if (name.charAt(0) != '_' && !(name in $Symbol)) defineProperty($Symbol, name, { value: wksExt.f(name) });
};
/***/ }),
/* 31 */
/***/ (function(module, exports, __webpack_require__) {
// optional / simple context binding
var aFunction = __webpack_require__(34);
module.exports = function (fn, that, length) {
aFunction(fn);
if (that === undefined) return fn;
switch (length) {
case 1: return function (a) {
return fn.call(that, a);
};
case 2: return function (a, b) {
return fn.call(that, a, b);
};
case 3: return function (a, b, c) {
return fn.call(that, a, b, c);
};
}
return function (/* ...args */) {
return fn.apply(that, arguments);
};
};
/***/ }),
/* 32 */
/***/ (function(module, exports) {
exports.f = Object.getOwnPropertySymbols;
/***/ }),
/* 33 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = !__webpack_require__(3) && !__webpack_require__(11)(function () {
return Object.defineProperty(__webpack_require__(28)('div'), 'a', { get: function () { return 7; } }).a != 7;
});
/***/ }),
/* 34 */
/***/ (function(module, exports) {
module.exports = function (it) {
if (typeof it != 'function') throw TypeError(it + ' is not a function!');
return it;
};
/***/ }),
/* 35 */
/***/ (function(module, exports, __webpack_require__) {
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [exports, __webpack_require__(48), __webpack_require__(37), __webpack_require__(53), __webpack_require__(47)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(exports, require('babel-runtime/core-js/object/keys'), require('babel-runtime/helpers/defineProperty'), require('babel-runtime/helpers/typeof'), require('../lang/string'));
} else {
var mod = {
exports: {}
};
factory(mod.exports, global.keys, global.defineProperty, global._typeof, global.string);
global.util = mod.exports;
}
})(this, function (exports, _keys, _defineProperty2, _typeof2, _string) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.getIOSVersion = exports.isNumber = exports.isObject = exports.isString = exports.isArray = exports.isFunc = exports.isUndef = exports.parsePath = exports.processComponentName = exports.debounce = exports.cb2PromiseWithResolve = exports.parallel = exports.resetTypeValue = exports.createAddAPI = exports.deepAssign = exports.findIndex = undefined;
var _keys2 = _interopRequireDefault(_keys);
var _defineProperty3 = _interopRequireDefault(_defineProperty2);
var _typeof3 = _interopRequireDefault(_typeof2);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
function findIndex(ary, fn) {
if (ary.findIndex) {
return ary.findIndex(fn);
}
var index = -1;
ary.some(function (item, i, ary) {
var ret = fn.call(this, item, i, ary);
if (ret) {
index = i;
return ret;
}
});
return index;
}
function deepAssign(to, from) {
for (var key in from) {
if (!to[key] || (0, _typeof3.default)(to[key]) !== 'object') {
to[key] = from[key];
} else {
deepAssign(to[key], from[key]);
}
}
}
function createAddAPI(baseObj) {
return function add() {
for (var _len = arguments.length, args = Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
if (typeof args[0] === 'string') {
args[0] = (0, _defineProperty3.default)({}, args[0], args[1]);
}
deepAssign(baseObj, args[0]);
};
}
function judgeTypeFnCreator(type) {
var toString = Object.prototype.toString;
return function isType(o) {
return toString.call(o) === '[object ' + type + ']';
};
}
var typesReset = {
_set: function _set(obj, key, value) {
obj[key] = value;
},
string: function string(obj, key) {
typesReset._set(obj, key, '');
},
number: function number(obj, key) {
typesReset._set(obj, key, 0);
},
boolean: function boolean(obj, key) {
typesReset._set(obj, key, false);
},
object: function object(obj, key, value) {
if (Array.isArray(value)) {
typesReset._set(obj, key, []);
} else {
typesReset._set(obj, key, {});
}
}
};
function resetTypeValue(obj, key, defVal) {
if (defVal !== undefined) {
return typesReset._set(obj, key, defVal);
}
if (key) {
var value = obj[key];
var resetHandler = typesReset[typeof value === 'undefined' ? 'undefined' : (0, _typeof3.default)(value)];
resetHandler && resetHandler(obj, key, value);
} else {
(0, _keys2.default)(obj).forEach(function (key) {
resetTypeValue(obj, key);
});
}
}
function parallel(tasks, cb) {
var doneCount = 0;
var results = [];
var tasksLen = tasks.length;
if (!tasksLen) {
return cb(results);
}
tasks.forEach(function (task, i) {
task(function (ret) {
doneCount += 1;
results[i] = ret;
if (doneCount === tasksLen) {
cb(results);
}
});
});
}
function cb2PromiseWithResolve(cb) {
var promise = void 0;
if (typeof window.Promise !== 'undefined') {
var _cb = cb;
promise = new window.Promise(function (resolve) {
cb = function cb(data) {
_cb && _cb(data);
resolve(data);
};
});
promise.resolve = cb;
}
return promise;
}
function debounce(func, wait, immediate, initValue) {
var timeout = void 0;
var result = initValue;
var later = function later(context, args) {
timeout = null;
if (args) {
result = func.apply(context, args);
}
};
var debounced = function debounced() {
var _this = this;
for (var _len2 = arguments.length, args = Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
args[_key2] = arguments[_key2];
}
if (timeout) {
clearTimeout(timeout);
}
if (immediate) {
var callNow = !timeout;
timeout = setTimeout(later, wait);
if (callNow) {
result = func.apply(this, args);
}
} else {
timeout = setTimeout(function () {
later(_this, args);
}, wait);
}
return result;
};
debounced.cancel = function () {
clearTimeout(timeout);
timeout = null;
};
return debounced;
}
function processComponentName(Component) {
var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
_ref$prefix = _ref.prefix,
prefix = _ref$prefix === undefined ? '' : _ref$prefix,
_ref$firstUpperCase = _ref.firstUpperCase,
firstUpperCase = _ref$firstUpperCase === undefined ? false : _ref$firstUpperCase;
var name = Component.name;
var pureName = name.replace(/^cube-/i, '');
var camelizeName = '' + (0, _string.camelize)('' + prefix + pureName);
if (firstUpperCase) {
camelizeName = camelizeName.charAt(0).toUpperCase() + camelizeName.slice(1);
}
return camelizeName;
}
function parsePath(obj) {
var path = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : '';
var segments = path.split('.');
var result = obj;
for (var i = 0; i < segments.length; i++) {
var key = segments[i];
if (isUndef(result[key])) {
result = '';
break;
} else {
result = result[key];
}
}
return result;
}
function getIOSVersion(ua) {
var regex = /os (\d\d?_\d(_\d)?)/;
var matches = regex.exec(ua);
if (!matches) return null;
var parts = matches[1].split('_').map(function (item) {
return parseInt(item, 10);
});
return {
major: parts[0],
minor: parts[1],
patch: parts[2] || 0
};
}
var isFunc = judgeTypeFnCreator('Function');
var isUndef = judgeTypeFnCreator('Undefined');
var isArray = judgeTypeFnCreator('Array');
var isString = judgeTypeFnCreator('String');
var isObject = judgeTypeFnCreator('Object');
var isNumber = judgeTypeFnCreator('Number');
exports.findIndex = findIndex;
exports.deepAssign = deepAssign;
exports.createAddAPI = createAddAPI;
exports.resetTypeValue = resetTypeValue;
exports.parallel = parallel;
exports.cb2PromiseWithResolve = cb2PromiseWithResolve;
exports.debounce = debounce;
exports.processComponentName = processComponentName;
exports.parsePath = parsePath;
exports.isUndef = isUndef;
exports.isFunc = isFunc;
exports.isArray = isArray;
exports.isString = isString;
exports.isObject = isObject;
exports.isNumber = isNumber;
exports.getIOSVersion = getIOSVersion;
});
/***/ }),
/* 36 */
/***/ (function(module, exports, __webpack_require__) {
var has = __webpack_require__(5);
var toIObject = __webpack_require__(7);
var arrayIndexOf = __webpack_require__(50)(false);
var IE_PROTO = __webpack_require__(23)('IE_PROTO');
module.exports = function (object, names) {
var O = toIObject(object);
var i = 0;
var result = [];
var key;
for (key in O) if (key != IE_PROTO) has(O, key) && result.push(key);
// Don't enum bug & hidden keys
while (names.length > i) if (has(O, key = names[i++])) {
~arrayIndexOf(result, key) || result.push(key);
}
return result;
};
/***/ }),
/* 37 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
exports.__esModule = true;
var _defineProperty = __webpack_require__(55);
var _defineProperty2 = _interopRequireDefault(_defineProperty);
function _ | obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = function (obj, key, value) {
if (key in obj) {
(0, _defineProperty2.default)(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
};
/***/ }),
/* 38 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var $at = __webpack_require__(62)(true);
// 21.1.3.27 String.prototype[@@iterator]()
__webpack_require__(42)(String, 'String', function (iterated) {
this._t = String(iterated); // target
this._i = 0; // next index
// 21.1.5.2.1 %StringIteratorPrototype%.next()
}, function () {
var O = this._t;
var index = this._i;
var point;
if (index >= O.length) return { value: undefined, done: true };
point = $at(O, index);
this._i += point.length;
return { value: point, done: false };
});
/***/ }),
/* 39 */
/***/ (function(module, exports, __webpack_require__) {
/* WEBPACK VAR INJECTION */(function(process) {var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [exports], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(exports);
} else {
var mod = {
exports: {}
};
factory(mod.exports);
global.debug = mod.exports;
}
})(this, function (exports) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var warn = exports.warn = function warn(msg, componentName) {
if (process.env.NODE_ENV !== 'production') {
var component = componentName ? '<' + componentName + '> ' : '';
console.error('[Cube warn]: ' + component + msg);
}
};
var tip = exports.tip = function tip(msg, componentName) {
if (process.env.NODE_ENV !== 'production') {
var component = componentName ? '<' + componentName + '> ' : '';
console.warn('[Cube tip]: ' + component + msg);
}
};
});
/* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(83)))
/***/ }),
/* 40 */
/***/ (function(module, exports, __webpack_require__) {
// fallback for non-array-like ES3 and non-enumerable old V8 strings
var cof = __webpack_require__(21);
// eslint-disable-next-line no-prototype-builtins
module.exports = Object('z').propertyIsEnumerable(0) ? Object : function (it) {
return cof(it) == 'String' ? it.split('') : Object(it);
};
/***/ }),
/* 41 */
/***/ (function(module, exports, __webpack_require__) {
// 7.1.15 ToLength
var toInteger = __webpack_require__(20);
var min = Math.min;
module.exports = function (it) {
return it > 0 ? min(toInteger(it), 0x1fffffffffffff) : 0; // pow(2, 53) - 1 == 9007199254740991
};
/***/ }),
/* 42 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var LIBRARY = __webpack_require__(15);
var $export = __webpack_require__(10);
var redefine = __webpack_require__(43);
var hide = __webpack_require__(6);
var Iterators = __webpack_require__(16);
var $iterCreate = __webpack_require__(63);
var setToStringTag = __webpack_require__(26);
var getPrototypeOf = __webpack_require__(65);
var ITERATOR = __webpack_require__(2)('iterator');
var BUGGY = !([].keys && 'next' in [].keys()); // Safari has buggy iterators w/o `next`
var FF_ITERATOR = '@@iterator';
var KEYS = 'keys';
var VALUES = 'values';
var returnThis = function () { return this; };
module.exports = function (Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCED) {
$iterCreate(Constructor, NAME, next);
var getMethod = function (kind) {
if (!BUGGY && kind in proto) return proto[kind];
switch (kind) {
case KEYS: return function keys() { return new Constructor(this, kind); };
case VALUES: return function values() { return new Constructor(this, kind); };
} return function entries() { return new Constructor(this, kind); };
};
var TAG = NAME + ' Iterator';
var DEF_VALUES = DEFAULT == VALUES;
var VALUES_BUG = false;
var proto = Base.prototype;
var $native = proto[ITERATOR] || proto[FF_ITERATOR] || DEFAULT && proto[DEFAULT];
var $default = $native || getMethod(DEFAULT);
var $entries = DEFAULT ? !DEF_VALUES ? $default : getMethod('entries') : undefined;
var $anyNative = NAME == 'Array' ? proto.entries || $native : $native;
var methods, key, IteratorPrototype;
// Fix native
if ($anyNative) {
IteratorPrototype = getPrototypeOf($anyNative.call(new Base()));
if (IteratorPrototype !== Object.prototype && IteratorPrototype.next) {
// Set @@toStringTag to native iterators
setToStringTag(IteratorPrototype, TAG, true);
// fix for some old engines
if (!LIBRARY && typeof IteratorPrototype[ITERATOR] != 'function') hide(IteratorPrototype, ITERATOR, returnThis);
}
}
// fix Array#{values, @@iterator}.name in V8 / FF
if (DEF_VALUES && $native && $native.name !== VALUES) {
VALUES_BUG = true;
$default = function values() { return $native.call(this); };
}
// Define iterator
if ((!LIBRARY || FORCED) && (BUGGY || VALUES_BUG || !proto[ITERATOR])) {
hide(proto, ITERATOR, $default);
}
// Plug for library
Iterators[NAME] = $default;
Iterators[TAG] = returnThis;
if (DEFAULT) {
methods = {
values: DEF_VALUES ? $default : getMethod(VALUES),
keys: IS_SET ? $default : getMethod(KEYS),
entries: $entries
};
if (FORCED) for (key in methods) {
if (!(key in proto)) redefine(proto, key, methods[key]);
} else $export($export.P + $export.F * (BUGGY || VALUES_BUG), NAME, methods);
}
return methods;
};
/***/ }),
/* 43 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = __webpack_require__(6);
/***/ }),
/* 44 */
/***/ (function(module, exports, __webpack_require__) {
// 19.1.2.2 / 15.2.3.5 Object.create(O [, Properties])
var anObject = __webpack_require__(12);
var dPs = __webpack_require__(64);
var enumBugKeys = __webpack_require__(25);
var IE_PROTO = __webpack_require__(23)('IE_PROTO');
var Empty = function () { /* empty */ };
var PROTOTYPE = 'prototype';
// Create object with fake `null` prototype: use iframe Object with cleared prototype
var createDict = function () {
// Thrash, waste and sodomy: IE GC bug
var iframe = __webpack_require__(28)('iframe');
var i = enumBugKeys.length;
var lt = '<';
var gt = '>';
var iframeDocument;
iframe.style.display = 'none';
__webpack_require__(52).appendChild(iframe);
iframe.src = 'javascript:'; // eslint-disable-line no-script-url
// createDict = iframe.contentWindow.Object;
// html.removeChild(iframe);
iframeDocument = iframe.contentWindow.document;
iframeDocument.open();
iframeDocument.write(lt + 'script' + gt + 'document.F=Object' + lt + '/script' + gt);
iframeDocument.close();
createDict = iframeDocument.F;
while (i--) delete createDict[PROTOTYPE][enumBugKeys[i]];
return createDict();
};
module.exports = Object.create || function create(O, Properties) {
var result;
if (O !== null) {
Empty[PROTOTYPE] = anObject(O);
result = new Empty();
Empty[PROTOTYPE] = null;
// add "__proto__" for Object.getPrototypeOf polyfill
result[IE_PROTO] = O;
} else result = createDict();
return Properties === undefined ? result : dPs(result, Properties);
};
/***/ }),
/* 45 */
/***/ (function(module, exports, __webpack_require__) {
__webpack_require__(66);
var global = __webpack_require__(1);
var hide = __webpack_require__(6);
var Iterators = __webpack_require__(16);
var TO_STRING_TAG = __webpack_require__(2)('toStringTag');
var DOMIterables = ('CSSRuleList,CSSStyleDeclaration,CSSValueList,ClientRectList,DOMRectList,DOMStringList,' +
'DOMTokenList,DataTransferItemList,FileList,HTMLAllCollection,HTMLCollection,HTMLFormElement,HTMLSelectElement,' +
'MediaList,MimeTypeArray,NamedNodeMap,NodeList,PaintRequestList,Plugin,PluginArray,SVGLengthList,SVGNumberList,' +
'SVGPathSegList,SVGPointList,SVGStringList,SVGTransformList,SourceBufferList,StyleSheetList,TextTrackCueList,' +
'TextTrackList,TouchList').split(',');
for (var i = 0; i < DOMIterables.length; i++) {
var NAME = DOMIterables[i];
var Collection = global[NAME];
var proto = Collection && Collection.prototype;
if (proto && !proto[TO_STRING_TAG]) hide(proto, TO_STRING_TAG, NAME);
Iterators[NAME] = Iterators.Array;
}
/***/ }),
/* 46 */
/***/ (function(module, exports, __webpack_require__) {
// 19.1.2.7 / 15.2.3.4 Object.getOwnPropertyNames(O)
var $keys = __webpack_require__(36);
var hiddenKeys = __webpack_require__(25).concat('length', 'prototype');
exports.f = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
return $keys(O, hiddenKeys);
};
/***/ }),
/* 47 */
/***/ (function(module, exports, __webpack_require__) {
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [exports], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(exports);
} else {
var mod = {
exports: {}
};
factory(mod.exports);
global.string = mod.exports;
}
})(this, function (exports) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.camelize = camelize;
exports.kebab = kebab;
var camelizeRE = /-(\w)/g;
function camelize(str) {
str = String(str);
return str.replace(camelizeRE, function (m, c) {
return c ? c.toUpperCase() : '';
});
}
function kebab(str) {
str = String(str);
return str.replace(/([A-Z])/g, '-$1').toLowerCase();
}
});
/***/ }),
/* 48 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = { "default": __webpack_require__(59), __esModule: true };
/***/ }),
/* 49 */,
/* 50 */
/***/ (function(module, exports, __webpack_require__) {
// false -> Array#indexOf
// true -> Array#includes
var toIObject = __webpack_require__(7);
var toLength = __webpack_require__(41);
var toAbsoluteIndex = __webpack_require__(51);
module.exports = function (IS_INCLUDES) {
return function ($this, el, fromIndex) {
var O = toIObject($this);
var length = toLength(O.length);
var index = toAbsoluteIndex(fromIndex, length);
var value;
// Array#includes uses SameValueZero equality algorithm
// eslint-disable-next-line no-self-compare
if (IS_INCLUDES && el != el) while (length > index) {
value = O[index++];
// eslint-disable-next-line no-self-compare
if (value != value) return true;
// Array#indexOf ignores holes, Array#includes - not
} else for (;length > index; index++) if (IS_INCLUDES || index in O) {
if (O[index] === el) return IS_INCLUDES || index || 0;
} return !IS_INCLUDES && -1;
};
};
/***/ }),
/* 51 */
/***/ (function(module, exports, __webpack_require__) {
var toInteger = __webpack_require__(20);
var max = Math.max;
var min = Math.min;
module.exports = function (index, length) {
index = toInteger(index);
return index < 0 ? max(index + length, 0) : min(index, length);
};
/***/ }),
/* 52 */
/***/ (function(module, exports, __webpack_require__) {
var document = __webpack_require__(1).document;
module.exports = document && document.documentElement;
/***/ }),
/* 53 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
exports.__esModule = true;
var _iterator = __webpack_require__(70);
var _iterator2 = _interopRequireDefault(_iterator);
var _symbol = __webpack_require__(72);
var _symbol2 = _interopRequireDefault(_symbol);
var _typeof = typeof _symbol2.default === "function" && typeof _iterator2.default === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof _symbol2.default === "function" && obj.constructor === _symbol2.default && obj !== _symbol2.default.prototype ? "symbol" : typeof obj; };
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = typeof _symbol2.default === "function" && _typeof(_iterator2.default) === "symbol" ? function (obj) {
return typeof obj === "undefined" ? "undefined" : _typeof(obj);
} : function (obj) {
return obj && typeof _symbol2.default === "function" && obj.constructor === _symbol2.default && obj !== _symbol2.default.prototype ? "symbol" : typeof obj === "undefined" ? "undefined" : _typeof(obj);
};
/***/ }),
/* 54 */
/***/ (function(module, exports) {
/***/ }),
/* 55 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = { "default": __webpack_require__(56), __esModule: true };
/***/ }),
/* 56 */
/***/ (function(module, exports, __webpack_require__) {
__webpack_require__(57);
var $Object = __webpack_require__(0).Object;
module.exports = function defineProperty(it, key, desc) {
return $Object.defineProperty(it, key, desc);
};
/***/ }),
/* 57 */
/***/ (function(module, exports, __webpack_require__) {
var $export = __webpack_require__(10);
// 19.1.2.4 / 15.2.3.6 Object.defineProperty(O, P, Attributes)
$export($export.S + $export.F * !__webpack_require__(3), 'Object', { defineProperty: __webpack_require__(4).f });
/***/ }),
/* 58 */
/***/ (function(module, exports, __webpack_require__) {
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports);
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports);
global.visibility = mod.exports;
}
})(this, function (module, exports) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var EVENT_TOGGLE = 'toggle';
exports.default = {
model: {
prop: 'visible',
event: EVENT_TOGGLE
},
props: {
visible: {
type: Boolean,
default: false
}
},
data: function data() {
return {
isVisible: false
};
},
watch: {
isVisible: function isVisible(newVal) {
this.$emit(EVENT_TOGGLE, newVal);
}
},
mounted: function mounted() {
var _this = this;
this.$watch('visible', function (newVal, oldVal) {
if (newVal) {
_this.show();
} else if (oldVal && !_this._createAPI_reuse) {
_this.hide();
}
}, {
immediate: true
});
},
methods: {
show: function show() {
this.isVisible = true;
},
hide: function hide() {
this.isVisible = false;
}
}
};
module.exports = exports['default'];
});
/***/ }),
/* 59 */
/***/ (function(module, exports, __webpack_require__) {
__webpack_require__(60);
module.exports = __webpack_require__(0).Object.keys;
/***/ }),
/* 60 */
/***/ (function(module, exports, __webpack_require__) {
// 19.1.2.14 Object.keys(O)
var toObject = __webpack_require__(27);
var $keys = __webpack_require__(13);
__webpack_require__(61)('keys', function () {
return function keys(it) {
return $keys(toObject(it));
};
});
/***/ }),
/* 61 */
/***/ (function(module, exports, __webpack_require__) {
// most Object methods by ES6 should accept primitives
var $export = __webpack_require__(10);
var core = __webpack_require__(0);
var fails = __webpack_require__(11);
module.exports = function (KEY, exec) {
var fn = (core.Object || {})[KEY] || Object[KEY];
var exp = {};
exp[KEY] = exec(fn);
$export($export.S + $export.F * fails(function () { fn(1); }), 'Object', exp);
};
/***/ }),
/* 62 */
/***/ (function(module, exports, __webpack_require__) {
var toInteger = __webpack_require__(20);
var defined = __webpack_require__(19);
// true -> String#at
// false -> String#codePointAt
module.exports = function (TO_STRING) {
return function (that, pos) {
var s = String(defined(that));
var i = toInteger(pos);
var l = s.length;
var a, b;
if (i < 0 || i >= l) return TO_STRING ? '' : undefined;
a = s.charCodeAt(i);
return a < 0xd800 || a > 0xdbff || i + 1 === l || (b = s.charCodeAt(i + 1)) < 0xdc00 || b > 0xdfff
? TO_STRING ? s.charAt(i) : a
: TO_STRING ? s.slice(i, i + 2) : (a - 0xd800 << 10) + (b - 0xdc00) + 0x10000;
};
};
/***/ }),
/* 63 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var create = __webpack_require__(44);
var descriptor = __webpack_require__(14);
var setToStringTag = __webpack_require__(26);
var IteratorPrototype = {};
// 25.1.2.1.1 %IteratorPrototype%[@@iterator]()
__webpack_require__(6)(IteratorPrototype, __webpack_require__(2)('iterator'), function () { return this; });
module.exports = function (Constructor, NAME, next) {
Constructor.prototype = create(IteratorPrototype, { next: descriptor(1, next) });
setToStringTag(Constructor, NAME + ' Iterator');
};
/***/ }),
/* 64 */
/***/ (function(module, exports, __webpack_require__) {
var dP = __webpack_require__(4);
var anObject = __webpack_require__(12);
var getKeys = __webpack_require__(13);
module.exports = __webpack_require__(3) ? Object.defineProperties : function defineProperties(O, Properties) {
anObject(O);
var keys = getKeys(Properties);
var length = keys.length;
var i = 0;
var P;
while (length > i) dP.f(O, P = keys[i++], Properties[P]);
return O;
};
/***/ }),
/* 65 */
/***/ (function(module, exports, __webpack_require__) {
// 19.1.2.9 / 15.2.3.2 Object.getPrototypeOf(O)
var has = __webpack_require__(5);
var toObject = __webpack_require__(27);
var IE_PROTO = __webpack_require__(23)('IE_PROTO');
var ObjectProto = Object.prototype;
module.exports = Object.getPrototypeOf || function (O) {
O = toObject(O);
if (has(O, IE_PROTO)) return O[IE_PROTO];
if (typeof O.constructor == 'function' && O instanceof O.constructor) {
return O.constructor.prototype;
} return O instanceof Object ? ObjectProto : null;
};
/***/ }),
/* 66 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
var addToUnscopables = __webpack_require__(67);
var step = __webpack_require__(68);
var Iterators = __webpack_require__(16);
var toIObject = __webpack_require__(7);
// 22.1.3.4 Array.prototype.entries()
// 22.1.3.13 Array.prototype.keys()
// 22.1.3.29 Array.prototype.values()
// 22.1.3.30 Array.prototype[@@iterator]()
module.exports = __webpack_require__(42)(Array, 'Array', function (iterated, kind) {
this._t = toIObject(iterated); // target
this._i = 0; // next index
this._k = kind; // kind
// 22.1.5.2.1 %ArrayIteratorPrototype%.next()
}, function () {
var O = this._t;
var kind = this._k;
var index = this._i++;
if (!O || index >= O.length) {
this._t = undefined;
return step(1);
}
if (kind == 'keys') return step(0, index);
if (kind == 'values') return step(0, O[index]);
return step(0, [index, O[index]]);
}, 'values');
// argumentsList[@@iterator] is %ArrayProto_values% (9.4.4.6, 9.4.4.7)
Iterators.Arguments = Iterators.Array;
addToUnscopables('keys');
addToUnscopables('values');
addToUnscopables('entries');
/***/ }),
/* 67 */
/***/ (function(module, exports) {
module.exports = function () { /* empty */ };
/***/ }),
/* 68 */
/***/ (function(module, exports) {
module.exports = function (done, value) {
return { value: value, done: !!done };
};
/***/ }),
/* 69 */
/***/ (function(module, exports, __webpack_require__) {
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports);
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports);
global.popup = mod.exports;
}
})(this, function (module, exports) {
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = {
props: {
zIndex: {
type: Number,
default: 100
},
maskClosable: {
type: Boolean,
default: false
}
}
};
module.exports = exports["default"];
});
/***/ }),
/* 70 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = { "default": __webpack_require__(71), __esModule: true };
/***/ }),
/* 71 */
/***/ (function(module, exports, __webpack_require__) {
__webpack_require__(38);
__webpack_require__(45);
module.exports = __webpack_require__(29).f('iterator');
/***/ }),
/* 72 */
/***/ (function(module, exports, __webpack_require__) {
module.exports = { "default": __webpack_require__(73), __esModule: true };
/***/ }),
/* 73 */
/***/ (function(module, exports, __webpack_require__) {
__webpack_require__(74);
__webpack_require__(54);
__webpack_require__(80);
__webpack_require__(81);
module.exports = __webpack_require__(0).Symbol;
/***/ }),
/* 74 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
// ECMAScript 6 symbols shim
var global = __webpack_require__(1);
var has = __webpack_require__(5);
var DESCRIPTORS = __webpack_require__(3);
var $export = __webpack_require__(10);
var redefine = __webpack_require__(43);
var META = __webpack_require__(75).KEY;
var $fails = __webpack_require__(11);
var shared = __webpack_require__(24);
var setToStringTag = __webpack_require__(26);
var uid = __webpack_require__(17);
var wks = __webpack_require__(2);
var wksExt = __webpack_require__(29);
var wksDefine = __webpack_require__(30);
var enumKeys = __webpack_require__(76);
var isArray = __webpack_require__(77);
var anObject = __webpack_require__(12);
var isObject = __webpack_require__(8);
var toIObject = __webpack_require__(7);
var toPrimitive = __webpack_require__(22);
var createDesc = __webpack_require__(14);
var _create = __webpack_require__(44);
var gOPNExt = __webpack_require__(78);
var $GOPD = __webpack_require__(79);
var $DP = __webpack_require__(4);
var $keys = __webpack_require__(13);
var gOPD = $GOPD.f;
var dP = $DP.f;
var gOPN = gOPNExt.f;
var $Symbol = global.Symbol;
var $JSON = global.JSON;
var _stringify = $JSON && $JSON.stringify;
var PROTOTYPE = 'prototype';
var HIDDEN = wks('_hidden');
var TO_PRIMITIVE = wks('toPrimitive');
var isEnum = {}.propertyIsEnumerable;
var SymbolRegistry = shared('symbol-registry');
var AllSymbols = shared('symbols');
var OPSymbols = shared('op-symbols');
var ObjectProto = Object[PROTOTYPE];
var USE_NATIVE = typeof $Symbol == 'function';
var QObject = global.QObject;
// Don't use setters in Qt Script, https://github.com/zloirock/core-js/issues/173
var setter = !QObject || !QObject[PROTOTYPE] || !QObject[PROTOTYPE].findChild;
// fallback for old Android, https://code.google.com/p/v8/issues/detail?id=687
var setSymbolDesc = DESCRIPTORS && $fails(function () {
return _create(dP({}, 'a', {
get: function () { return dP(this, 'a', { value: 7 }).a; }
})).a != 7;
}) ? function (it, key, D) {
var protoDesc = gOPD(ObjectProto, key);
if (protoDesc) delete ObjectProto[key];
dP(it, key, D);
if (protoDesc && it !== ObjectProto) dP(ObjectProto, key, protoDesc);
} : dP;
var wrap = function (tag) {
var sym = AllSymbols[tag] = _create($Symbol[PROTOTYPE]);
sym._k = tag;
return sym;
};
var isSymbol = USE_NATIVE && typeof $Symbol.iterator == 'symbol' ? function (it) {
return typeof it == 'symbol';
} : function (it) {
return it instanceof $Symbol;
};
var $defineProperty = function defineProperty(it, key, D) {
if (it === ObjectProto) $defineProperty(OPSymbols, key, D);
anObject(it);
key = toPrimitive(key, true);
anObject(D);
if (has(AllSymbols, key)) {
if (!D.enumerable) {
if (!has(it, HIDDEN)) dP(it, HIDDEN, createDesc(1, {}));
it[HIDDEN][key] = true;
} else {
if (has(it, HIDDEN) && it[HIDDEN][key]) it[HIDDEN][key] = false;
D = _create(D, { enumerable: createDesc(0, false) });
} return setSymbolDesc(it, key, D);
} return dP(it, key, D);
};
var $defineProperties = function defineProperties(it, P) {
anObject(it);
var keys = enumKeys(P = toIObject(P));
var i = 0;
var l = keys.length;
var key;
while (l > i) $defineProperty(it, key = keys[i++], P[key]);
return it;
};
var $create = function create(it, P) {
return P === undefined ? _create(it) : $defineProperties(_create(it), P);
};
var $propertyIsEnumerable = function propertyIsEnumerable(key) {
var E = isEnum.call(this, key = toPrimitive(key, true));
if (this === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key)) return false;
return E || !has(this, key) || !has(AllSymbols, key) || has(this, HIDDEN) && this[HIDDEN][key] ? E : true;
};
var $getOwnPropertyDescriptor = function getOwnPropertyDescriptor(it, key) {
it = toIObject(it);
key = toPrimitive(key, true);
if (it === ObjectProto && has(AllSymbols, key) && !has(OPSymbols, key)) return;
var D = gOPD(it, key);
if (D && has(AllSymbols, key) && !(has(it, HIDDEN) && it[HIDDEN][key])) D.enumerable = true;
return D;
};
var $getOwnPropertyNames = function getOwnPropertyNames(it) {
var names = gOPN(toIObject(it));
var result = [];
var i = 0;
var key;
while (names.length > i) {
if (!has(AllSymbols, key = names[i++]) && key != HIDDEN && key != META) result.push(key);
} return result;
};
var $getOwnPropertySymbols = function getOwnPropertySymbols(it) {
var IS_OP = it === ObjectProto;
var names = gOPN(IS_OP ? OPSymbols : toIObject(it));
var result = [];
var i = 0;
var key;
while (names.length > i) {
if (has(AllSymbols, key = names[i++]) && (IS_OP ? has(ObjectProto, key) : true)) result.push(AllSymbols[key]);
} return result;
};
// 19.4.1.1 Symbol([description])
if (!USE_NATIVE) {
$Symbol = function Symbol() {
if (this instanceof $Symbol) throw TypeError('Symbol is not a constructor!');
var tag = uid(arguments.length > 0 ? arguments[0] : undefined);
var $set = function (value) {
if (this === ObjectProto) $set.call(OPSymbols, value);
if (has(this, HIDDEN) && has(this[HIDDEN], tag)) this[HIDDEN][tag] = false;
setSymbolDesc(this, tag, createDesc(1, value));
};
if (DESCRIPTORS && setter) setSymbolDesc(ObjectProto, tag, { configurable: true, set: $set });
return wrap(tag);
};
redefine($Symbol[PROTOTYPE], 'toString', function toString() {
return this._k;
});
$GOPD.f = $getOwnPropertyDescriptor;
$DP.f = $defineProperty;
__webpack_require__(46).f = gOPNExt.f = $getOwnPropertyNames;
__webpack_require__(18).f = $propertyIsEnumerable;
__webpack_require__(32).f = $getOwnPropertySymbols;
if (DESCRIPTORS && !__webpack_require__(15)) {
redefine(ObjectProto, 'propertyIsEnumerable', $propertyIsEnumerable, true);
}
wksExt.f = function (name) {
return wrap(wks(name));
};
}
$export($export.G + $export.W + $export.F * !USE_NATIVE, { Symbol: $Symbol });
for (var es6Symbols = (
// 19.4.2.2, 19.4.2.3, 19.4.2.4, 19.4.2.6, 19.4.2.8, 19.4.2.9, 19.4.2.10, 19.4.2.11, 19.4.2.12, 19.4.2.13, 19.4.2.14
'hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables'
).split(','), j = 0; es6Symbols.length > j;)wks(es6Symbols[j++]);
for (var wellKnownSymbols = $keys(wks.store), k = 0; wellKnownSymbols.length > k;) wksDefine(wellKnownSymbols[k++]);
$export($export.S + $export.F * !USE_NATIVE, 'Symbol', {
// 19.4.2.1 Symbol.for(key)
'for': function (key) {
return has(SymbolRegistry, key += '')
? SymbolRegistry[key]
: SymbolRegistry[key] = $Symbol(key);
},
// 19.4.2.5 Symbol.keyFor(sym)
keyFor: function keyFor(sym) {
if (!isSymbol(sym)) throw TypeError(sym + ' is not a symbol!');
for (var key in SymbolRegistry) if (SymbolRegistry[key] === sym) return key;
},
useSetter: function () { setter = true; },
useSimple: function () { setter = false; }
});
$export($export.S + $export.F * !USE_NATIVE, 'Object', {
// 19.1.2.2 Object.create(O [, Properties])
create: $create,
// 19.1.2.4 Object.defineProperty(O, P, Attributes)
defineProperty: $defineProperty,
// 19.1.2.3 Object.defineProperties(O, Properties)
defineProperties: $defineProperties,
// 19.1.2.6 Object.getOwnPropertyDescriptor(O, P)
getOwnPropertyDescriptor: $getOwnPropertyDescriptor,
// 19.1.2.7 Object.getOwnPropertyNames(O)
getOwnPropertyNames: $getOwnPropertyNames,
// 19.1.2.8 Object.getOwnPropertySymbols(O)
getOwnPropertySymbols: $getOwnPropertySymbols
});
// 24.3.2 JSON.stringify(value [, replacer [, space]])
$JSON && $export($export.S + $export.F * (!USE_NATIVE || $fails(function () {
var S = $Symbol();
// MS Edge converts symbol values to JSON as {}
// WebKit converts symbol values to JSON as null
// V8 throws on boxed symbols
return _stringify([S]) != '[null]' || _stringify({ a: S }) != '{}' || _stringify(Object(S)) != '{}';
})), 'JSON', {
stringify: function stringify(it) {
var args = [it];
var i = 1;
var replacer, $replacer;
while (arguments.length > i) args.push(arguments[i++]);
$replacer = replacer = args[1];
if (!isObject(replacer) && it === undefined || isSymbol(it)) return; // IE8 returns string on undefined
if (!isArray(replacer)) replacer = function (key, value) {
if (typeof $replacer == 'function') value = $replacer.call(this, key, value);
if (!isSymbol(value)) return value;
};
args[1] = replacer;
return _stringify.apply($JSON, args);
}
});
// 19.4.3.4 Symbol.prototype[@@toPrimitive](hint)
$Symbol[PROTOTYPE][TO_PRIMITIVE] || __webpack_require__(6)($Symbol[PROTOTYPE], TO_PRIMITIVE, $Symbol[PROTOTYPE].valueOf);
// 19.4.3.5 Symbol.prototype[@@toStringTag]
setToStringTag($Symbol, 'Symbol');
// 20.2.1.9 Math[@@toStringTag]
setToStringTag(Math, 'Math', true);
// 24.3.3 JSON[@@toStringTag]
setToStringTag(global.JSON, 'JSON', true);
/***/ }),
/* 75 */
/***/ (function(module, exports, __webpack_require__) {
var META = __webpack_require__(17)('meta');
var isObject = __webpack_require__(8);
var has = __webpack_require__(5);
var setDesc = __webpack_require__(4).f;
var id = 0;
var isExtensible = Object.isExtensible || function () {
return true;
};
var FREEZE = !__webpack_require__(11)(function () {
return isExtensible(Object.preventExtensions({}));
});
var setMeta = function (it) {
setDesc(it, META, { value: {
i: 'O' + ++id, // object ID
w: {} // weak collections IDs
} });
};
var fastKey = function (it, create) {
// return primitive with prefix
if (!isObject(it)) return typeof it == 'symbol' ? it : (typeof it == 'string' ? 'S' : 'P') + it;
if (!has(it, META)) {
// can't set metadata to uncaught frozen object
if (!isExtensible(it)) return 'F';
// not necessary to add metadata
if (!create) return 'E';
// add missing metadata
setMeta(it);
// return object ID
} return it[META].i;
};
var getWeak = function (it, create) {
if (!has(it, META)) {
// can't set metadata to uncaught frozen object
if (!isExtensible(it)) return true;
// not necessary to add metadata
if (!create) return false;
// add missing metadata
setMeta(it);
// return hash weak collections IDs
} return it[META].w;
};
// add metadata on freeze-family methods calling
var onFreeze = function (it) {
if (FREEZE && meta.NEED && isExtensible(it) && !has(it, META)) setMeta(it);
return it;
};
var meta = module.exports = {
KEY: META,
NEED: false,
fastKey: fastKey,
getWeak: getWeak,
onFreeze: onFreeze
};
/***/ }),
/* 76 */
/***/ (function(module, exports, __webpack_require__) {
// all enumerable object keys, includes symbols
var getKeys = __webpack_require__(13);
var gOPS = __webpack_require__(32);
var pIE = __webpack_require__(18);
// returns `it`'s own enumerable string keys plus its own enumerable symbol keys
module.exports = function (it) {
var result = getKeys(it);
var getSymbols = gOPS.f;
if (getSymbols) {
var symbols = getSymbols(it);
var isEnum = pIE.f;
var i = 0;
var key;
// keep only the symbols that are enumerable on `it`
while (symbols.length > i) if (isEnum.call(it, key = symbols[i++])) result.push(key);
} return result;
};
/***/ }),
/* 77 */
/***/ (function(module, exports, __webpack_require__) {
// 7.2.2 IsArray(argument)
// native Array.isArray when available, otherwise a [[Class]]-based fallback
var cof = __webpack_require__(21);
module.exports = Array.isArray || function isArray(arg) {
return cof(arg) == 'Array';
};
/***/ }),
/* 78 */
/***/ (function(module, exports, __webpack_require__) {
// fallback for IE11 buggy Object.getOwnPropertyNames with iframe and window
var toIObject = __webpack_require__(7);
var gOPN = __webpack_require__(46).f;
var toString = {}.toString;
// snapshot of window's own property names, captured once at module load
var windowNames = typeof window == 'object' && window && Object.getOwnPropertyNames
? Object.getOwnPropertyNames(window) : [];
// IE11 can throw when enumerating a window from a dead iframe; fall back to the snapshot
var getWindowNames = function (it) {
try {
return gOPN(it);
} catch (e) {
return windowNames.slice();
}
};
module.exports.f = function getOwnPropertyNames(it) {
return windowNames && toString.call(it) == '[object Window]' ? getWindowNames(it) : gOPN(toIObject(it));
};
/***/ }),
/* 79 */
/***/ (function(module, exports, __webpack_require__) {
// Object.getOwnPropertyDescriptor: native when descriptors are supported,
// otherwise an ES3-compatible emulation built from propertyIsEnumerable
var pIE = __webpack_require__(18);
var createDesc = __webpack_require__(14);
var toIObject = __webpack_require__(7);
var toPrimitive = __webpack_require__(22);
var has = __webpack_require__(5);
var IE8_DOM_DEFINE = __webpack_require__(33);
var gOPD = Object.getOwnPropertyDescriptor;
exports.f = __webpack_require__(3) ? gOPD : function getOwnPropertyDescriptor(O, P) {
O = toIObject(O);
P = toPrimitive(P, true);
// IE8 supports descriptors on DOM elements only; try the native path first
if (IE8_DOM_DEFINE) try {
return gOPD(O, P);
} catch (e) { /* empty */ }
if (has(O, P)) return createDesc(!pIE.f.call(O, P), O[P]);
};
/***/ }),
/* 80 */
/***/ (function(module, exports, __webpack_require__) {
// define the well-known symbol Symbol.asyncIterator
__webpack_require__(30)('asyncIterator');
/***/ }),
/* 81 */
/***/ (function(module, exports, __webpack_require__) {
// define the well-known symbol Symbol.observable
__webpack_require__(30)('observable');
/***/ }),
/* 82 */,
/* 83 */
/***/ (function(module, exports) {
// shim for using process in browser
var process = module.exports = {};
// cached from whatever global is present so that test runners that stub it
// don't break things. But we need to wrap it in a try catch in case it is
// wrapped in strict mode code which doesn't define any globals. It's inside a
// function because try/catches deoptimize in certain engines.
var cachedSetTimeout;
var cachedClearTimeout;
function defaultSetTimout() {
throw new Error('setTimeout has not been defined');
}
function defaultClearTimeout () {
throw new Error('clearTimeout has not been defined');
}
(function () {
try {
if (typeof setTimeout === 'function') {
cachedSetTimeout = setTimeout;
} else {
cachedSetTimeout = defaultSetTimout;
}
} catch (e) {
cachedSetTimeout = defaultSetTimout;
}
try {
if (typeof clearTimeout === 'function') {
cachedClearTimeout = clearTimeout;
} else {
cachedClearTimeout = defaultClearTimeout;
}
} catch (e) {
cachedClearTimeout = defaultClearTimeout;
}
} ())
// schedule `fun` via the best setTimeout we can reach, tolerating environments
// (old IE, evaled scripts) where the cached reference needs an explicit `this`
function runTimeout(fun) {
if (cachedSetTimeout === setTimeout) {
//normal enviroments in sane situations
return setTimeout(fun, 0);
}
// if setTimeout wasn't available but was latter defined
if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
cachedSetTimeout = setTimeout;
return setTimeout(fun, 0);
}
try {
// when when somebody has screwed with setTimeout but no I.E. maddness
return cachedSetTimeout(fun, 0);
} catch(e){
try {
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
return cachedSetTimeout.call(null, fun, 0);
} catch(e){
// same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error
return cachedSetTimeout.call(this, fun, 0);
}
}
}
// mirror of runTimeout for clearing a scheduled timer
function runClearTimeout(marker) {
if (cachedClearTimeout === clearTimeout) {
//normal enviroments in sane situations
return clearTimeout(marker);
}
// if clearTimeout wasn't available but was latter defined
if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
cachedClearTimeout = clearTimeout;
return clearTimeout(marker);
}
try {
// when when somebody has screwed with setTimeout but no I.E. maddness
return cachedClearTimeout(marker);
} catch (e){
try {
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
return cachedClearTimeout.call(null, marker);
} catch (e){
// same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error.
// Some versions of I.E. have different rules for clearTimeout vs setTimeout
return cachedClearTimeout.call(this, marker);
}
}
}
// pending Item callbacks and drain state for the nextTick emulation
var queue = [];
var draining = false;
var currentQueue;
var queueIndex = -1;
// invoked via the safety timeout: if a task threw mid-drain, re-queue the
// unfinished remainder and resume draining
function cleanUpNextTick() {
if (!draining || !currentQueue) {
return;
}
draining = false;
if (currentQueue.length) {
queue = currentQueue.concat(queue);
} else {
queueIndex = -1;
}
if (queue.length) {
drainQueue();
}
}
// run every queued callback; tasks queued while draining are picked up in the
// same pass (outer while loop re-reads queue.length)
function drainQueue() {
if (draining) {
return;
}
var timeout = runTimeout(cleanUpNextTick);
draining = true;
var len = queue.length;
while(len) {
currentQueue = queue;
queue = [];
while (++queueIndex < len) {
if (currentQueue) {
currentQueue[queueIndex].run();
}
}
queueIndex = -1;
len = queue.length;
}
currentQueue = null;
draining = false;
runClearTimeout(timeout);
}
// process.nextTick(fn, ...args): queue fn and kick off a drain on the first entry
process.nextTick = function (fun) {
var args = new Array(arguments.length - 1);
if (arguments.length > 1) {
for (var i = 1; i < arguments.length; i++) {
args[i - 1] = arguments[i];
}
}
queue.push(new Item(fun, args));
if (queue.length === 1 && !draining) {
runTimeout(drainQueue);
}
};
// v8 likes predictible objects
function Item(fun, array) {
this.fun = fun;
this.array = array;
}
Item.prototype.run = function () {
this.fun.apply(null, this.array);
};
// stub out the rest of Node's process API with browser-safe no-ops/constants
process.title = 'browser';
process.browser = true;
process.env = {};
process.argv = [];
process.version = ''; // empty string to avoid regexp issues
process.versions = {};
function noop() {}
process.on = noop;
process.addListener = noop;
process.once = noop;
process.off = noop;
process.removeListener = noop;
process.removeAllListeners = noop;
process.emit = noop;
process.prependListener = noop;
process.prependOnceListener = noop;
process.listeners = function (name) { return [] }
process.binding = function (name) {
throw new Error('process.binding is not supported');
};
process.cwd = function () { return '/' };
process.chdir = function (dir) {
throw new Error('process.chdir is not supported');
};
process.umask = function() { return 0; };
/***/ }),
/* 84 */
/***/ (function(module, exports, __webpack_require__) {
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports, __webpack_require__(37), __webpack_require__(105), __webpack_require__(39), __webpack_require__(35), __webpack_require__(91)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports, require('babel-runtime/helpers/defineProperty'), require('../../locale/lang/zh-CN'), require('../helpers/debug'), require('../helpers/util'), require('../lang/date'));
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports, global.defineProperty, global.zhCN, global.debug, global.util, global.date);
global.index = mod.exports;
}
})(this, function (module, exports, _defineProperty2, _zhCN, _debug, _util, _date) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _defineProperty3 = _interopRequireDefault(_defineProperty2);
var _zhCN2 = _interopRequireDefault(_zhCN);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
var proto = void 0;
var DEFAULT_LANG = 'zh-CN';
var locale = {
name: 'locale',
install: function install(Vue) {
if (locale.installed) return;
proto = Vue.prototype;
Vue.util.defineReactive(proto, '$cubeLang', DEFAULT_LANG);
proto['$cubeMessages'] = (0, _defineProperty3.default)({}, DEFAULT_LANG, _zhCN2.default);
locale.installed = true;
},
use: function use(lang, messages) {
proto['$cubeLang'] = lang;
var cubeMessages = proto['$cubeMessages'];
if (!(lang in cubeMessages)) {
cubeMessages[[lang]] = messages;
}
},
helpers: {
toLocaleDateString: function toLocaleDateString(config, formatRules) {
var compatibleConfig = (0, _util.isNumber)(config) ? config : config.replace(/-/g, '/');
var date = new Date(compatibleConfig);
if ((0, _util.isUndef)(formatRules)) return date.toDateString();
return (0, _date.formatDate)(date, formatRules);
}
},
addHelper: function addHelper(fnName, fn) {
if (fnName in locale.helpers) {
(0, _debug.warn)(fnName + ' has already been registered on helpers function, please change another name');
return;
}
locale.helpers[fnName] = fn;
}
};
exports.default = locale;
module.exports = exports['default'];
});
/***/ }),
/* 85 */
/***/ (function(module, exports, __webpack_require__) {
// cube-ui locale mixin (UMD): gives components a computed $t(path) translator
// backed by the reactive $cubeLang / $cubeMessages installed by the locale plugin.
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports, __webpack_require__(84), __webpack_require__(35), __webpack_require__(39)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports, require('../locale'), require('../helpers/util'), require('../helpers/debug'));
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports, global.locale, global.util, global.debug);
global.locale = mod.exports;
}
})(this, function (module, exports, _locale, _util, _debug) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _locale2 = _interopRequireDefault(_locale);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
var TRANSLATION_ABSENT = 'Translation is not registered correctly, ' + 'you can call Locale.use() to install it.';
exports.default = {
computed: {
// returns a lookup function: $t('a.b.c') resolves a path in the current
// language's message table; warns and yields '' when the table is missing
$t: function $t() {
var lang = this.$cubeLang;
var messages = this.$cubeMessages[lang];
if ((0, _util.isUndef)(messages)) {
(0, _debug.warn)(TRANSLATION_ABSENT);
return '';
}
return function (path) {
return (0, _util.parsePath)(messages, path);
};
}
},
// auto-install the locale plugin on the root Vue constructor before any
// component using this mixin is created
beforeCreate: function beforeCreate() {
_locale2.default.install(this.$root.constructor);
}
};
module.exports = exports['default'];
});
/***/ }),
/* 86 */,
/* 87 */,
/* 88 */
/***/ (function(module, exports, __webpack_require__) {
// babel-runtime style re-export of the core-js Object.assign polyfill
module.exports = { "default": __webpack_require__(92), __esModule: true };
/***/ }),
/* 89 */
/***/ (function(module, exports, __webpack_require__) {
// cube-ui createAPI wrapper (UMD): registers vue-create-api with the 'cube-'
// component prefix and returns the imperative API for a given component.
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports, __webpack_require__(99)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports, require('vue-create-api'));
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports, global.vueCreateApi);
global.createApi = mod.exports;
}
})(this, function (module, exports, _vueCreateApi) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = createAPI;
var _vueCreateApi2 = _interopRequireDefault(_vueCreateApi);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
// events: component event names to bridge; single: whether instances are singletons
function createAPI(Vue, Component, events, single) {
Vue.use(_vueCreateApi2.default, { componentPrefix: 'cube-' });
var api = Vue.createAPI(Component, events, single);
return api;
}
module.exports = exports['default'];
});
/***/ }),
/* 90 */,
/* 91 */
/***/ (function(module, exports, __webpack_require__) {
// cube-ui date helpers (UMD): formatting (Y/M/D/h/m/s/q/S tokens), day diffing,
// a monotonic-ish clock, and days-in-month computation.
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [exports], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(exports);
} else {
var mod = {
exports: {}
};
factory(mod.exports);
global.date = mod.exports;
}
})(this, function (exports) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
// millisecond spans used by callers for date arithmetic
var DAY_TIMESTAMP = 60 * 60 * 24 * 1000;
var HOUR_TIMESTAMP = 60 * 60 * 1000;
var MINUTE_TIMESTAMP = 60 * 1000;
// replace the token run for one field (e.g. 'MM') in `format` with `value`;
// single-letter tokens are unpadded, longer runs are zero-padded; year is
// truncated from the right (e.g. 'YY' -> last two digits)
function formatType(type, format, value, regExpAttributes) {
var regExpMap = {
year: '(Y+)',
month: '(M+)',
date: '(D+)',
hour: '(h+)',
minute: '(m+)',
second: '(s+)',
quarter: '(q+)',
millisecond: '(S)'
};
if (new RegExp(regExpMap[type], regExpAttributes).test(format)) {
var replaceStr = type === 'year' ? value.toString().substr(4 - RegExp.$1.length) : RegExp.$1.length === 1 ? value : pad(value);
format = format.replace(RegExp.$1, replaceStr);
}
return format;
}
// zero-pad a number to 2 digits
function pad(value) {
return ('00' + value).substr(('' + value).length);
}
// apply every field of `date` to the token string `format`
// (case-insensitive tokens for year/date/hour/quarter via the 'i' flag)
function formatDate(date, format) {
var map = {
year: {
value: date.getFullYear(),
regExpAttributes: 'i'
},
month: {
value: date.getMonth() + 1
},
date: {
value: date.getDate(),
regExpAttributes: 'i'
},
hour: {
value: date.getHours(),
regExpAttributes: 'i'
},
minute: {
value: date.getMinutes()
},
second: {
value: date.getSeconds()
},
quarter: {
value: Math.floor((date.getMonth() + 3) / 3),
regExpAttributes: 'i'
},
millisecond: {
value: date.getMilliseconds()
}
};
for (var key in map) {
format = formatType(key, format, map[key].value, map[key].regExpAttributes);
}
return format;
}
// timestamp of local midnight for `date`
function getZeroStamp(date) {
var year = date.getFullYear();
var month = date.getMonth() + 1;
var day = date.getDate();
return +new Date(year + '/' + month + '/' + day + ' 00:00:00');
}
// whole calendar days between date1 and date2 (midnight to midnight)
function getDayDiff(date1, date2) {
return Math.floor((getZeroStamp(date1) - getZeroStamp(date2)) / DAY_TIMESTAMP);
}
// high-resolution epoch time when performance.now is available, else Date.now
function getNow() {
return window.performance && window.performance.now ? window.performance.now() + window.performance.timing.navigationStart : +new Date();
}
// days in `month` (1-12); February uses leap-year rules, and an absent `year`
// is treated as leap (29) so pickers can offer the full range
function computeNatureMaxDay(month, year) {
var natureMaxDay = 30;
if ([1, 3, 5, 7, 8, 10, 12].indexOf(month) > -1) {
natureMaxDay = 31;
} else {
if (month === 2) {
natureMaxDay = !year || !(year % 400) || !(year % 4) && year % 100 ? 29 : 28;
}
}
return natureMaxDay;
}
exports.DAY_TIMESTAMP = DAY_TIMESTAMP;
exports.HOUR_TIMESTAMP = HOUR_TIMESTAMP;
exports.MINUTE_TIMESTAMP = MINUTE_TIMESTAMP;
exports.pad = pad;
exports.formatType = formatType;
exports.formatDate = formatDate;
exports.getZeroStamp = getZeroStamp;
exports.getDayDiff = getDayDiff;
exports.getNow = getNow;
exports.computeNatureMaxDay = computeNatureMaxDay;
});
/***/ }),
/* 92 */
/***/ (function(module, exports, __webpack_require__) {
// load the Object.assign polyfill definition, then export the core-js binding
__webpack_require__(93);
module.exports = __webpack_require__(0).Object.assign;
/***/ }),
/* 93 */
/***/ (function(module, exports, __webpack_require__) {
// 19.1.3.1 Object.assign(target, source)
// register the (possibly polyfilled) assign implementation on Object
var $export = __webpack_require__(10);
$export($export.S + $export.F, 'Object', { assign: __webpack_require__(94) });
/***/ }),
/* 94 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
// 19.1.2.1 Object.assign(target, source, ...)
var getKeys = __webpack_require__(13);
var gOPS = __webpack_require__(32);
var pIE = __webpack_require__(18);
var toObject = __webpack_require__(27);
var IObject = __webpack_require__(40);
var $assign = Object.assign;
// should work with symbols and should have deterministic property order (V8 bug)
// use the polyfill when native assign is absent or fails the feature test below
module.exports = !$assign || __webpack_require__(11)(function () {
var A = {};
var B = {};
// eslint-disable-next-line no-undef
var S = Symbol();
var K = 'abcdefghijklmnopqrst';
A[S] = 7;
K.split('').forEach(function (k) { B[k] = k; });
return $assign({}, A)[S] != 7 || Object.keys($assign({}, B)).join('') != K;
}) ? function assign(target, source) { // eslint-disable-line no-unused-vars
var T = toObject(target);
var aLen = arguments.length;
var index = 1;
var getSymbols = gOPS.f;
var isEnum = pIE.f;
// copy every source's own enumerable string and symbol keys onto the target
while (aLen > index) {
var S = IObject(arguments[index++]);
var keys = getSymbols ? getKeys(S).concat(getSymbols(S)) : getKeys(S);
var length = keys.length;
var j = 0;
var key;
while (length > j) if (isEnum.call(S, key = keys[j++])) T[key] = S[key];
} return T;
} : $assign;
/***/ }),
/* 95 */,
/* 96 */,
/* 97 */,
/* 98 */,
/* 99 */
/***/ (function(module, __webpack_exports__, __webpack_require__) {
"use strict";
Object.defineProperty(__webpack_exports__, "__esModule", { value: true });
/**
 * vue-create-api v0.2.3
 * (c) 2019 ustbhuangyi
 * @license MIT
 */
// Object.assign fallback for environments without it
var _extends = Object.assign || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
var camelizeRE = /-(\w)/g;
// 'foo-bar' -> 'fooBar'
function camelize(str) {
return (str + '').replace(camelizeRE, function (m, c) {
return c ? c.toUpperCase() : '';
});
}
// escape regex metacharacters in `str` (plus an optional extra delimiter)
function escapeReg(str, delimiter) {
return (str + '').replace(new RegExp('[.\\\\+*?\\[\\^\\]$(){}=!<>|:\\' + (delimiter || '') + '-]', 'g'), '\\$&');
}
function isBoolean(value) {
return typeof value === 'boolean';
}
function isUndef(value) {
return value === undefined;
}
function isStr(value) {
return typeof value === 'string';
}
function isFunction(fn) {
return typeof fn === 'function';
}
function assert(condition, msg) {
if (!condition) {
throw new Error("[vue-create-api error]: " + msg);
}
}
// mount Component in a throwaway root Vue instance, append it to document.body,
// and return the component with a $updateProps helper for later prop patches
function instantiateComponent(Vue, Component, data, renderFn, options) {
var renderData = void 0;
var childrenRenderFn = void 0;
var instance = new Vue(_extends({}, options, {
render: function render(createElement) {
var children = childrenRenderFn && childrenRenderFn(createElement);
if (children && !Array.isArray(children)) {
children = [children];
}
return createElement(Component, _extends({}, renderData), children || []);
},
methods: {
init: function init() {
document.body.appendChild(this.$el);
},
destroy: function destroy() {
this.$destroy();
if (this.$el && this.$el.parentNode === document.body) {
document.body.removeChild(this.$el);
}
}
}
}));
instance.updateRenderData = function (data, render) {
renderData = data;
childrenRenderFn = render;
};
instance.updateRenderData(data, renderFn);
instance.$mount();
instance.init();
var component = instance.$children[0];
component.$updateProps = function (props) {
_extends(renderData.props, props);
instance.$forceUpdate();
};
return component;
}
// split raw config into Vue render-data shape: { props, on }, where entries
// matching a declared event-handler prop name are moved into `on`
function parseRenderData() {
var data = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
var events = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
events = parseEvents(events);
var props = _extends({}, data);
var on = {};
for (var name in events) {
if (events.hasOwnProperty(name)) {
var handlerName = events[name];
if (props[handlerName]) {
on[name] = props[handlerName];
delete props[handlerName];
}
}
}
return {
props: props,
on: on
};
}
// map event name 'foo' -> handler prop name 'onFoo'
function parseEvents(events) {
var parsedEvents = {};
events.forEach(function (name) {
parsedEvents[name] = camelize('on-' + name);
});
return parsedEvents;
}
var eventBeforeDestroy = 'hook:beforeDestroy';
// builds the { before, create } API object for one Component; `this` is Vue
function apiCreator(Component) {
var events = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : [];
var single = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : false;
var Vue = this;
var singleMap = {};
var beforeHooks = [];
// create (or, in single mode, reuse) a mounted component instance
function createComponent(renderData, renderFn, options, single) {
beforeHooks.forEach(function (before) {
before(renderData, renderFn, single);
});
// singletons are tracked per owning Vue instance (-1 = no owner)
var ownerInsUid = options.parent ? options.parent._uid : -1;
var _ref = singleMap[ownerInsUid] ? singleMap[ownerInsUid] : {},
comp = _ref.comp,
ins = _ref.ins;
if (single && comp && ins) {
ins.updateRenderData(renderData, renderFn);
ins.$forceUpdate();
return comp;
}
var component = instantiateComponent(Vue, Component, renderData, renderFn, options);
var instance = component.$parent;
var originRemove = component.remove;
// wrap remove/show/hide so removal tears down the root instance and the
// singleton registry entry, and show/hide become chainable
component.remove = function () {
if (single) {
if (!singleMap[ownerInsUid]) {
return;
}
singleMap[ownerInsUid] = null;
}
originRemove && originRemove.apply(this, arguments);
instance.destroy();
};
var originShow = component.show;
component.show = function () {
originShow && originShow.apply(this, arguments);
return this;
};
var originHide = component.hide;
component.hide = function () {
originHide && originHide.apply(this, arguments);
return this;
};
if (single) {
singleMap[ownerInsUid] = {
comp: component,
ins: instance
};
}
return component;
}
// resolve $props: string values name properties on the owner instance and
// are copied now and watched for changes (onChange re-applies them)
function processProps(ownerInstance, renderData, isInVueInstance, onChange) {
var $props = renderData.props.$props;
if ($props) {
delete renderData.props.$props;
var watchKeys = [];
var watchPropKeys = [];
Object.keys($props).forEach(function (key) {
var propKey = $props[key];
if (isStr(propKey) && propKey in ownerInstance) {
// get instance value
renderData.props[key] = ownerInstance[propKey];
watchKeys.push(key);
watchPropKeys.push(propKey);
} else {
renderData.props[key] = propKey;
}
});
if (isInVueInstance) {
var unwatchFn = ownerInstance.$watch(function () {
var props = {};
watchKeys.forEach(function (key, i) {
props[key] = ownerInstance[watchPropKeys[i]];
});
return props;
}, onChange);
ownerInstance.__unwatchFns__.push(unwatchFn);
}
}
}
// resolve $events: string handlers name methods on the owner instance
function processEvents(renderData, ownerInstance) {
var $events = renderData.props.$events;
if ($events) {
delete renderData.props.$events;
Object.keys($events).forEach(function (event) {
var eventHandler = $events[event];
if (typeof eventHandler === 'string') {
eventHandler = ownerInstance[eventHandler];
}
renderData.on[event] = eventHandler;
});
}
}
// hoist remaining $-prefixed props (e.g. $class) to top-level render data
function process$(renderData) {
var props = renderData.props;
Object.keys(props).forEach(function (prop) {
if (prop.charAt(0) === '$') {
renderData[prop.slice(1)] = props[prop];
delete props[prop];
}
});
}
// tear down every $watch registered by processProps for this owner
function cancelWatchProps(ownerInstance) {
if (ownerInstance.__unwatchFns__) {
ownerInstance.__unwatchFns__.forEach(function (unwatchFn) {
unwatchFn();
});
ownerInstance.__unwatchFns__ = null;
}
}
var api = {
before: function before(hook) {
beforeHooks.push(hook);
},
// imperative entry point; `this` may be a Vue instance (enables watching,
// parent linkage, and auto-cleanup on the owner's beforeDestroy)
create: function create(config, renderFn, _single) {
if (!isFunction(renderFn) && isUndef(_single)) {
_single = renderFn;
renderFn = null;
}
if (isUndef(_single)) {
_single = single;
}
var ownerInstance = this;
var isInVueInstance = !!ownerInstance.$on;
var options = {};
if (isInVueInstance) {
// Set parent to store router i18n ...
options.parent = ownerInstance;
if (!ownerInstance.__unwatchFns__) {
ownerInstance.__unwatchFns__ = [];
}
}
var renderData = parseRenderData(config, events);
var component = null;
processProps(ownerInstance, renderData, isInVueInstance, function (newProps) {
component && component.$updateProps(newProps);
});
processEvents(renderData, ownerInstance);
process$(renderData);
component = createComponent(renderData, renderFn, options, _single);
if (isInVueInstance) {
ownerInstance.$on(eventBeforeDestroy, beforeDestroy);
}
function beforeDestroy() {
cancelWatchProps(ownerInstance);
component.remove();
component = null;
}
return component;
}
};
return api;
}
// Vue plugin install: adds Vue.createAPI, which wires the imperative creator
// onto Vue.prototype under a '$create<Name>'-style method
function install(Vue) {
var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var _options$componentPre = options.componentPrefix,
componentPrefix = _options$componentPre === undefined ? '' : _options$componentPre,
_options$apiPrefix = options.apiPrefix,
apiPrefix = _options$apiPrefix === undefined ? '$create-' : _options$apiPrefix;
Vue.createAPI = function (Component, events, single) {
if (isBoolean(events)) {
single = events;
events = [];
}
var api = apiCreator.call(this, Component, events, single);
var createName = processComponentName(Component, {
componentPrefix: componentPrefix,
apiPrefix: apiPrefix
});
Vue.prototype[createName] = Component.$create = api.create;
return api;
};
}
// 'cube-dialog' + '$create-' -> '$createDialog' (prefix stripped, camelized)
function processComponentName(Component, options) {
var componentPrefix = options.componentPrefix,
apiPrefix = options.apiPrefix;
var name = Component.name;
assert(name, 'Component must have name while using create-api!');
var prefixReg = new RegExp('^' + escapeReg(componentPrefix), 'i');
var pureName = name.replace(prefixReg, '');
var camelizeName = '' + camelize('' + apiPrefix + pureName);
return camelizeName;
}
var index = {
install: install,
instantiateComponent: instantiateComponent,
version: '0.2.3'
};
/* harmony default export */ __webpack_exports__["default"] = (index);
/***/ }),
/* 100 */,
/* 101 */
/***/ (function(module, exports, __webpack_require__) {
// vue-loader assembly: combines the popup script (103), template (104) and
// extracted styles (102) into one component via the normalizer (9)
function injectStyle (ssrContext) {
__webpack_require__(102)
}
var Component = __webpack_require__(9)(
/* script */
__webpack_require__(103),
/* template */
__webpack_require__(104),
/* styles */
injectStyle,
/* scopeId */
null,
/* moduleIdentifier (server only) */
null
)
module.exports = Component.exports
/***/ }),
/* 102 */
/***/ (function(module, exports) {
// removed by extract-text-webpack-plugin
/***/ }),
/* 103 */
/***/ (function(module, exports, __webpack_require__) {
// cube-popup component options (UMD): a maskable, positionable overlay that
// builds on the shared visibility and popup mixins.
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports, __webpack_require__(37), __webpack_require__(58), __webpack_require__(69)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports, require('babel-runtime/helpers/defineProperty'), require('../../common/mixins/visibility'), require('../../common/mixins/popup'));
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports, global.defineProperty, global.visibility, global.popup);
global.popup = mod.exports;
}
})(this, function (module, exports, _defineProperty2, _visibility, _popup) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _defineProperty3 = _interopRequireDefault(_defineProperty2);
var _visibility2 = _interopRequireDefault(_visibility);
var _popup2 = _interopRequireDefault(_popup);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
var COMPONENT_NAME = 'cube-popup';
var EVENT_MASK_CLICK = 'mask-click';
exports.default = {
name: COMPONENT_NAME,
mixins: [_visibility2.default, _popup2.default],
props: {
// style modifier appended as 'cube-<type>' on the root element
type: {
type: String,
default: ''
},
// whether to show the click-catching backdrop
mask: {
type: Boolean,
default: true
},
// fallback HTML content when no default slot is given (rendered via innerHTML)
content: {
type: String,
default: ''
},
// center the container (ignored when `position` is set)
center: {
type: Boolean,
default: true
},
// edge placement, becomes class 'cube-popup-<position>'; overrides `center`
position: {
type: String,
default: ''
}
},
computed: {
rootClass: function rootClass() {
var cls = {
'cube-popup_mask': this.mask
};
if (this.type) {
cls['cube-' + this.type] = true;
}
return cls;
},
containerClass: function containerClass() {
var center = this.center;
var position = this.position;
if (position) {
return (0, _defineProperty3.default)({}, 'cube-popup-' + position, true);
}
if (center) {
return {
'cube-popup-center': true
};
}
}
},
methods: {
// emit 'mask-click' and, when closable, hide the popup
// (maskClosable and hide() are assumed to come from the popup mixin,
// which is outside this chunk — verify there)
maskClick: function maskClick(e) {
this.$emit(EVENT_MASK_CLICK, e);
if (this.maskClosable) {
this.hide();
}
}
}
};
module.exports = exports['default'];
});
/***/ }),
/* 104 */
/***/ (function(module, exports) {
// compiled render function for cube-popup: root (v-show on isVisible) wraps a
// mask (click -> maskClick, touchmove blocked) and the content container
// (slot content when present, otherwise the `content` prop via innerHTML)
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
return _c('div', {
directives: [{
name: "show",
rawName: "v-show",
value: (_vm.isVisible),
expression: "isVisible"
}],
staticClass: "cube-popup",
class: _vm.rootClass,
style: ({
'z-index': _vm.zIndex
})
}, [_c('div', {
staticClass: "cube-popup-mask",
on: {
"touchmove": function($event) {
$event.preventDefault();
},
"click": _vm.maskClick
}
}, [_vm._t("mask")], 2), _vm._v(" "), _c('div', {
staticClass: "cube-popup-container",
class: _vm.containerClass,
on: {
"touchmove": function($event) {
$event.preventDefault();
}
}
}, [(_vm.$slots.default) ? _c('div', {
staticClass: "cube-popup-content"
}, [_vm._t("default")], 2) : _c('div', {
staticClass: "cube-popup-content",
domProps: {
"innerHTML": _vm._s(_vm.content)
}
})])])
},staticRenderFns: []}
/***/ }),
/* 105 */
/***/ (function(module, exports, __webpack_require__) {
// cube-ui zh-CN message table (UMD): UI labels plus validator message
// templates ({{config}} placeholders are filled by the validator at runtime)
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports);
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports);
global.zhCN = mod.exports;
}
})(this, function (module, exports) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = {
cancel: '取消',
confirm: '确认',
ok: '确定',
prev: '上一步',
next: '下一步',
selectText: '请选择',
now: '现在',
selectTime: '选择时间',
today: '今日',
formatDate: 'M月D日',
hours: '点',
minutes: '分',
validator: {
required: '此为必填项',
type: {
string: '请输入字符',
number: '请输入数字',
array: '数据类型应为数组',
date: '请选择有效日期',
email: '请输入有效邮箱',
tel: '请输入有效的手机号码',
url: '请输入有效网址'
},
min: {
string: '至少输入 {{config}} 位字符',
number: '不得小于 {{config}}',
array: '请选择至少 {{config}} 项',
date: '请选择 {{config | toLocaleDateString("yyyy年MM月dd日")}} 之后的时间',
email: '至少输入 {{config}} 位字符',
tel: '至少输入 {{config}} 位字符',
url: '至少输入 {{config}} 位字符'
},
max: {
string: '请勿超过 {{config}} 位字符',
number: '请勿大于 {{config}}',
array: '最多选择 {{config}} 项',
date: '请选择 {{config | toLocaleDateString("yyyy年MM月dd日")}} 之前的时间',
email: '请勿超过 {{config}} 位字符',
tel: '请勿超过 {{config}} 位字符',
url: '请勿超过 {{config}} 位字符'
},
len: {
string: '请输入 {{config}} 位字符',
number: '长度应等于 {{config}}',
array: '请选择 {{config}} 项',
date: '请选择 {{config | toLocaleDateString("yyyy年MM月dd日")}} 之前的时间',
email: '请输入 {{config}} 位字符',
tel: '请输入 {{config}} 位字符',
url: '请输入 {{config}} 位字符'
},
pattern: '格式错误',
custom: '未通过校验',
notWhitespace: '空白内容无效'
}
};
module.exports = exports['default'];
});
/***/ }),
/* 106 */,
/* 107 */,
/* 108 */,
/* 109 */,
/* 110 */,
/* 111 */,
/* 112 */,
/* 113 */,
/* 114 */,
/* 115 */,
/* 116 */,
/* 117 */,
/* 118 */,
/* 119 */,
/* 120 */,
/* 121 */,
/* 122 */,
/* 123 */,
/* 124 */,
/* 125 */,
/* 126 */,
/* 127 */,
/* 128 */,
/* 129 */,
/* 130 */,
/* 131 */,
/* 132 */,
/* 133 */,
/* 134 */,
/* 135 */,
/* 136 */,
/* 137 */,
/* 138 */,
/* 139 */,
/* 140 */,
/* 141 */,
/* 142 */,
/* 143 */,
/* 144 */,
/* 145 */,
/* 146 */,
/* 147 */,
/* 148 */,
/* 149 */,
/* 150 */,
/* 151 */,
/* 152 */,
/* 153 */,
/* 154 */,
/* 155 */,
/* 156 */,
/* 157 */,
/* 158 */,
/* 159 */,
/* 160 */,
/* 161 */,
/* 162 */,
/* 163 */,
/* 164 */
/***/ (function(module, exports, __webpack_require__) {
// cube-ui input mixin (UMD): re-emits native 'change' and proxies focus/blur
// to the component's underlying input element (expects ref="input")
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports);
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports);
global.input = mod.exports;
}
})(this, function (module, exports) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var EVENT_CHANGE = 'change';
exports.default = {
methods: {
// note: name is spelled 'changeHander' in the original source; kept as-is
// because templates/components bind to it by this exact name
changeHander: function changeHander(e) {
this.$emit(EVENT_CHANGE, e);
},
focus: function focus() {
this.$refs.input.focus();
},
blur: function blur() {
this.$refs.input.blur();
}
}
};
module.exports = exports['default'];
});
/***/ }),
/* 165 */,
/* 166 */,
/* 167 */,
/* 168 */,
/* 169 */,
/* 170 */,
/* 171 */,
/* 172 */,
/* 173 */,
/* 174 */
/***/ (function(module, exports, __webpack_require__) {
function injectStyle (ssrContext) {
__webpack_require__(179)
}
var Component = __webpack_require__(9)(
/* script */
__webpack_require__(180),
/* template */
__webpack_require__(181),
/* styles */
injectStyle,
/* scopeId */
null,
/* moduleIdentifier (server only) */
null
)
module.exports = Component.exports
/***/ }),
/* 175 */,
/* 176 */,
/* 177 */,
/* 178 */,
/* 179 */
/***/ (function(module, exports) {
// removed by extract-text-webpack-plugin
/***/ }),
/* 180 */
/***/ (function(module, exports, __webpack_require__) {
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports, __webpack_require__(88), __webpack_require__(164)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports, require('babel-runtime/core-js/object/assign'), require('../../common/mixins/input'));
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports, global.assign, global.input);
global.input = mod.exports;
}
})(this, function (module, exports, _assign, _input) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _assign2 = _interopRequireDefault(_assign);
var _input2 = _interopRequireDefault(_input);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
var COMPONENT_NAME = 'cube-input';
var EVENT_INPUT = 'input';
var EVENT_BLUR = 'blur';
var EVENT_FOCUS = 'focus';
exports.default = {
name: COMPONENT_NAME,
mixins: [_input2.default],
props: {
value: [String, Number],
type: {
type: String,
default: 'text'
},
disabled: {
type: Boolean,
default: false
},
placeholder: String,
readonly: {
type: Boolean,
default: false
},
autofocus: {
type: Boolean,
default: false
},
autocomplete: {
type: [Boolean, String],
default: false
},
name: String,
id: String,
form: String,
minlength: Number,
maxlength: Number,
resize: String,
min: Number,
max: Number,
step: Number,
tabindex: String,
pattern: String,
clearable: {
type: [Boolean, Object],
default: false
},
eye: {
type: [Boolean, Object],
default: false
}
},
data: function data() {
return {
inputValue: this.value,
isFocus: false,
formatedClearable: {
visible: false,
blurHidden: true
},
formatedEye: {
open: false,
reverse: false
}
};
},
computed: {
_type: function _type() {
var type = this.type;
if (type === 'password' && this.eye && this.pwdVisible) {
return 'text';
}
return type;
},
_showClear: function _showClear() {
var visible = this.formatedClearable.visible && this.inputValue && !this.readonly && !this.disabled;
if (this.formatedClearable.blurHidden && !this.isFocus) {
visible = false;
}
return visible;
},
_showPwdEye: function _showPwdEye() {
return this.type === 'password' && this.eye && !this.disabled;
},
pwdVisible: function pwdVisible() {
var eye = this.formatedEye;
return eye.reverse ? !eye.open : eye.open;
},
eyeClass: function eyeClass() {
return this.formatedEye.open ? 'cubeic-eye-visible' : 'cubeic-eye-invisible';
}
},
watch: {
value: function value(newValue) {
this.inputValue = newValue;
},
inputValue: function inputValue(newValue) {
this.$emit(EVENT_INPUT, newValue);
},
clearable: {
handler: function handler() {
this.formatClearable();
},
deep: true,
immediate: true
},
eye: {
handler: function handler() {
this.formateEye();
},
deep: true,
immediate: true
}
},
methods: {
formatClearable: function formatClearable() {
if (typeof this.clearable === 'boolean') {
this.formatedClearable.visible = this.clearable;
} else {
(0, _assign2.default)(this.formatedClearable, this.clearable);
}
},
formateEye: function formateEye() {
if (typeof this.eye === 'boolean') {
this.formatedEye.open = this.eye;
} else {
(0, _assign2.default)(this.formatedEye, this.eye);
}
},
handleFocus: function handleFocus(e) {
this.$emit(EVENT_FOCUS, e);
this.isFocus = true;
},
handleBlur: function handleBlur(e) {
this.$emit(EVENT_BLUR, e);
this.isFocus = false;
},
handleClear: function handleClear(e) {
this.inputValue = '';
this.$refs.input.focus();
},
handlePwdEye: function handlePwdEye() {
this.formatedEye.open = !this.formatedEye.open;
}
}
};
module.exports = exports['default'];
});
/***/ }),
/* 181 */
/***/ (function(module, exports) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
return _c('div', {
staticClass: "cube-input",
class: {
'cube-input_active': _vm.isFocus
}
}, [(_vm.$slots.prepend) ? _c('div', {
staticClass: "cube-input-prepend"
}, [_vm._t("prepend")], 2) : _vm._e(), _vm._v(" "), ((_vm._type) === 'checkbox') ? _c('input', _vm._b({
directives: [{
name: "model",
rawName: "v-model",
value: (_vm.inputValue),
expression: "inputValue"
}],
ref: "input",
staticClass: "cube-input-field",
attrs: {
"disabled": _vm.disabled,
"readonly": _vm.readonly,
"autocomplete": _vm.autocomplete,
"autofocus": _vm.autofocus,
"type": "checkbox"
},
domProps: {
"checked": Array.isArray(_vm.inputValue) ? _vm._i(_vm.inputValue, null) > -1 : (_vm.inputValue)
},
on: {
"focus": _vm.handleFocus,
"blur": _vm.handleBlur,
"change": [function($event) {
var $$a = _vm.inputValue,
$$el = $event.target,
$$c = $$el.checked ? (true) : (false);
if (Array.isArray($$a)) {
var $$v = null,
$$i = _vm._i($$a, $$v);
if ($$el.checked) {
$$i < 0 && (_vm.inputValue = $$a.concat([$$v]))
} else {
$$i > -1 && (_vm.inputValue = $$a.slice(0, $$i).concat($$a.slice($$i + 1)))
}
} else {
_vm.inputValue = $$c
}
}, _vm.changeHander]
}
}, 'input', _vm.$props, false)) : ((_vm._type) === 'radio') ? _c('input', _vm._b({
directives: [{
name: "model",
rawName: "v-model",
value: (_vm.inputValue),
expression: "inputValue"
}],
ref: "input",
staticClass: "cube-input-field",
attrs: {
"disabled": _vm.disabled,
"readonly": _vm.readonly,
"autocomplete": _vm.autocomplete,
"autofocus": _vm.autofocus,
"type": "radio"
},
domProps: {
"checked": _vm._q(_vm.inputValue, null)
},
on: {
"focus": _vm.handleFocus,
"blur": _vm.handleBlur,
"change": [function($event) {
_vm.inputValue = null
}, _vm.changeHander]
}
}, 'input', _vm.$props, false)) : _c('input', _vm._b({
directives: [{
name: "model",
rawName: "v-model",
value: (_vm.inputValue),
expression: "inputValue"
}],
ref: "input",
staticClass: "cube-input-field",
attrs: {
"disabled": _vm.disabled,
"readonly": _vm.readonly,
"autocomplete": _vm.autocomplete,
"autofocus": _vm.autofocus,
"type": _vm._type
},
domProps: {
"value": (_vm.inputValue)
},
on: {
"focus": _vm.handleFocus,
"blur": _vm.handleBlur,
"change": _vm.changeHander,
"input": function($event) {
if ($event.target.composing) { return; }
_vm.inputValue = $event.target.value
}
}
}, 'input', _vm.$props, false)), _vm._v(" "), (_vm.$slots.append || _vm._showClear || _vm._showPwdEye) ? _c('div', {
staticClass: "cube-input-append"
}, [(_vm._showClear) ? _c('div', {
staticClass: "cube-input-clear",
on: {
"touchend": _vm.handleClear
}
}, [_c('i', {
staticClass: "cubeic-wrong"
})]) : _vm._e(), _vm._v(" "), (_vm._showPwdEye) ? _c('div', {
staticClass: "cube-input-eye",
on: {
"click": _vm.handlePwdEye
}
}, [_c('i', {
class: _vm.eyeClass
})]) : _vm._e(), _vm._v(" "), _vm._t("append")], 2) : _vm._e()])
},staticRenderFns: []}
/***/ }),
/* 182 */,
/* 183 */,
/* 184 */,
/* 185 */,
/* 186 */,
/* 187 */,
/* 188 */,
/* 189 */,
/* 190 */,
/* 191 */,
/* 192 */,
/* 193 */,
/* 194 */,
/* 195 */,
/* 196 */,
/* 197 */,
/* 198 */,
/* 199 */,
/* 200 */,
/* 201 */,
/* 202 */,
/* 203 */,
/* 204 */,
/* 205 */,
/* 206 */,
/* 207 */,
/* 208 */,
/* 209 */,
/* 210 */,
/* 211 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
exports.__esModule = true;
var _assign = __webpack_require__(88);
var _assign2 = _interopRequireDefault(_assign);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.default = _assign2.default || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
/***/ }),
/* 212 */,
/* 213 */,
/* 214 */,
/* 215 */,
/* 216 */,
/* 217 */,
/* 218 */,
/* 219 */,
/* 220 */,
/* 221 */,
/* 222 */,
/* 223 */,
/* 224 */,
/* 225 */,
/* 226 */,
/* 227 */,
/* 228 */,
/* 229 */,
/* 230 */,
/* 231 */,
/* 232 */,
/* 233 */,
/* 234 */,
/* 235 */,
/* 236 */,
/* 237 */,
/* 238 */,
/* 239 */,
/* 240 */,
/* 241 */,
/* 242 */,
/* 243 */,
/* 244 */,
/* 245 */,
/* 246 */,
/* 247 */,
/* 248 */,
/* 249 */,
/* 250 */,
/* 251 */,
/* 252 */,
/* 253 */,
/* 254 */,
/* 255 */,
/* 256 */,
/* 257 */,
/* 258 */,
/* 259 */,
/* 260 */,
/* 261 */,
/* 262 */,
/* 263 */,
/* 264 */,
/* 265 */,
/* 266 */,
/* 267 */,
/* 268 */,
/* 269 */,
/* 270 */,
/* 271 */,
/* 272 */,
/* 273 */,
/* 274 */,
/* 275 */,
/* 276 */,
/* 277 */,
/* 278 */,
/* 279 */,
/* 280 */,
/* 281 */,
/* 282 */,
/* 283 */,
/* 284 */,
/* 285 */,
/* 286 */,
/* 287 */,
/* 288 */,
/* 289 */,
/* 290 */,
/* 291 */,
/* 292 */,
/* 293 */,
/* 294 */
/***/ (function(module, exports, __webpack_require__) {
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports, __webpack_require__(295), __webpack_require__(84), __webpack_require__(174), __webpack_require__(299)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports, require('../../components/dialog/dialog.vue'), require('../../common/locale'), require('../../components/input/input.vue'), require('./api'));
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports, global.dialog, global.locale, global.input, global.api);
global.index = mod.exports;
}
})(this, function (module, exports, _dialog, _locale, _input, _api) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _dialog2 = _interopRequireDefault(_dialog);
var _locale2 = _interopRequireDefault(_locale);
var _input2 = _interopRequireDefault(_input);
var _api2 = _interopRequireDefault(_api);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
_dialog2.default.install = function (Vue) {
Vue.component(_input2.default.name, _input2.default);
Vue.component(_dialog2.default.name, _dialog2.default);
_locale2.default.install(Vue);
(0, _api2.default)(Vue, _dialog2.default);
};
_dialog2.default.Input = _input2.default;
exports.default = _dialog2.default;
module.exports = exports['default'];
});
/***/ }),
/* 295 */
/***/ (function(module, exports, __webpack_require__) {
function injectStyle (ssrContext) {
__webpack_require__(296)
}
var Component = __webpack_require__(9)(
/* script */
__webpack_require__(297),
/* template */
__webpack_require__(298),
/* styles */
injectStyle,
/* scopeId */
null,
/* moduleIdentifier (server only) */
null
)
module.exports = Component.exports
/***/ }),
/* 296 */
/***/ (function(module, exports) {
// removed by extract-text-webpack-plugin
/***/ }),
/* 297 */
/***/ (function(module, exports, __webpack_require__) {
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports, __webpack_require__(211), __webpack_require__(88), __webpack_require__(101), __webpack_require__(174), __webpack_require__(58), __webpack_require__(69), __webpack_require__(85)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports, require('babel-runtime/helpers/extends'), require('babel-runtime/core-js/object/assign'), require('../popup/popup.vue'), require('../input/input.vue'), require('../../common/mixins/visibility'), require('../../common/mixins/popup'), require('../../common/mixins/locale'));
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports, global._extends, global.assign, global.popup, global.input, global.visibility, global.popup, global.locale);
global.dialog = mod.exports;
}
})(this, function (module, exports, _extends2, _assign, _popup, _input, _visibility, _popup3, _locale) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _extends3 = _interopRequireDefault(_extends2);
var _assign2 = _interopRequireDefault(_assign);
var _popup2 = _interopRequireDefault(_popup);
var _input2 = _interopRequireDefault(_input);
var _visibility2 = _interopRequireDefault(_visibility);
var _popup4 = _interopRequireDefault(_popup3);
var _locale2 = _interopRequireDefault(_locale);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
var COMPONENT_NAME = 'cube-dialog';
var EVENT_CONFIRM = 'confirm';
var EVENT_CANCEL = 'cancel';
var EVENT_CLOSE = 'close';
var defHref = 'javascript:;';
var defConfirmBtn = {
textType: 'ok',
active: true,
disabled: false,
href: defHref
};
var defCancelBtn = {
textType: 'cancel',
active: false,
disabled: false,
href: defHref
};
var parseBtn = function parseBtn(btn, defBtn) {
if (typeof btn === 'string') {
btn = {
text: btn
};
}
var text = defBtn && this.$t(defBtn.textType);
return (0, _assign2.default)({}, defBtn, { text: text }, btn);
};
exports.default = {
name: COMPONENT_NAME,
mixins: [_visibility2.default, _popup4.default, _locale2.default],
props: {
type: {
type: String,
default: 'alert'
},
prompt: {
type: Object,
default: function _default() {
return {
value: '',
placeholder: ''
};
}
},
icon: {
type: String,
default: ''
},
title: {
type: String,
default: ''
},
content: {
type: String,
default: ''
},
showClose: {
type: Boolean,
default: false
},
confirmBtn: {
type: [Object, String],
default: function _default() {
return (0, _extends3.default)({}, defConfirmBtn);
}
},
cancelBtn: {
type: [Object, String],
default: function _default() {
return (0, _extends3.default)({}, defCancelBtn);
}
}
},
data: function data() {
return {
defHref: defHref,
promptValue: this.prompt.value
};
},
computed: {
_confirmBtn: function _confirmBtn() {
return parseBtn.call(this, this.confirmBtn, defConfirmBtn);
},
_cancelBtn: function _cancelBtn() {
return parseBtn.call(this, this.cancelBtn, defCancelBtn);
},
isConfirm: function isConfirm() {
return this.type === 'confirm';
},
isPrompt: function isPrompt() {
return this.type === 'prompt';
},
containerClass: function containerClass() {
return 'cube-dialog-' + this.type;
}
},
watch: {
'prompt.value': {
handler: function handler(newVal) {
this.promptValue = newVal;
}
}
},
methods: {
maskClick: function maskClick(e) {
this.maskClosable && this.cancel(e);
},
confirm: function confirm(e) {
if (this._confirmBtn.disabled) {
return;
}
this.hide();
this.$emit(EVENT_CONFIRM, e, this.promptValue);
},
cancel: function cancel(e) {
if (this._cancelBtn.disabled) {
return;
}
this.hide();
this.$emit(EVENT_CANCEL, e);
},
close: function close(e) {
this.hide();
this.$emit(EVENT_CLOSE, e);
}
},
components: {
CubePopup: _popup2.default,
CubeInput: _input2.default
}
};
module.exports = exports['default'];
});
/***/ }),
/* 298 */
/***/ (function(module, exports) {
module.exports={render:function (){var _vm=this;var _h=_vm.$createElement;var _c=_vm._self._c||_h;
return _c('transition', {
attrs: {
"name": "cube-dialog-fade"
}
}, [_c('cube-popup', {
directives: [{
name: "show",
rawName: "v-show",
value: (_vm.isVisible),
expression: "isVisible"
}],
attrs: {
"type": "dialog",
"z-index": _vm.zIndex,
"mask": true,
"center": true
},
on: {
"mask-click": _vm.maskClick
}
}, [_c('div', {
staticClass: "cube-dialog-main"
}, [_c('span', {
directives: [{
name: "show",
rawName: "v-show",
value: (_vm.showClose),
expression: "showClose"
}],
staticClass: "cube-dialog-close",
on: {
"click": _vm.close
}
}, [_c('i', {
staticClass: "cubeic-close"
})]), _vm._v(" "), _c('div', {
class: _vm.containerClass
}, [(_vm.icon) ? _c('p', {
staticClass: "cube-dialog-icon"
}, [_c('i', {
class: _vm.icon
})]) : _vm._e(), _vm._v(" "), (_vm.title || _vm.$slots.title) ? _c('h2', {
staticClass: "cube-dialog-title"
}, [_vm._t("title", [_c('p', {
staticClass: "cube-dialog-title-def"
}, [_vm._v(_vm._s(_vm.title))])])], 2) : _vm._e(), _vm._v(" "), _c('div', {
staticClass: "cube-dialog-content"
}, [_vm._t("content", [_c('div', {
staticClass: "cube-dialog-content-def"
}, [(_vm.content) ? _c('p', {
domProps: {
"innerHTML": _vm._s(_vm.content)
}
}) : _vm._e(), _vm._v(" "), (_vm.isPrompt) ? _c('cube-input', _vm._b({
model: {
value: (_vm.promptValue),
callback: function($$v) {
_vm.promptValue = $$v
},
expression: "promptValue"
}
}, 'cube-input', _vm.prompt, false)) : _vm._e()], 1)])], 2), _vm._v(" "), _c('div', {
staticClass: "cube-dialog-btns",
class: {
'border-right-1px': _vm.isConfirm || _vm.isPrompt
}
}, [_vm._t("btns", [(_vm.isConfirm || _vm.isPrompt) ? _c('a', {
staticClass: "cube-dialog-btn border-top-1px",
class: {
'cube-dialog-btn_highlight': _vm._cancelBtn.active, 'cube-dialog-btn_disabled': _vm._cancelBtn.disabled
},
attrs: {
"href": _vm._cancelBtn.href
},
on: {
"click": _vm.cancel
}
}, [_vm._v(_vm._s(_vm._cancelBtn.text))]) : _vm._e(), _vm._v(" "), _c('a', {
staticClass: "cube-dialog-btn border-top-1px",
class: {
'cube-dialog-btn_highlight': _vm._confirmBtn.active, 'cube-dialog-btn_disabled': _vm._confirmBtn.disabled
},
attrs: {
"href": _vm._confirmBtn.href
},
on: {
"click": _vm.confirm
}
}, [_vm._v(_vm._s(_vm._confirmBtn.text))])])], 2)])])])], 1)
},staticRenderFns: []}
/***/ }),
/* 299 */
/***/ (function(module, exports, __webpack_require__) {
var __WEBPACK_AMD_DEFINE_FACTORY__, __WEBPACK_AMD_DEFINE_ARRAY__, __WEBPACK_AMD_DEFINE_RESULT__;(function (global, factory) {
if (true) {
!(__WEBPACK_AMD_DEFINE_ARRAY__ = [module, exports, __webpack_require__(89)], __WEBPACK_AMD_DEFINE_FACTORY__ = (factory),
__WEBPACK_AMD_DEFINE_RESULT__ = (typeof __WEBPACK_AMD_DEFINE_FACTORY__ === 'function' ?
(__WEBPACK_AMD_DEFINE_FACTORY__.apply(exports, __WEBPACK_AMD_DEFINE_ARRAY__)) : __WEBPACK_AMD_DEFINE_FACTORY__),
__WEBPACK_AMD_DEFINE_RESULT__ !== undefined && (module.exports = __WEBPACK_AMD_DEFINE_RESULT__));
} else if (typeof exports !== "undefined") {
factory(module, exports, require('../../common/helpers/create-api'));
} else {
var mod = {
exports: {}
};
factory(mod, mod.exports, global.createApi);
global.api = mod.exports;
}
})(this, function (module, exports, _createApi) {
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = addDialog;
var _createApi2 = _interopRequireDefault(_createApi);
function _interopRequireDefault(obj) {
return obj && obj.__esModule ? obj : {
default: obj
};
}
function addDialog(Vue, Dialog) {
(0, _createApi2.default)(Vue, Dialog, ['confirm', 'cancel', 'close', 'btn-click', 'link-click'], true);
}
module.exports = exports['default'];
});
/***/ })
/******/ ]);
}); | interopRequireDefault( |
msg.rs | use cosmwasm_std::{Uint128, Addr};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct InstantiateMsg {
pub TOMB: String,
pub SHIBA: String,
pub POOLSTARTTIME: Uint128
}
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum ExecuteMsg {
Add {
alloc_point: Uint128,
token: Addr,
with_update: bool,
last_reward_time: Uint128
},
Set {
pid: Uint128,
alloc_point: Uint128,
},
MassUpdatePools{},
UpdatePool{
pid: Uint128
},
Deposit{
pid: Uint128,
amount: Uint128
},
Withdraw{
pid: Uint128,
amount: Uint128
},
EmergencyWithdraw{
pid: Uint128
},
SetOperator{
operator: Addr
},
GovernanceRecoverUnsupported{
token: Addr,
amount: Uint128,
to: Addr
},
}
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum | {
GetOwner{ },
GetGeneratedReward{
from_time: Uint128,
to_time: Uint128
},
PendingTomb{
pid: Uint128,
user: Addr
},
GetPoolInfo{ },
GetUserInfo{
pid: Uint128,
user: Addr
}
}
// Info of each user.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct UserInfo {
pub amount: Uint128, // How many tokens the user has provided.
pub rewardDebt: Uint128, // Reward debt. See explanation below.
}
// Info of each pool.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]
pub struct PoolInfo {
pub token: Addr, // Address of LP token contract.
pub allocPoint: Uint128, // How many allocation points assigned to this pool. TOMB to distribute.
pub lastRewardTime: Uint128, // Last time that TOMB distribution occurs.
pub accTombPerShare: Uint128, // Accumulated TOMB per share, times 1e18. See below.
pub isStarted: bool, // if lastRewardBlock has passed
} | QueryMsg |
requestResponse_test.go | package wrphttp
import (
"context"
"net/http/httptest"
"testing"
"github.com/Comcast/webpa-common/wrp"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func testRequestContextDefault(t *testing.T) {
assert := assert.New(t)
assert.Equal(context.Background(), new(Request).Context())
}
func testRequestContextCustom(t *testing.T) {
var (
assert = assert.New(t)
ctx = context.WithValue(context.Background(), "asdf", "poiuy")
r = Request{ctx: ctx}
)
assert.Equal(ctx, r.Context())
}
func testRequestWithContextNil(t *testing.T) {
assert := assert.New(t)
assert.Panics(func() {
new(Request).WithContext(nil)
})
}
func testRequestWithContextCustom(t *testing.T) {
var (
assert = assert.New(t)
require = require.New(t)
ctx = context.WithValue(context.Background(), "homer", "simpson")
r = &Request{
Original: httptest.NewRequest("GET", "/", nil),
Entity: new(Entity),
}
c = r.WithContext(ctx)
)
require.NotNil(c)
assert.False(r == c)
assert.True(r.Entity == c.Entity)
assert.Equal(r.Original, c.Original)
assert.Equal(ctx, c.Context())
}
func TestRequest(t *testing.T) {
t.Run("Context", func(t *testing.T) {
t.Run("Default", testRequestContextDefault)
t.Run("Custom", testRequestContextCustom)
})
t.Run("WithContext", func(t *testing.T) {
t.Run("Nil", testRequestWithContextNil)
t.Run("Custom", testRequestWithContextCustom)
})
}
func testEntityResponseWriterInvalidAccept(t *testing.T) {
var (
assert = assert.New(t)
require = require.New(t)
erw = NewEntityResponseWriter(wrp.Msgpack)
httpResponse = httptest.NewRecorder()
wrpRequest = &Request{
Original: httptest.NewRequest("POST", "/", nil),
}
)
require.NotNil(erw)
wrpRequest.Original.Header.Set("Accept", "asd;lfkjasdfkjasdfkjasdf")
wrpResponse, err := erw(httpResponse, wrpRequest)
assert.Nil(wrpResponse)
assert.Error(err)
}
func testEntityResponseWriterSuccess(t *testing.T, defaultFormat, expectedFormat wrp.Format, accept string) {
var (
assert = assert.New(t)
require = require.New(t)
erw = NewEntityResponseWriter(defaultFormat)
httpResponse = httptest.NewRecorder()
wrpRequest = &Request{
Original: httptest.NewRequest("POST", "/", nil),
}
expected = &wrp.Message{
Type: wrp.SimpleRequestResponseMessageType,
ContentType: "text/plain",
Payload: []byte("hi there"),
}
)
require.NotNil(erw)
wrpRequest.Original.Header.Set("Accept", accept)
wrpResponse, err := erw(httpResponse, wrpRequest)
require.NoError(err)
require.NotNil(wrpResponse)
count, err := wrpResponse.WriteWRP(expected)
require.NoError(err)
assert.True(count > 0)
actual := new(wrp.Message)
assert.NoError(wrp.NewDecoder(httpResponse.Body, expectedFormat).Decode(actual))
assert.Equal(*expected, *actual)
}
func TestEntityResponseWriter(t *testing.T) {
t.Run("InvalidAccept", testEntityResponseWriterInvalidAccept)
t.Run("Success", func(t *testing.T) {
for _, defaultFormat := range wrp.AllFormats() {
t.Run(defaultFormat.String(), func(t *testing.T) {
testEntityResponseWriterSuccess(t, defaultFormat, defaultFormat, "")
for _, accept := range wrp.AllFormats() {
t.Run(accept.String(), func(t *testing.T) {
testEntityResponseWriterSuccess(t, defaultFormat, accept, accept.ContentType())
})
}
})
}
}) | } |
|
bench.rs | use crate::command_prelude::*;
use cargo::ops::{self, TestOptions};
pub fn cli() -> App {
subcommand("bench")
.setting(AppSettings::TrailingVarArg)
.about("Execute all benchmarks of a local package")
.arg(opt("quiet", "No output printed to stdout").short("q"))
.arg(
Arg::with_name("BENCHNAME")
.help("If specified, only run benches containing this string in their names"),
)
.arg(
Arg::with_name("args") | .last(true),
)
.arg_targets_all(
"Benchmark only this package's library",
"Benchmark only the specified binary",
"Benchmark all binaries",
"Benchmark only the specified example",
"Benchmark all examples",
"Benchmark only the specified test target",
"Benchmark all tests",
"Benchmark only the specified bench target",
"Benchmark all benches",
"Benchmark all targets",
)
.arg(opt("no-run", "Compile, but don't run benchmarks"))
.arg_package_spec(
"Package to run benchmarks for",
"Benchmark all packages in the workspace",
"Exclude packages from the benchmark",
)
.arg_jobs()
.arg_features()
.arg_target_triple("Build for the target triple")
.arg_target_dir()
.arg_manifest_path()
.arg_ignore_rust_version()
.arg_message_format()
.arg(opt(
"no-fail-fast",
"Run all benchmarks regardless of failure",
))
.arg_unit_graph()
.after_help("Run `cargo help bench` for more detailed information.\n")
}
pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult {
let ws = args.workspace(config)?;
let mut compile_opts = args.compile_options(
config,
CompileMode::Bench,
Some(&ws),
ProfileChecking::Checked,
)?;
compile_opts.build_config.requested_profile =
args.get_profile_name(config, "bench", ProfileChecking::Checked)?;
let ops = TestOptions {
no_run: args.is_present("no-run"),
no_fail_fast: args.is_present("no-fail-fast"),
compile_opts,
};
let bench_args = args.value_of("BENCHNAME").into_iter();
let bench_args = bench_args.chain(args.values_of("args").unwrap_or_default());
let bench_args = bench_args.collect::<Vec<_>>();
let err = ops::run_benches(&ws, &ops, &bench_args)?;
match err {
None => Ok(()),
Some(err) => Err(match err.code {
Some(i) => CliError::new(anyhow::format_err!("bench failed"), i),
None => CliError::new(err.into(), 101),
}),
}
} | .help("Arguments for the bench binary")
.multiple(true) |
index.ts | // tracing: off
import { flow } from "@effect-ts/core/Function"
import { runEither } from "@effect-ts/core/Sync"
import type { M, Summoner } from "../Batteries/summoner"
import { summonFor } from "../Batteries/summoner"
import type { Materialized } from "../Batteries/usage/materializer"
import type {
SummonerEnv,
SummonerInterpURI,
SummonerProgURI
} from "../Batteries/usage/summoner"
import type { DecoderType, DecoderURI } from "./base"
import type { Decoder } from "./common"
import { modelDecoderInterpreter } from "./interpreter"
export { decoderExtension } from "./interpreter"
| Context,
ContextEntry,
Decode,
Decoder,
Errors,
fail,
failures,
makeDecoder,
Validate,
Validation,
ValidationError,
Reporter
} from "./common"
export {
report,
formatValidationErrors,
formatValidationError,
TYPE_MAX_LEN,
ReporterOptions
} from "./reporters"
export function deriveFor<S extends Summoner<any>>(S: S) {
return (_: {
[k in DecoderURI & keyof SummonerEnv<S>]: SummonerEnv<S>[k]
}) =>
<L, A>(
F: Materialized<SummonerEnv<S>, L, A, SummonerProgURI<S>, SummonerInterpURI<S>>
) =>
F.derive(modelDecoderInterpreter<SummonerEnv<S>>())(_)
}
const decoders = new Map<any, any>()
const defDerive = deriveFor(summonFor({}).make)({})
export function decoder<E, A>(F: M<{}, E, A>): Decoder<A> {
if (decoders.has(F)) {
return decoders.get(F).decoder
}
const d = defDerive(F)
decoders.set(F, d)
return d.decoder
}
export function decoderType<E, A>(F: M<{}, E, A>): DecoderType<A> {
if (decoders.has(F)) {
return decoders.get(F)
}
const d = defDerive(F)
decoders.set(F, d)
return d
}
export function runDecode<E, A>(F: M<{}, E, A>) {
return flow(decoder(F).decode, runEither)
}
export function decode<E, A>(F: M<{}, E, A>) {
return decoder(F).decode
} | export { DecoderType, DecoderURI, decoderApplyConfig } from "./base"
export {
appendContext, |
bytes.rs | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use crate::types::{Intern, RawInternKey};
use core::cmp::Ordering;
use fnv::FnvHashMap;
use lazy_static::lazy_static;
use parking_lot::RwLock;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use std::fmt;
use std::num::NonZeroU32;
use std::str::FromStr;
use std::sync::Arc;
/// Slices of bytes intern as BytesKey
impl Intern for &[u8] {
type Key = BytesKey;
fn intern(self) -> Self::Key {
BytesKey(BYTES_TABLE.intern(self))
}
}
/// Owned strings intern as StringKey, with the interning
/// based on the raw bytes of the string
impl Intern for String {
type Key = StringKey;
fn intern(self) -> Self::Key {
StringKey(BYTES_TABLE.intern(self.as_bytes()))
}
}
/// Str (slices) intern as StringKey, with the interning
/// based on the raw bytes of the str.
impl Intern for &str {
type Key = StringKey;
fn intern(self) -> Self::Key {
StringKey(BYTES_TABLE.intern(self.as_bytes()))
}
}
/// Interned bytes
#[derive(Copy, Clone, Eq, Ord, Hash, PartialEq, PartialOrd)]
pub struct BytesKey(RawInternKey);
impl BytesKey {
pub fn lookup(self) -> &'static [u8] {
BYTES_TABLE.lookup(self.0)
}
}
impl fmt::Debug for BytesKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let bytes_value = self.lookup();
write!(f, "{:?}", bytes_value)
}
}
/// An interned string
#[derive(Copy, Clone, Eq, Hash, PartialEq)]
pub struct StringKey(RawInternKey);
impl Ord for StringKey {
fn cmp(&self, other: &Self) -> Ordering {
self.lookup().cmp(&other.lookup())
}
}
impl PartialOrd for StringKey {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.lookup().partial_cmp(&other.lookup())
}
}
impl FromStr for StringKey {
type Err = std::convert::Infallible;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(s.intern())
}
}
impl StringKey {
/// Get a reference to the original str.
pub fn lookup(self) -> &'static str {
let bytes = BYTES_TABLE.lookup(self.0);
// This is safe because the bytes we are converting originally came
// from a str when we interned it: the only way to get a StringKey is
// to intern an (already valid) string, so if we have a StringKey then
// its bytes must be valid UTF-8.
unsafe { std::str::from_utf8_unchecked(bytes) }
}
/// Convert the interned string key into an interned bytes key. Because
/// strings intern as their raw bytes, this is an O(1) operation.
/// Note the reverse (BytesKey.as_str) is a fallible operation since
/// the bytes may not be valid UTF-8.
pub fn as_bytes(self) -> BytesKey {
BytesKey(self.0)
}
}
impl fmt::Debug for StringKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let str_value = self.lookup();
write!(f, "{:?}", str_value)
}
}
impl fmt::Display for StringKey {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let str_value = self.lookup();
write!(f, "{}", str_value)
}
}
impl Serialize for StringKey {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(self.lookup())
}
}
impl<'de> Deserialize<'de> for StringKey {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
Deserialize::deserialize(deserializer).map(|s: String| s.intern())
}
}
// Static table used in the bytes/str Intern implementations
lazy_static! {
static ref BYTES_TABLE: BytesTable = BytesTable::new();
}
/// Similar to the generic `InternTable` but customized for sequences of raw bytes (and strings).
pub struct BytesTable {
data: Arc<RwLock<BytesTableData>>,
}
impl BytesTable {
pub fn new() -> Self {
Self {
data: Arc::new(RwLock::new(BytesTableData::new())),
}
}
pub fn intern(&self, value: &[u8]) -> RawInternKey {
if let Some(prev) = self.data.read().get(&value) {
return prev;
}
let mut writer = self.data.write();
writer.intern(value)
}
pub fn lookup(&self, key: RawInternKey) -> &'static [u8] {
self.data.read().lookup(key)
}
}
/// BytesTableData is similar to InternTableData but customized for sequences
/// of raw bytes (and notably, strings).
struct BytesTableData {
// Raw data storage, allocated in large chunks
buffer: Option<&'static mut [u8]>,
// Reverse mapping of index=>value, used to convert an
// interned key back to (a reference to) its value
items: Vec<&'static [u8]>,
// Mapping of values to their interned indices
table: FnvHashMap<&'static [u8], RawInternKey>,
}
impl BytesTableData {
const BUFFER_SIZE: usize = 4096;
pub fn new() -> Self {
Self {
buffer: Some(Self::new_buffer()),
items: vec![
// Add buffer value so the used index starts at 1
// and we can use a NonZero type.
b"<sentinel>",
],
table: Default::default(),
}
}
fn new_buffer() -> &'static mut [u8] {
Box::leak(Box::new([0; Self::BUFFER_SIZE]))
}
pub fn get(&self, value: &[u8]) -> Option<RawInternKey> {
self.table.get(value).cloned()
}
// Copy the byte slice into 'static memory by appending it to a buffer, if there is room.
// If the buffer fills up and the value is small, start over with a new buffer.
// If the value is large, just give it its own memory.
fn alloc(&mut self, value: &[u8]) -> &'static [u8] {
let len = value.len();
let mut buffer = self.buffer.take().unwrap();
if len > buffer.len() {
if len >= Self::BUFFER_SIZE / 16 | else {
buffer = Self::new_buffer()
}
}
let (mem, remaining) = buffer.split_at_mut(len);
mem.copy_from_slice(value);
self.buffer = Some(remaining);
mem
}
pub fn intern(&mut self, value: &[u8]) -> RawInternKey {
// If there's an existing value return it
if let Some(prev) = self.get(&value) {
return prev;
}
// Otherwise intern
let key = RawInternKey::new(unsafe {
// Safe because we initialize `self.items` with a sentinel value
NonZeroU32::new_unchecked(self.items.len() as u32)
});
let static_bytes = self.alloc(value);
self.items.push(static_bytes);
self.table.insert(static_bytes, key);
key
}
pub fn lookup(&self, key: RawInternKey) -> &'static [u8] {
let index = key.as_usize();
self.items[index]
}
}
| {
// This byte slice is so big it can just have its own memory.
self.buffer = Some(buffer);
return Box::leak(value.into());
} |
where.go | // Code generated by entc, DO NOT EDIT.
package role
import (
"time"
"entgo.io/ent/dialect/sql"
"github.com/lalifeier/vvgo-mall/app/shop/admin/internal/data/ent/predicate"
)
// ID filters vertices based on their ID field.
func ID(id int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldID), id))
})
}
// IDEQ applies the EQ predicate on the ID field.
func IDEQ(id int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldID), id))
})
}
// IDNEQ applies the NEQ predicate on the ID field.
func IDNEQ(id int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldID), id))
})
}
// IDIn applies the In predicate on the ID field.
func IDIn(ids ...int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(ids) == 0 {
s.Where(sql.False())
return
}
v := make([]interface{}, len(ids))
for i := range v {
v[i] = ids[i]
}
s.Where(sql.In(s.C(FieldID), v...))
})
}
// IDNotIn applies the NotIn predicate on the ID field.
func IDNotIn(ids ...int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(ids) == 0 {
s.Where(sql.False())
return
}
v := make([]interface{}, len(ids))
for i := range v {
v[i] = ids[i]
}
s.Where(sql.NotIn(s.C(FieldID), v...))
})
}
// IDGT applies the GT predicate on the ID field.
func IDGT(id int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldID), id))
})
}
// IDGTE applies the GTE predicate on the ID field.
func IDGTE(id int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldID), id))
})
}
// IDLT applies the LT predicate on the ID field.
func IDLT(id int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldID), id))
})
}
// IDLTE applies the LTE predicate on the ID field.
func IDLTE(id int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldID), id))
})
}
// CreatedAt applies equality check predicate on the "created_at" field. It's identical to CreatedAtEQ.
func CreatedAt(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldCreatedAt), v))
})
}
// CreatedBy applies equality check predicate on the "created_by" field. It's identical to CreatedByEQ.
func CreatedBy(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldCreatedBy), v))
})
}
// UpdatedAt applies equality check predicate on the "updated_at" field. It's identical to UpdatedAtEQ.
func UpdatedAt(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
})
}
// UpdatedBy applies equality check predicate on the "updated_by" field. It's identical to UpdatedByEQ.
func UpdatedBy(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldUpdatedBy), v))
})
}
// Name applies equality check predicate on the "name" field. It's identical to NameEQ.
func Name(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldName), v))
})
}
// Sort applies equality check predicate on the "sort" field. It's identical to SortEQ.
func Sort(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldSort), v))
})
}
// Status applies equality check predicate on the "status" field. It's identical to StatusEQ.
func Status(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldStatus), v))
})
}
// Remark applies equality check predicate on the "remark" field. It's identical to RemarkEQ.
func Remark(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldRemark), v))
})
}
// CreatedAtEQ applies the EQ predicate on the "created_at" field.
func CreatedAtEQ(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldCreatedAt), v))
})
}
// CreatedAtNEQ applies the NEQ predicate on the "created_at" field.
func CreatedAtNEQ(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldCreatedAt), v))
})
}
// CreatedAtIn applies the In predicate on the "created_at" field.
func CreatedAtIn(vs ...time.Time) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.In(s.C(FieldCreatedAt), v...))
})
}
// CreatedAtNotIn applies the NotIn predicate on the "created_at" field.
func CreatedAtNotIn(vs ...time.Time) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.NotIn(s.C(FieldCreatedAt), v...))
})
}
// CreatedAtGT applies the GT predicate on the "created_at" field.
func CreatedAtGT(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldCreatedAt), v))
})
}
// CreatedAtGTE applies the GTE predicate on the "created_at" field.
func CreatedAtGTE(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldCreatedAt), v))
})
}
// CreatedAtLT applies the LT predicate on the "created_at" field.
func CreatedAtLT(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldCreatedAt), v))
})
}
// CreatedAtLTE applies the LTE predicate on the "created_at" field.
func CreatedAtLTE(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldCreatedAt), v))
})
}
// CreatedByEQ applies the EQ predicate on the "created_by" field.
func CreatedByEQ(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldCreatedBy), v))
})
}
// CreatedByNEQ applies the NEQ predicate on the "created_by" field.
func CreatedByNEQ(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldCreatedBy), v))
})
}
// CreatedByIn applies the In predicate on the "created_by" field.
func CreatedByIn(vs ...int64) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.In(s.C(FieldCreatedBy), v...))
})
}
// CreatedByNotIn applies the NotIn predicate on the "created_by" field.
func CreatedByNotIn(vs ...int64) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.NotIn(s.C(FieldCreatedBy), v...))
})
}
// CreatedByGT applies the GT predicate on the "created_by" field.
func CreatedByGT(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldCreatedBy), v))
})
}
// CreatedByGTE applies the GTE predicate on the "created_by" field.
func CreatedByGTE(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldCreatedBy), v))
})
}
// CreatedByLT applies the LT predicate on the "created_by" field.
func CreatedByLT(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldCreatedBy), v))
})
}
// CreatedByLTE applies the LTE predicate on the "created_by" field.
func CreatedByLTE(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldCreatedBy), v))
})
}
// UpdatedAtEQ applies the EQ predicate on the "updated_at" field.
func UpdatedAtEQ(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldUpdatedAt), v))
})
}
// UpdatedAtNEQ applies the NEQ predicate on the "updated_at" field.
func UpdatedAtNEQ(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldUpdatedAt), v))
})
}
// UpdatedAtIn applies the In predicate on the "updated_at" field.
func UpdatedAtIn(vs ...time.Time) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.In(s.C(FieldUpdatedAt), v...))
})
}
// UpdatedAtNotIn applies the NotIn predicate on the "updated_at" field.
func UpdatedAtNotIn(vs ...time.Time) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.NotIn(s.C(FieldUpdatedAt), v...))
})
}
// UpdatedAtGT applies the GT predicate on the "updated_at" field.
func UpdatedAtGT(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldUpdatedAt), v))
})
}
// UpdatedAtGTE applies the GTE predicate on the "updated_at" field.
func UpdatedAtGTE(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldUpdatedAt), v))
})
}
// UpdatedAtLT applies the LT predicate on the "updated_at" field.
func UpdatedAtLT(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldUpdatedAt), v))
})
}
// UpdatedAtLTE applies the LTE predicate on the "updated_at" field.
func UpdatedAtLTE(v time.Time) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldUpdatedAt), v))
})
}
// UpdatedByEQ applies the EQ predicate on the "updated_by" field.
func UpdatedByEQ(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldUpdatedBy), v))
})
}
// UpdatedByNEQ applies the NEQ predicate on the "updated_by" field.
func UpdatedByNEQ(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldUpdatedBy), v))
})
}
// UpdatedByIn applies the In predicate on the "updated_by" field.
func UpdatedByIn(vs ...int64) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.In(s.C(FieldUpdatedBy), v...))
})
}
// UpdatedByNotIn applies the NotIn predicate on the "updated_by" field.
func UpdatedByNotIn(vs ...int64) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.NotIn(s.C(FieldUpdatedBy), v...))
})
}
// UpdatedByGT applies the GT predicate on the "updated_by" field.
func UpdatedByGT(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldUpdatedBy), v))
})
}
// UpdatedByGTE applies the GTE predicate on the "updated_by" field.
func UpdatedByGTE(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldUpdatedBy), v))
})
}
// UpdatedByLT applies the LT predicate on the "updated_by" field.
func UpdatedByLT(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldUpdatedBy), v))
})
}
// UpdatedByLTE applies the LTE predicate on the "updated_by" field.
func UpdatedByLTE(v int64) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldUpdatedBy), v))
})
}
// NameEQ applies the EQ predicate on the "name" field.
func NameEQ(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldName), v))
})
}
// NameNEQ applies the NEQ predicate on the "name" field.
func NameNEQ(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldName), v))
})
}
// NameIn applies the In predicate on the "name" field.
func NameIn(vs ...string) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.In(s.C(FieldName), v...))
})
}
// NameNotIn applies the NotIn predicate on the "name" field.
func NameNotIn(vs ...string) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.NotIn(s.C(FieldName), v...))
})
}
// NameGT applies the GT predicate on the "name" field.
func NameGT(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldName), v))
})
}
// NameGTE applies the GTE predicate on the "name" field.
func NameGTE(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldName), v))
})
}
// NameLT applies the LT predicate on the "name" field.
func NameLT(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldName), v))
})
}
// NameLTE applies the LTE predicate on the "name" field.
func NameLTE(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldName), v))
})
}
// NameContains applies the Contains predicate on the "name" field.
func NameContains(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.Contains(s.C(FieldName), v))
})
}
// NameHasPrefix applies the HasPrefix predicate on the "name" field.
func NameHasPrefix(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.HasPrefix(s.C(FieldName), v))
})
}
// NameHasSuffix applies the HasSuffix predicate on the "name" field.
func NameHasSuffix(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.HasSuffix(s.C(FieldName), v))
})
}
// NameEqualFold applies the EqualFold predicate on the "name" field.
func | (v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EqualFold(s.C(FieldName), v))
})
}
// NameContainsFold applies the ContainsFold predicate on the "name" field.
func NameContainsFold(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.ContainsFold(s.C(FieldName), v))
})
}
// SortEQ applies the EQ predicate on the "sort" field.
func SortEQ(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldSort), v))
})
}
// SortNEQ applies the NEQ predicate on the "sort" field.
func SortNEQ(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldSort), v))
})
}
// SortIn applies the In predicate on the "sort" field.
func SortIn(vs ...int8) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.In(s.C(FieldSort), v...))
})
}
// SortNotIn applies the NotIn predicate on the "sort" field.
func SortNotIn(vs ...int8) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.NotIn(s.C(FieldSort), v...))
})
}
// SortGT applies the GT predicate on the "sort" field.
func SortGT(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldSort), v))
})
}
// SortGTE applies the GTE predicate on the "sort" field.
func SortGTE(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldSort), v))
})
}
// SortLT applies the LT predicate on the "sort" field.
func SortLT(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldSort), v))
})
}
// SortLTE applies the LTE predicate on the "sort" field.
func SortLTE(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldSort), v))
})
}
// StatusEQ applies the EQ predicate on the "status" field.
func StatusEQ(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldStatus), v))
})
}
// StatusNEQ applies the NEQ predicate on the "status" field.
func StatusNEQ(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldStatus), v))
})
}
// StatusIn applies the In predicate on the "status" field.
func StatusIn(vs ...int8) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.In(s.C(FieldStatus), v...))
})
}
// StatusNotIn applies the NotIn predicate on the "status" field.
func StatusNotIn(vs ...int8) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.NotIn(s.C(FieldStatus), v...))
})
}
// StatusGT applies the GT predicate on the "status" field.
func StatusGT(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldStatus), v))
})
}
// StatusGTE applies the GTE predicate on the "status" field.
func StatusGTE(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldStatus), v))
})
}
// StatusLT applies the LT predicate on the "status" field.
func StatusLT(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldStatus), v))
})
}
// StatusLTE applies the LTE predicate on the "status" field.
func StatusLTE(v int8) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldStatus), v))
})
}
// RemarkEQ applies the EQ predicate on the "remark" field.
func RemarkEQ(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EQ(s.C(FieldRemark), v))
})
}
// RemarkNEQ applies the NEQ predicate on the "remark" field.
func RemarkNEQ(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.NEQ(s.C(FieldRemark), v))
})
}
// RemarkIn applies the In predicate on the "remark" field.
func RemarkIn(vs ...string) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.In(s.C(FieldRemark), v...))
})
}
// RemarkNotIn applies the NotIn predicate on the "remark" field.
func RemarkNotIn(vs ...string) predicate.Role {
v := make([]interface{}, len(vs))
for i := range v {
v[i] = vs[i]
}
return predicate.Role(func(s *sql.Selector) {
// if not arguments were provided, append the FALSE constants,
// since we can't apply "IN ()". This will make this predicate falsy.
if len(v) == 0 {
s.Where(sql.False())
return
}
s.Where(sql.NotIn(s.C(FieldRemark), v...))
})
}
// RemarkGT applies the GT predicate on the "remark" field.
func RemarkGT(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GT(s.C(FieldRemark), v))
})
}
// RemarkGTE applies the GTE predicate on the "remark" field.
func RemarkGTE(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.GTE(s.C(FieldRemark), v))
})
}
// RemarkLT applies the LT predicate on the "remark" field.
func RemarkLT(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LT(s.C(FieldRemark), v))
})
}
// RemarkLTE applies the LTE predicate on the "remark" field.
func RemarkLTE(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.LTE(s.C(FieldRemark), v))
})
}
// RemarkContains applies the Contains predicate on the "remark" field.
func RemarkContains(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.Contains(s.C(FieldRemark), v))
})
}
// RemarkHasPrefix applies the HasPrefix predicate on the "remark" field.
func RemarkHasPrefix(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.HasPrefix(s.C(FieldRemark), v))
})
}
// RemarkHasSuffix applies the HasSuffix predicate on the "remark" field.
func RemarkHasSuffix(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.HasSuffix(s.C(FieldRemark), v))
})
}
// RemarkEqualFold applies the EqualFold predicate on the "remark" field.
func RemarkEqualFold(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.EqualFold(s.C(FieldRemark), v))
})
}
// RemarkContainsFold applies the ContainsFold predicate on the "remark" field.
func RemarkContainsFold(v string) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s.Where(sql.ContainsFold(s.C(FieldRemark), v))
})
}
// And groups predicates with the AND operator between them.
func And(predicates ...predicate.Role) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for _, p := range predicates {
p(s1)
}
s.Where(s1.P())
})
}
// Or groups predicates with the OR operator between them.
func Or(predicates ...predicate.Role) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
s1 := s.Clone().SetP(nil)
for i, p := range predicates {
if i > 0 {
s1.Or()
}
p(s1)
}
s.Where(s1.P())
})
}
// Not applies the not operator on the given predicate.
func Not(p predicate.Role) predicate.Role {
return predicate.Role(func(s *sql.Selector) {
p(s.Not())
})
}
| NameEqualFold |
custom_test.go | package formatter
import (
"reflect"
"strings"
"testing"
"time"
"github.com/hyperhq/hyper-api/types"
"github.com/hyperhq/hypercli/pkg/stringid"
)
func TestContainerPsContext(t *testing.T) {
containerID := stringid.GenerateRandomID()
unix := time.Now().Unix()
var ctx containerContext
cases := []struct {
container types.Container
trunc bool
expValue string
expHeader string
call func() string
}{
{types.Container{ID: containerID}, true, stringid.TruncateID(containerID), containerIDHeader, ctx.ID},
{types.Container{ID: containerID}, false, containerID, containerIDHeader, ctx.ID},
{types.Container{Names: []string{"/foobar_baz"}}, true, "foobar_baz", namesHeader, ctx.Names},
{types.Container{Image: "ubuntu"}, true, "ubuntu", imageHeader, ctx.Image},
{types.Container{Image: "verylongimagename"}, true, "verylongimagename", imageHeader, ctx.Image},
{types.Container{Image: "verylongimagename"}, false, "verylongimagename", imageHeader, ctx.Image},
{types.Container{
Image: "a5a665ff33eced1e0803148700880edab4",
ImageID: "a5a665ff33eced1e0803148700880edab4269067ed77e27737a708d0d293fbf5",
},
true,
"a5a665ff33ec",
imageHeader,
ctx.Image,
},
{types.Container{
Image: "a5a665ff33eced1e0803148700880edab4",
ImageID: "a5a665ff33eced1e0803148700880edab4269067ed77e27737a708d0d293fbf5",
},
false,
"a5a665ff33eced1e0803148700880edab4",
imageHeader,
ctx.Image,
},
{types.Container{Image: ""}, true, "<no image>", imageHeader, ctx.Image},
{types.Container{Command: "sh -c 'ls -la'"}, true, `"sh -c 'ls -la'"`, commandHeader, ctx.Command},
{types.Container{Created: unix}, true, time.Unix(unix, 0).String(), createdAtHeader, ctx.CreatedAt},
{types.Container{Ports: []types.Port{{PrivatePort: 8080, PublicPort: 8080, Type: "tcp"}}}, true, "8080/tcp", portsHeader, ctx.Ports},
{types.Container{Status: "RUNNING"}, true, "RUNNING", statusHeader, ctx.Status},
{types.Container{SizeRw: 10}, true, "10 B", sizeHeader, ctx.Size},
{types.Container{SizeRw: 10, SizeRootFs: 20}, true, "10 B (virtual 20 B)", sizeHeader, ctx.Size},
{types.Container{}, true, "", labelsHeader, ctx.Labels},
{types.Container{Labels: map[string]string{"cpu": "6", "storage": "ssd"}}, true, "cpu=6,storage=ssd", labelsHeader, ctx.Labels},
{types.Container{Created: unix}, true, "Less than a second", runningForHeader, ctx.RunningFor},
}
for _, c := range cases {
ctx = containerContext{c: c.container, trunc: c.trunc}
v := c.call()
if strings.Contains(v, ",") {
compareMultipleValues(t, v, c.expValue)
} else if v != c.expValue {
t.Fatalf("Expected %s, was %s\n", c.expValue, v)
}
h := ctx.fullHeader()
if h != c.expHeader {
t.Fatalf("Expected %s, was %s\n", c.expHeader, h)
}
}
c1 := types.Container{Labels: map[string]string{"com.docker.swarm.swarm-id": "33", "com.docker.swarm.node_name": "ubuntu"}}
ctx = containerContext{c: c1, trunc: true}
sid := ctx.Label("com.docker.swarm.swarm-id")
node := ctx.Label("com.docker.swarm.node_name")
if sid != "33" {
t.Fatalf("Expected 33, was %s\n", sid)
}
if node != "ubuntu" {
t.Fatalf("Expected ubuntu, was %s\n", node)
}
h := ctx.fullHeader()
if h != "SWARM ID\tNODE NAME" {
t.Fatalf("Expected %s, was %s\n", "SWARM ID\tNODE NAME", h)
}
c2 := types.Container{}
ctx = containerContext{c: c2, trunc: true}
label := ctx.Label("anything.really")
if label != "" {
t.Fatalf("Expected an empty string, was %s", label)
}
ctx = containerContext{c: c2, trunc: true}
fullHeader := ctx.fullHeader()
if fullHeader != "" {
t.Fatalf("Expected fullHeader to be empty, was %s", fullHeader)
}
}
func TestImagesContext(t *testing.T) {
imageID := stringid.GenerateRandomID()
unix := time.Now().Unix()
var ctx imageContext
cases := []struct {
imageCtx imageContext
expValue string
expHeader string
call func() string
}{
{imageContext{
i: types.Image{ID: imageID},
trunc: true,
}, stringid.TruncateID(imageID), imageIDHeader, ctx.ID},
{imageContext{
i: types.Image{ID: imageID},
trunc: false,
}, imageID, imageIDHeader, ctx.ID},
{imageContext{
i: types.Image{Size: 10},
trunc: true,
}, "10 B", sizeHeader, ctx.Size},
{imageContext{
i: types.Image{Created: unix},
trunc: true,
}, time.Unix(unix, 0).String(), createdAtHeader, ctx.CreatedAt},
// FIXME
// {imageContext{
// i: types.Image{Created: unix},
// trunc: true,
// }, units.HumanDuration(time.Unix(unix, 0)), createdSinceHeader, ctx.CreatedSince},
{imageContext{
i: types.Image{},
repo: "busybox",
}, "busybox", repositoryHeader, ctx.Repository},
{imageContext{
i: types.Image{},
tag: "latest",
}, "latest", tagHeader, ctx.Tag},
{imageContext{
i: types.Image{},
digest: "sha256:d149ab53f8718e987c3a3024bb8aa0e2caadf6c0328f1d9d850b2a2a67f2819a",
}, "sha256:d149ab53f8718e987c3a3024bb8aa0e2caadf6c0328f1d9d850b2a2a67f2819a", digestHeader, ctx.Digest},
}
for _, c := range cases {
ctx = c.imageCtx
v := c.call()
if strings.Contains(v, ",") | else if v != c.expValue {
t.Fatalf("Expected %s, was %s\n", c.expValue, v)
}
h := ctx.fullHeader()
if h != c.expHeader {
t.Fatalf("Expected %s, was %s\n", c.expHeader, h)
}
}
}
func compareMultipleValues(t *testing.T, value, expected string) {
// comma-separated values means probably a map input, which won't
// be guaranteed to have the same order as our expected value
// We'll create maps and use reflect.DeepEquals to check instead:
entriesMap := make(map[string]string)
expMap := make(map[string]string)
entries := strings.Split(value, ",")
expectedEntries := strings.Split(expected, ",")
for _, entry := range entries {
keyval := strings.Split(entry, "=")
entriesMap[keyval[0]] = keyval[1]
}
for _, expected := range expectedEntries {
keyval := strings.Split(expected, "=")
expMap[keyval[0]] = keyval[1]
}
if !reflect.DeepEqual(expMap, entriesMap) {
t.Fatalf("Expected entries: %v, got: %v", expected, value)
}
}
| {
compareMultipleValues(t, v, c.expValue)
} |
merchant.service.ts | /**
* Created by thilina on 12/16/16.
*/
import { Injectable } from '@angular/core';
import { Headers, Http,Response } from '@angular/http';
import 'rxjs/add/operator/toPromise';
import { Observable } from 'rxjs';
import { BehaviorSubject } from 'rxjs/BehaviorSubject';
import {Url} from "url";
import {Merchant} from '../../class/merchant';
import {AppSettings} from '../../class/AppSetting'
@Injectable()
export class MerchantService {
private headers = new Headers({'Content-Type': 'application/json'});
private merchantRegisterUrl = AppSettings.DIRECT_PAY_ENDPOINT+'/merchant/register';
private merchantListUrl = AppSettings.DIRECT_PAY_ENDPOINT+'/reports/filterUsers';
private merchantDetailUrl = AppSettings.DIRECT_PAY_ENDPOINT+'/merchant/details';
private merchantDetailByBrNumberUrl = AppSettings.DIRECT_PAY_ENDPOINT+'/merchant/details/brnumber';
private merchantLastTransaction = AppSettings.DIRECT_PAY_ENDPOINT+'/transaction/last';
private merchantLastTransactions = AppSettings.DIRECT_PAY_ENDPOINT+'/transactions/last';
private brUploadUrl = AppSettings.DIRECT_PAY_ENDPOINT+'/merchant/uploadbr';
private merchantUpdate = AppSettings.DIRECT_PAY_ENDPOINT+'/merchant/update/';
constructor(private http: Http) { }
login(user: any): Promise<any> {
return new Promise((resolve, reject) => {
//noinspection TypeScriptUnresolvedFunction
this.http
.post(this.merchantRegisterUrl, JSON.stringify({user: user}), {headers: this.headers})
.toPromise()
.then(response => {
//noinspection TypeScriptUnresolvedFunction
console.log(response.json())
resolve(response.json());
},error => {
reject(error);
})
.catch((err) => {
console.log(err);
reject(err);
});
})
}
register(merchant:Merchant): Promise<any> {
return new Promise((resolve, reject) => {
return this.http
.post(this.merchantRegisterUrl, JSON.stringify(merchant), {headers: this.headers})
.toPromise()
.then(response => {
//noinspection TypeScriptUnresolvedFunction
console.log(response.json());
resolve(response.json());
}, error => {
console.log(error);
reject(error);
})
.catch((err) => {
console.log(err);
reject(err);
});
});
}
merchantDetail(id:any): Promise<any> {
return new Promise((resolve, reject) => {
return this.http
.post(this.merchantDetailUrl, JSON.stringify({id:id}), {headers: this.headers})
.toPromise()
.then(response => {
//noinspection TypeScriptUnresolvedFunction
console.log(response.json());
resolve(response.json());
}, error => {
console.log(error);
reject(error);
})
.catch((err) => {
console.log(err);
reject(err);
});
});
}
getMerchantList(role): Promise<any> {
return new Promise((resolve, reject) => {
return this.http
.post(this.merchantListUrl, JSON.stringify({role:role}),{headers: this.headers})
.toPromise()
.then(response => {
//noinspection TypeScriptUnresolvedFunction
console.log(response.json());
resolve(response.json());
}, error => {
console.log(error);
reject(error);
})
.catch((err) => {
console.log(err);
reject(err);
});
});
}
getData(): Promise<any> {
return new Promise((resolve, reject) => {
return this.http
.get(this.merchantListUrl, {headers: this.headers})
.toPromise()
.then(response => {
//noinspection TypeScriptUnresolvedFunction
console.log(response.json());
resolve(response.json());
}, error => {
console.log(error);
reject(error);
})
.catch((err) => {
console.log(err);
reject(err);
});
});
}
merchantDetailByBrNumber(data:any): Promise<any> {
return new Promise((resolve, reject) => {
return this.http
.post(this.merchantDetailByBrNumberUrl, data, {headers: this.headers})
.toPromise()
.then(response => {
//noinspection TypeScriptUnresolvedFunction
console.log(response.json());
resolve(response.json());
}, error => {
console.log(error);
reject(error);
})
.catch((err) => {
console.log(err);
reject(err);
});
});
}
getLastTransaction(id:any):Promise<any>{
return new Promise((resolve,reject)=>{
return this.http
.post(this.merchantLastTransaction,JSON.stringify({id:id}),{headers:this.headers})
.toPromise()
.then( | console.log(response.json())
resolve(response.json());
},
error=>{
console.log(error);
reject(error);
}
)
.catch((err)=>{
console.log(err);
reject(err);
});
}
);
}
getLastTransactions(id:any):Promise<any>{
return new Promise((resolve,reject)=>{
return this.http
.post(this.merchantLastTransactions,JSON.stringify({id:id}),{headers:this.headers})
.toPromise()
.then(
response=>{
console.log(response.json())
resolve(response.json());
},
error=>{
console.log(error);
reject(error);
}
)
.catch((err)=>{
console.log(err);
reject(err);
});
}
);
}
uploadBr(files:any,id:string){
return new Promise((resolve, reject) => {
var formData: any = new FormData();
var xhr = new XMLHttpRequest();
for(var i = 0; i < files.length; i++) {
formData.append("files", files[i], id);
}
xhr.onreadystatechange = function () {
if (xhr.readyState == 4) {
if (xhr.status == 200) {
resolve(JSON.parse(xhr.response));
} else {
reject(xhr.response);
}
}
}
xhr.open("POST", this.brUploadUrl, true);
xhr.send(formData);
// return this.http
// .post(this.brUploadUrl,JSON.stringify(formData),{headers:this.headers})
// .toPromise()
// .then(
// response=>{
// console.log(response.json())
// resolve(response.json());
// },
// error=>{
// console.log(error);
// reject(error);
// }
// )
// .catch((err)=>{
// console.log(err);
// reject(err);
// });
});
}
update(merchant:Merchant): Promise<any> {
return new Promise((resolve, reject) => {
return this.http
.post(this.merchantUpdate, JSON.stringify(merchant), {headers: this.headers})
.toPromise()
.then(response => {
//noinspection TypeScriptUnresolvedFunction
console.log(response.json());
resolve(response.json());
}, error => {
console.log(error);
reject(error);
})
.catch((err) => {
console.log(err);
reject(err);
});
});
}
} | response=>{ |
phash.js | const { parentPort } = require("worker_threads");
const sharp = require("sharp");
/**
* Calculates the perceptual hash of an image.
* @param {Buffer} buf buffer to be calculated
* @returns {Promise<string>} phash
*/
async function makePhash(buf) {
try {
const regularImageBuffer = await sharp(buf)
.greyscale()
.resize(32, 32, { fit: "fill" })
.rotate()
.raw()
.toBuffer();
// Reference: https://github.com/btd/sharp-phash
const SAMPLE_SIZE = 32;
// init sqrt
const sqrt = new Array(SAMPLE_SIZE);
for (let i = 1; i < SAMPLE_SIZE; i++) {
sqrt[i] = 1;
}
sqrt[0] = 1 / Math.sqrt(2.0);
// init cosines
const cosines = new Array(SAMPLE_SIZE);
for (let k = 0; k < SAMPLE_SIZE; k++) {
cosines[k] = new Array(SAMPLE_SIZE);
for (let n = 0; n < SAMPLE_SIZE; n++) {
cosines[k][n] = Math.cos(
((2 * k + 1) / (2.0 * SAMPLE_SIZE)) * n * Math.PI
);
}
}
const LOW_SIZE = 8;
// copy signal
const s = new Array(SAMPLE_SIZE);
for (let x = 0; x < SAMPLE_SIZE; x++) {
s[x] = new Array(SAMPLE_SIZE);
for (let y = 0; y < SAMPLE_SIZE; y++) {
s[x][y] = regularImageBuffer[SAMPLE_SIZE * y + x];
}
}
// apply 2D DCT II
const dct = new Array(SAMPLE_SIZE);
for (let u = 0; u < SAMPLE_SIZE; u++) {
dct[u] = new Array(SAMPLE_SIZE);
for (let v = 0; v < SAMPLE_SIZE; v++) {
let sum = 0;
for (let i = 0; i < SAMPLE_SIZE; i++) {
for (let j = 0; j < SAMPLE_SIZE; j++) {
sum += cosines[i][u] * cosines[j][v] * s[i][j];
}
}
sum *= (sqrt[u] * sqrt[v]) / 4;
dct[u][v] = sum;
}
}
// get AVG on high frequencies
let totalSum = 0;
for (let x = 0; x < LOW_SIZE; x++) {
for (let y = 0; y < LOW_SIZE; y++) {
totalSum += dct[x + 1][y + 1];
}
}
const avg = totalSum / (LOW_SIZE * LOW_SIZE);
// compute hash
let fingerprint = "";
for (let x = 0; x < LOW_SIZE; x++) {
for (let y = 0; y < LOW_SIZE; y++) {
fingerprint += dct[x + 1][y + 1] > avg ? "1" : "0";
}
}
return fingerprint;
} catch (err) {
return null;
}
}
| parentPort.on("message", (buf) =>
makePhash(buf).then((phash) => parentPort.postMessage(phash))
); | |
http.ts | import type { FileSystemDriver, FileSystemNode } from './index';
export class HTTPFS implements FileSystemDriver {
#root: string;
constructor(root: string) {
this.#root = root;
}
async resolveUri(path: string[]): Promise<string> {
const url = new URL(path.join('/'), this.#root);
url.hash = '';
url.search = '';
return url.href;
}
async access(path: string[]): Promise<boolean> {
const url = new URL(path.join('/'), this.#root);
url.hash = '';
url.search = '';
const response = await fetch(url.href, { method: 'HEAD', cache: 'force-cache' });
switch (response.status) { | case 201:
return true;
case 404:
case 403:
return false;
default:
throw new Error(`EHTTP ${response.status} ${response.statusText}`);
}
}
async readDir(path: string[]): Promise<ReadableStream<FileSystemNode>> {
throw new Error('EACCESS');
}
async readFile(path: string[], offset = 0, length?: number): Promise<ReadableStream<Uint8Array>> {
if (path.length === 0)
throw new Error('EISDIR');
const url = new URL(path.join('/'), this.#root);
url.hash = '';
url.search = '';
const response = await fetch(url.href, { cache: 'force-cache' });
switch (response.status) {
case 200:
break;
case 404:
throw new Error('ENOTFOUND');
case 403:
throw new Error('EACCESS');
default:
throw new Error(`EHTTP ${response.status} ${response.statusText}`);
}
return response.body || new ReadableStream({
start(c) {
c.close();
}
});
}
async writeFile(path: string[], offset: 'before' | 'after' | 'override', create: boolean): Promise<WritableStream<Uint8Array>> {
throw new Error('EACCESS');
}
async deleteNode(path: string[], recursive: boolean): Promise<void> {
throw new Error('EACCESS');
}
} | case 200: |
__init__.py | import _plotly_utils.basevalidators
class TypesrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="typesrc", parent_name="scattergeo.marker.gradient", **kwargs
): | plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class TypeValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="type", parent_name="scattergeo.marker.gradient", **kwargs
):
super(TypeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
values=kwargs.pop("values", ["radial", "horizontal", "vertical", "none"]),
**kwargs
)
import _plotly_utils.basevalidators
class ColorsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="colorsrc", parent_name="scattergeo.marker.gradient", **kwargs
):
super(ColorsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="scattergeo.marker.gradient", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
**kwargs
) | super(TypesrcValidator, self).__init__( |
utilities.py | #!/usr/bin/env python
import numpy as np
from collections import defaultdict
import itertools
from sklearn.metrics import confusion_matrix
def print_data_stats(sens_attr, class_labels):
"""Print a few numbers about the data: Total number of points, number of
protected examples and unprotected examples, and number of protected points
in positive class, and number of unprotected points in positive class. | sens_attr: numpy array
The sensitive attribute of shape=(number_points,).
class_labels: nunmp
The class labels of shape=(number_points,).
"""
non_prot_all = sum(sens_attr == 1.0) # non-protected group
prot_all = len(sens_attr) - non_prot_all # protected group
non_prot_pos = sum(class_labels[sens_attr == 1.0] == 1.0) # non_protected in positive class
prot_pos = sum(class_labels == 1.0) - non_prot_pos # protected in positive class
frac_non_prot_pos = float(non_prot_pos) / float(non_prot_all)
frac_prot_pos = float(prot_pos) / float(prot_all)
print
print("Total data points: %d" % len(sens_attr))
print("# non-protected examples: %d" % non_prot_all)
print("# protected examples: %d" % prot_all)
print("# non-protected examples in positive class: %d (%0.1f%%)" % (non_prot_pos, non_prot_pos * 100.0 / non_prot_all))
print("# protected examples in positive class: %d (%0.1f%%)" % (prot_pos, prot_pos * 100.0 / prot_all))
def get_positive_rate(y_predicted, y_true):
"""Compute the positive rate for given predictions of the class label.
Parameters
----------
y_predicted: numpy array
The predicted class labels of shape=(number_points,).
y_true: numpy array
The true class labels of shape=(number_points,).
Returns
---------
pr: float
The positive rate.
"""
tn, fp, fn, tp = confusion_matrix(y_true, y_predicted).ravel()
pr = (tp+fp) / (tp+fp+tn+fn)
return pr
def get_true_positive_rate(y_predicted, y_true):
"""Compute the true positive rate for given predictions of the class label.
Parameters
----------
y_predicted: numpy array
The predicted class labels of shape=(number_points,).
y_true: numpy array
The true class labels of shape=(number_points,).
Returns
---------
tpr: float
The true positive rate.
"""
tn, fp, fn, tp = confusion_matrix(y_true, y_predicted).ravel()
tpr = tp / (tp+fn)
return tpr
def compute_fairness_measures(y_predicted, y_true, sens_attr):
"""Compute value of demographic parity and equality of opportunity for given predictions.
Parameters
----------
y_predicted: numpy array
The predicted class labels of shape=(number_points,).
y_true: numpy array
The true class labels of shape=(number_points,).
sens_attr: numpy array
The sensitive labels of shape=(number_points,).
Returns
----------
DDP: float
The difference of demographic parity.
DEO: float
The difference of equality of opportunity.
"""
positive_rate_prot = get_positive_rate(y_predicted[sens_attr==-1], y_true[sens_attr==-1])
positive_rate_unprot = get_positive_rate(y_predicted[sens_attr==1], y_true[sens_attr==1])
true_positive_rate_prot = get_true_positive_rate(y_predicted[sens_attr==-1], y_true[sens_attr==-1])
true_positive_rate_unprot = get_true_positive_rate(y_predicted[sens_attr==1], y_true[sens_attr==1])
DDP = positive_rate_unprot - positive_rate_prot
DEO = true_positive_rate_unprot - true_positive_rate_prot
rates = [positive_rate_unprot, positive_rate_prot]
DP = np.min(rates)/(np.max(rates) + 1e-5)
return DDP, DEO, DP
def get_accuracy(y_true, y_predicted):
"""Compute the accuracy for given predicted class labels.
Parameters
----------
y_true: numpy array
The true class labels of shape=(number_points,).
y_predicted: numpy array
The predicted class labels of shape=(number_points,).
Returns
---------
accuracy: float
The accuracy of the predictions.
"""
correct_answers = (y_predicted == y_true).astype(int) # will have 1 when the prediction and the actual label match
accuracy = float(sum(correct_answers)) / float(len(correct_answers))
return accuracy |
Parameters
----------- |
site-recommendations.service.ts | import { Injectable } from '@angular/core';
import { Observable } from 'rxjs/Rx';
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/catch';
import { Http, Response} from '@angular/http';
import { SiteRecommendation } from './site-recommendation.model';
import { SiteRecommendationOwner } from './site-recommendation-owner.model';
import { URLSearchParams } from '@angular/http';
@Injectable()
export class | {
constructor(private http: Http) {
}
public getSiteRecommendations(): Observable<SiteRecommendation[]> {
return this.http.get('/assets/data/site-recommendations.json')
.map((res: Response) => {
const data = res.json();
const elements = data['site-recommendations'];
const returningArray = [];
for (let key = 0; key < elements.length; key++) {
const currEl = elements[key];
currEl.owner = new SiteRecommendationOwner(currEl.owner.name, currEl.owner.role, currEl.owner.company);
returningArray.push(currEl);
}
return <SiteRecommendation[]> returningArray;
})
.catch(this.handleError);
}
private handleError(error: Response) {
return Observable.throw(error.statusText);
}
}
| SiteRecommendationsService |
cddl.rs | #![cfg(feature = "std")]
#![cfg(feature = "additional-controls")]
#![cfg(not(target_arch = "wasm32"))]
use cddl::{lexer_from_str, parser, validate_json_from_str, validator::json};
use std::fs;
#[test]
fn verify_cddl_compiles() -> Result<(), parser::Error> {
for file in fs::read_dir("tests/fixtures/cddl/").unwrap() {
let file = file.unwrap();
if file.path().extension().unwrap() != "cddl" {
continue;
}
let file_content = fs::read_to_string(file.path()).unwrap();
match parser::cddl_from_str(&mut lexer_from_str(&file_content), &file_content, true) {
Ok(_) => println!("file: {:#?} ... success", file.path()),
Err(_) => {
return Err(parser::Error::INCREMENTAL);
}
}
}
Ok(())
}
#[test]
fn | () -> json::Result {
validate_json_from_str(
&fs::read_to_string("tests/fixtures/cddl/reputon.cddl").unwrap(),
&fs::read_to_string("tests/fixtures/json/reputon.json").unwrap(),
None,
)
}
| verify_json_validation |
reactive.py | """
Declares Syncable and Reactive classes which provides baseclasses
for Panel components which sync their state with one or more bokeh
models rendered on the frontend.
"""
import difflib
import sys
import threading
from collections import namedtuple
from functools import partial
import numpy as np
import param
from bokeh.models import LayoutDOM
from tornado import gen
from .config import config
from .io.callbacks import PeriodicCallback
from .io.model import hold
from .io.notebook import push, push_on_root
from .io.server import unlocked
from .io.state import state
from .util import edit_readonly, updating
from .viewable import Renderable, Viewable
LinkWatcher = namedtuple("Watcher","inst cls fn mode onlychanged parameter_names what queued target links transformed bidirectional_watcher")
class Syncable(Renderable):
"""
Syncable is an extension of the Renderable object which can not
only render to a bokeh model but also sync the parameters on the
object with the properties on the model.
In order to bi-directionally link parameters with bokeh model
instances the _link_params and _link_props methods define
callbacks triggered when either the parameter or bokeh property
values change. Since there may not be a 1-to-1 mapping between
parameter and the model property the _process_property_change and
_process_param_change may be overridden to apply any necessary
transformations.
"""
# Timeout if a notebook comm message is swallowed
_timeout = 20000
# Timeout before the first event is processed
_debounce = 50
# Any parameters that require manual updates handling for the models
# e.g. parameters which affect some sub-model
_manual_params = []
# Mapping from parameter name to bokeh model property name
_rename = {}
# Allows defining a mapping from model property name to a JS code
# snippet that transforms the object before serialization
_js_transforms = {}
# Transforms from input value to bokeh property value
_source_transforms = {}
_target_transforms = {}
__abstract = True
def __init__(self, **params):
super().__init__(**params)
# Useful when updating model properties which trigger potentially
# recursive events
self._updating = False
# A dictionary of current property change events
self._events = {}
# Any watchers associated with links between two objects
self._links = []
self._link_params()
# A dictionary of bokeh property changes being processed
self._changing = {}
# Sets up watchers to process manual updates to models
if self._manual_params:
self.param.watch(self._update_manual, self._manual_params)
#----------------------------------------------------------------
# Model API
#----------------------------------------------------------------
def _process_property_change(self, msg):
"""
Transform bokeh model property changes into parameter updates.
Should be overridden to provide appropriate mapping between
parameter value and bokeh model change. By default uses the
_rename class level attribute to map between parameter and
property names.
"""
inverted = {v: k for k, v in self._rename.items()}
return {inverted.get(k, k): v for k, v in msg.items()}
def _process_param_change(self, msg):
"""
Transform parameter changes into bokeh model property updates.
Should be overridden to provide appropriate mapping between
parameter value and bokeh model change. By default uses the
_rename class level attribute to map between parameter and
property names.
"""
properties = {self._rename.get(k, k): v for k, v in msg.items()
if self._rename.get(k, False) is not None}
if 'width' in properties and self.sizing_mode is None:
properties['min_width'] = properties['width']
if 'height' in properties and self.sizing_mode is None:
properties['min_height'] = properties['height']
return properties
@property
def _linkable_params(self):
"""
Parameters that can be linked in JavaScript via source
transforms.
"""
return [p for p in self._synced_params if self._rename.get(p, False) is not None
and self._source_transforms.get(p, False) is not None] + ['loading']
@property
def _synced_params(self):
"""
Parameters which are synced with properties using transforms
applied in the _process_param_change method.
"""
ignored = ['default_layout', 'loading']
return [p for p in self.param if p not in self._manual_params+ignored]
def _init_params(self):
return {k: v for k, v in self.param.get_param_values()
if k in self._synced_params and v is not None}
def _link_params(self):
params = self._synced_params
if params:
watcher = self.param.watch(self._param_change, params)
self._callbacks.append(watcher)
def _link_props(self, model, properties, doc, root, comm=None):
ref = root.ref['id']
if config.embed:
return
for p in properties:
if isinstance(p, tuple):
_, p = p
if comm:
model.on_change(p, partial(self._comm_change, doc, ref, comm))
else:
model.on_change(p, partial(self._server_change, doc, ref))
def _manual_update(self, events, model, doc, root, parent, comm):
"""
Method for handling any manual update events, i.e. events triggered
by changes in the manual params.
"""
def _update_manual(self, *events):
for ref, (model, parent) in self._models.items():
if ref not in state._views or ref in state._fake_roots:
continue
viewable, root, doc, comm = state._views[ref]
if comm or state._unblocked(doc):
with unlocked():
self._manual_update(events, model, doc, root, parent, comm)
if comm and 'embedded' not in root.tags:
push(doc, comm)
else:
cb = partial(self._manual_update, events, model, doc, root, parent, comm)
if doc.session_context:
doc.add_next_tick_callback(cb)
else:
cb()
def _update_model(self, events, msg, root, model, doc, comm):
self._changing[root.ref['id']] = [
attr for attr, value in msg.items()
if not model.lookup(attr).property.matches(getattr(model, attr), value)
]
try:
model.update(**msg)
finally:
del self._changing[root.ref['id']]
def _cleanup(self, root):
super()._cleanup(root)
ref = root.ref['id']
self._models.pop(ref, None)
comm, client_comm = self._comms.pop(ref, (None, None))
if comm:
try:
comm.close()
except Exception:
pass
if client_comm:
try:
client_comm.close()
except Exception:
pass
def _param_change(self, *events):
msgs = []
for event in events:
msg = self._process_param_change({event.name: event.new})
if msg:
msgs.append(msg)
events = {event.name: event for event in events}
msg = {k: v for msg in msgs for k, v in msg.items()}
if not msg:
return
for ref, (model, parent) in self._models.items():
if ref not in state._views or ref in state._fake_roots:
continue
viewable, root, doc, comm = state._views[ref]
if comm or not doc.session_context or state._unblocked(doc):
with unlocked():
self._update_model(events, msg, root, model, doc, comm)
if comm and 'embedded' not in root.tags:
push(doc, comm)
else:
cb = partial(self._update_model, events, msg, root, model, doc, comm)
doc.add_next_tick_callback(cb)
def _process_events(self, events):
with edit_readonly(state):
state.busy = True
try:
with edit_readonly(self):
self.param.set_param(**self._process_property_change(events))
finally:
with edit_readonly(state):
state.busy = False
@gen.coroutine
def _change_coroutine(self, doc=None):
self._change_event(doc)
def _change_event(self, doc=None):
try:
state.curdoc = doc
thread = threading.current_thread()
thread_id = thread.ident if thread else None
state._thread_id = thread_id
events = self._events
self._events = {}
self._process_events(events)
finally:
state.curdoc = None
state._thread_id = None
def _comm_change(self, doc, ref, comm, attr, old, new):
if attr in self._changing.get(ref, []):
self._changing[ref].remove(attr)
return
with hold(doc, comm=comm):
self._process_events({attr: new})
def _server_change(self, doc, ref, attr, old, new):
if attr in self._changing.get(ref, []):
self._changing[ref].remove(attr)
return
state._locks.clear()
processing = bool(self._events)
self._events.update({attr: new})
if not processing:
if doc.session_context:
doc.add_timeout_callback(partial(self._change_coroutine, doc), self._debounce)
else:
self._change_event(doc)
class Reactive(Syncable, Viewable):
"""
Reactive is a Viewable object that also supports syncing between
the objects parameters and the underlying bokeh model either via
the defined pyviz_comms.Comm type or using bokeh server.
In addition it defines various methods which make it easy to link
the parameters to other objects.
"""
#----------------------------------------------------------------
# Public API
#----------------------------------------------------------------
def add_periodic_callback(self, callback, period=500, count=None,
timeout=None, start=True):
"""
Schedules a periodic callback to be run at an interval set by
the period. Returns a PeriodicCallback object with the option
to stop and start the callback.
Arguments
---------
callback: callable
Callable function to be executed at periodic interval.
period: int
Interval in milliseconds at which callback will be executed.
count: int
Maximum number of times callback will be invoked.
timeout: int
Timeout in seconds when the callback should be stopped.
start: boolean (default=True)
Whether to start callback immediately.
Returns
-------
Return a PeriodicCallback object with start and stop methods.
"""
self.param.warning(
"Calling add_periodic_callback on a Panel component is "
"deprecated and will be removed in the next minor release. "
"Use the pn.state.add_periodic_callback API instead."
)
cb = PeriodicCallback(callback=callback, period=period,
count=count, timeout=timeout)
if start:
cb.start()
return cb
def link(self, target, callbacks=None, bidirectional=False, **links):
"""
Links the parameters on this object to attributes on another
object in Python. Supports two modes, either specify a mapping
between the source and target object parameters as keywords or
provide a dictionary of callbacks which maps from the source
parameter to a callback which is triggered when the parameter
changes.
Arguments
---------
target: object
The target object of the link.
callbacks: dict
Maps from a parameter in the source object to a callback.
bidirectional: boolean
Whether to link source and target bi-directionally
**links: dict
Maps between parameters on this object to the parameters
on the supplied object.
"""
if links and callbacks:
raise ValueError('Either supply a set of parameters to '
'link as keywords or a set of callbacks, '
'not both.')
elif not links and not callbacks:
raise ValueError('Declare parameters to link or a set of '
'callbacks, neither was defined.')
elif callbacks and bidirectional:
raise ValueError('Bidirectional linking not supported for '
'explicit callbacks. You must define '
'separate callbacks for each direction.')
_updating = []
def link(*events):
for event in events:
if event.name in _updating: continue
_updating.append(event.name)
try:
if callbacks:
callbacks[event.name](target, event)
else:
setattr(target, links[event.name], event.new)
finally:
_updating.pop(_updating.index(event.name))
params = list(callbacks) if callbacks else list(links)
cb = self.param.watch(link, params)
bidirectional_watcher = None
if bidirectional:
_reverse_updating = []
reverse_links = {v: k for k, v in links.items()}
def reverse_link(*events):
for event in events:
if event.name in _reverse_updating: continue
_reverse_updating.append(event.name)
try:
setattr(self, reverse_links[event.name], event.new)
finally:
_reverse_updating.remove(event.name)
bidirectional_watcher = target.param.watch(reverse_link, list(reverse_links))
link = LinkWatcher(*tuple(cb)+(target, links, callbacks is not None, bidirectional_watcher))
self._links.append(link)
return cb
def controls(self, parameters=[], jslink=True):
"""
Creates a set of widgets which allow manipulating the parameters
on this instance. By default all parameters which support
linking are exposed, but an explicit list of parameters can
be provided.
Arguments
---------
parameters: list(str)
An explicit list of parameters to return controls for.
jslink: bool
Whether to use jslinks instead of Python based links.
This does not allow using all types of parameters.
Returns
-------
A layout of the controls
"""
from .param import Param
from .layout import Tabs, WidgetBox
from .widgets import LiteralInput
if parameters:
linkable = parameters
elif jslink:
linkable = self._linkable_params + ['loading']
else:
linkable = list(self.param)
params = [p for p in linkable if p not in Viewable.param]
controls = Param(self.param, parameters=params, default_layout=WidgetBox,
name='Controls')
layout_params = [p for p in linkable if p in Viewable.param]
if 'name' not in layout_params and self._rename.get('name', False) is not None and not parameters:
layout_params.insert(0, 'name')
style = Param(self.param, parameters=layout_params, default_layout=WidgetBox,
name='Layout')
if jslink:
for p in params:
widget = controls._widgets[p]
widget.jslink(self, value=p, bidirectional=True)
if isinstance(widget, LiteralInput):
widget.serializer = 'json'
for p in layout_params:
widget = style._widgets[p]
widget.jslink(self, value=p, bidirectional=p != 'loading')
if isinstance(widget, LiteralInput):
widget.serializer = 'json'
if params and layout_params:
return Tabs(controls.layout[0], style.layout[0])
elif params:
return controls.layout[0]
return style.layout[0]
def jscallback(self, args={}, **callbacks):
"""
Allows defining a JS callback to be triggered when a property
changes on the source object. The keyword arguments define the
properties that trigger a callback and the JS code that gets
executed.
Arguments
----------
args: dict
A mapping of objects to make available to the JS callback
**callbacks: dict
A mapping between properties on the source model and the code
to execute when that property changes
Returns
-------
callback: Callback
The Callback which can be used to disable the callback.
"""
from .links import Callback
for k, v in list(callbacks.items()):
callbacks[k] = self._rename.get(v, v)
return Callback(self, code=callbacks, args=args)
def jslink(self, target, code=None, args=None, bidirectional=False, **links):
"""
Links properties on the source object to those on the target
object in JS code. Supports two modes, either specify a
mapping between the source and target model properties as
keywords or provide a dictionary of JS code snippets which
maps from the source parameter to a JS code snippet which is
executed when the property changes.
Arguments
----------
target: HoloViews object or bokeh Model or panel Viewable
The target to link the value to.
code: dict
Custom code which will be executed when the widget value
changes.
bidirectional: boolean
Whether to link source and target bi-directionally
**links: dict
A mapping between properties on the source model and the
target model property to link it to.
Returns
-------
link: GenericLink
The GenericLink which can be used unlink the widget and
the target model.
"""
if links and code:
raise ValueError('Either supply a set of properties to '
'link as keywords or a set of JS code '
'callbacks, not both.')
elif not links and not code:
raise ValueError('Declare parameters to link or a set of '
'callbacks, neither was defined.')
if args is None:
args = {}
mapping = code or links
for k in mapping:
if k.startswith('event:'):
continue
elif hasattr(self, 'object') and isinstance(self.object, LayoutDOM):
current = self.object
for attr in k.split('.'):
if not hasattr(current, attr):
raise ValueError(f"Could not resolve {k} on "
f"{self.object} model. Ensure "
"you jslink an attribute that "
"exists on the bokeh model.")
current = getattr(current, attr)
elif (k not in self.param and k not in list(self._rename.values())):
matches = difflib.get_close_matches(k, list(self.param))
if matches:
matches = ' Similar parameters include: %r' % matches
else:
matches = ''
raise ValueError("Could not jslink %r parameter (or property) "
"on %s object because it was not found.%s"
% (k, type(self).__name__, matches))
elif (self._source_transforms.get(k, False) is None or
self._rename.get(k, False) is None):
raise ValueError("Cannot jslink %r parameter on %s object, "
"the parameter requires a live Python kernel "
"to have an effect." % (k, type(self).__name__))
if isinstance(target, Syncable) and code is None:
for k, p in mapping.items():
if k.startswith('event:'):
continue
elif p not in target.param and p not in list(target._rename.values()):
matches = difflib.get_close_matches(p, list(target.param))
if matches:
matches = ' Similar parameters include: %r' % matches
else:
matches = ''
raise ValueError("Could not jslink %r parameter (or property) "
"on %s object because it was not found.%s"
% (p, type(self).__name__, matches))
elif (target._source_transforms.get(p, False) is None or
target._rename.get(p, False) is None):
raise ValueError("Cannot jslink %r parameter on %s object "
"to %r parameter on %s object. It requires "
"a live Python kernel to have an effect."
% (k, type(self).__name__, p, type(target).__name__))
from .links import Link
return Link(self, target, properties=links, code=code, args=args,
bidirectional=bidirectional)
class SyncableData(Reactive):
"""
A baseclass for components which sync one or more data parameters
with the frontend via a ColumnDataSource.
"""
selection = param.List(default=[], doc="""
The currently selected rows in the data.""")
# Parameters which when changed require an update of the data
_data_params = []
_rename = {'selection': None} | __abstract = True
def __init__(self, **params):
super().__init__(**params)
self._data = None
self._processed = None
self.param.watch(self._validate, self._data_params)
if self._data_params:
self.param.watch(self._update_cds, self._data_params)
self.param.watch(self._update_selected, 'selection')
self._validate(None)
self._update_cds()
def _validate(self, event):
"""
Allows implementing validation for the data parameters.
"""
def _get_data(self):
"""
Implemented by subclasses converting data parameter(s) into
a ColumnDataSource compatible data dictionary.
Returns
-------
processed: object
Raw data after pre-processing (e.g. after filtering)
data: dict
Dictionary of columns used to instantiate and update the
ColumnDataSource
"""
def _update_column(self, column, array):
"""
Implemented by subclasses converting changes in columns to
changes in the data parameter.
Parameters
----------
column: str
The name of the column to update.
array: numpy.ndarray
The array data to update the column with.
"""
data = getattr(self, self._data_params[0])
data[column] = array
def _update_data(self, data):
self.param.set_param(**{self._data_params[0]: data})
def _manual_update(self, events, model, doc, root, parent, comm):
for event in events:
if event.type == 'triggered' and self._updating:
continue
elif hasattr(self, '_update_' + event.name):
getattr(self, '_update_' + event.name)(model)
def _update_cds(self, *events):
if self._updating:
return
self._processed, self._data = self._get_data()
for ref, (m, _) in self._models.items():
m.source.data = self._data
push_on_root(ref)
def _update_selected(self, *events, indices=None):
if self._updating:
return
indices = self.selection if indices is None else indices
for ref, (m, _) in self._models.items():
m.source.selected.indices = indices
push_on_root(ref)
@updating
def _stream(self, stream, rollover=None):
for ref, (m, _) in self._models.items():
m.source.stream(stream, rollover)
push_on_root(ref)
@updating
def _patch(self, patch):
for ref, (m, _) in self._models.items():
m.source.patch(patch)
push_on_root(ref)
def stream(self, stream_value, rollover=None, reset_index=True):
"""
Streams (appends) the `stream_value` provided to the existing
value in an efficient manner.
Arguments
---------
stream_value: (Union[pd.DataFrame, pd.Series, Dict])
The new value(s) to append to the existing value.
rollover: int
A maximum column size, above which data from the start of
the column begins to be discarded. If None, then columns
will continue to grow unbounded.
reset_index (bool, default=True):
If True and the stream_value is a DataFrame, then its index
is reset. Helps to keep the index unique and named `index`.
Raises
------
ValueError: Raised if the stream_value is not a supported type.
Examples
--------
Stream a Series to a DataFrame
>>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
>>> obj = DataComponent(value)
>>> stream_value = pd.Series({"x": 4, "y": "d"})
>>> obj.stream(stream_value)
>>> obj.value.to_dict("list")
{'x': [1, 2, 4], 'y': ['a', 'b', 'd']}
Stream a Dataframe to a Dataframe
>>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
>>> obj = DataComponent(value)
>>> stream_value = pd.DataFrame({"x": [3, 4], "y": ["c", "d"]})
>>> obj.stream(stream_value)
>>> obj.value.to_dict("list")
{'x': [1, 2, 3, 4], 'y': ['a', 'b', 'c', 'd']}
Stream a Dictionary row to a DataFrame
>>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
>>> tabulator = DataComponent(value)
>>> stream_value = {"x": 4, "y": "d"}
>>> obj.stream(stream_value)
>>> obj.value.to_dict("list")
{'x': [1, 2, 4], 'y': ['a', 'b', 'd']}
Stream a Dictionary of Columns to a Dataframe
>>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
>>> obj = DataComponent(value)
>>> stream_value = {"x": [3, 4], "y": ["c", "d"]}
>>> obj.stream(stream_value)
>>> obj.value.to_dict("list")
{'x': [1, 2, 3, 4], 'y': ['a', 'b', 'c', 'd']}
"""
if 'pandas' in sys.modules:
import pandas as pd
else:
pd = None
if pd and isinstance(stream_value, pd.DataFrame):
if isinstance(self._processed, dict):
self.stream(stream_value.to_dict(), rollover)
return
if reset_index:
value_index_start = self._processed.index.max() + 1
stream_value = stream_value.reset_index(drop=True)
stream_value.index += value_index_start
combined = pd.concat([self._processed, stream_value])
if rollover is not None:
combined = combined.iloc[-rollover:]
with param.discard_events(self):
self._update_data(combined)
try:
self._updating = True
self.param.trigger(self._data_params[0])
finally:
self._updating = False
try:
self._updating = True
self._stream(stream_value, rollover)
finally:
self._updating = False
elif pd and isinstance(stream_value, pd.Series):
if isinstance(self._processed, dict):
self.stream({k: [v] for k, v in stream_value.to_dict().items()}, rollover)
return
value_index_start = self._processed.index.max() + 1
self._processed.loc[value_index_start] = stream_value
with param.discard_events(self):
self._update_data(self._processed)
self._updating = True
try:
self._stream(self._processed.iloc[-1:], rollover)
finally:
self._updating = False
elif isinstance(stream_value, dict):
if isinstance(self._processed, dict):
if not all(col in stream_value for col in self._data):
raise ValueError("Stream update must append to all columns.")
for col, array in stream_value.items():
combined = np.concatenate([self._data[col], array])
if rollover is not None:
combined = combined[-rollover:]
self._update_column(col, combined)
self._updating = True
try:
self._stream(stream_value, rollover)
finally:
self._updating = False
else:
try:
stream_value = pd.DataFrame(stream_value)
except ValueError:
stream_value = pd.Series(stream_value)
self.stream(stream_value)
else:
raise ValueError("The stream value provided is not a DataFrame, Series or Dict!")
def patch(self, patch_value):
"""
Efficiently patches (updates) the existing value with the `patch_value`.
Arguments
---------
patch_value: (Union[pd.DataFrame, pd.Series, Dict])
The value(s) to patch the existing value with.
Raises
------
ValueError: Raised if the patch_value is not a supported type.
Examples
--------
Patch a DataFrame with a Dictionary row.
>>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
>>> obj = DataComponent(value)
>>> patch_value = {"x": [(0, 3)]}
>>> obj.patch(patch_value)
>>> obj.value.to_dict("list")
{'x': [3, 2], 'y': ['a', 'b']}
Patch a Dataframe with a Dictionary of Columns.
>>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
>>> obj = DataComponent(value)
>>> patch_value = {"x": [(slice(2), (3,4))], "y": [(1,'d')]}
>>> obj.patch(patch_value)
>>> obj.value.to_dict("list")
{'x': [3, 4], 'y': ['a', 'd']}
Patch a DataFrame with a Series. Please note the index is used in the update.
>>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
>>> obj = DataComponent(value)
>>> patch_value = pd.Series({"index": 1, "x": 4, "y": "d"})
>>> obj.patch(patch_value)
>>> obj.value.to_dict("list")
{'x': [1, 4], 'y': ['a', 'd']}
Patch a Dataframe with a Dataframe. Please note the index is used in the update.
>>> value = pd.DataFrame({"x": [1, 2], "y": ["a", "b"]})
>>> obj = DataComponent(value)
>>> patch_value = pd.DataFrame({"x": [3, 4], "y": ["c", "d"]})
>>> obj.patch(patch_value)
>>> obj.value.to_dict("list")
{'x': [3, 4], 'y': ['c', 'd']}
"""
if self._processed is None or isinstance(patch_value, dict):
self._patch(patch_value)
return
if 'pandas' in sys.modules:
import pandas as pd
else:
pd = None
data = getattr(self, self._data_params[0])
if pd and isinstance(patch_value, pd.DataFrame):
patch_value_dict = {}
for column in patch_value.columns:
patch_value_dict[column] = []
for index in patch_value.index:
patch_value_dict[column].append((index, patch_value.loc[index, column]))
self.patch(patch_value_dict)
elif pd and isinstance(patch_value, pd.Series):
if "index" in patch_value: # Series orient is row
patch_value_dict = {
k: [(patch_value["index"], v)] for k, v in patch_value.items()
}
patch_value_dict.pop("index")
else: # Series orient is column
patch_value_dict = {
patch_value.name: [(index, value) for index, value in patch_value.items()]
}
self.patch(patch_value_dict)
elif isinstance(patch_value, dict):
for k, v in patch_value.items():
for index, patch in v:
if pd and isinstance(self._processed, pd.DataFrame):
data.loc[index, k] = patch
else:
data[k][index] = patch
self._updating = True
try:
self._patch(patch_value)
finally:
self._updating = False
else:
raise ValueError(
f"Patching with a patch_value of type {type(patch_value).__name__} "
"is not supported. Please provide a DataFrame, Series or Dict."
)
class ReactiveData(SyncableData):
"""
An extension of SyncableData which bi-directionally syncs a data
parameter between frontend and backend using a ColumnDataSource.
"""
def _update_selection(self, indices):
self.selection = indices
def _process_events(self, events):
if 'data' in events:
data = events.pop('data')
if self._updating:
data = {}
_, old_data = self._get_data()
updated = False
for k, v in data.items():
if k in self.indexes:
continue
k = self._renamed_cols.get(k, k)
if isinstance(v, dict):
v = [v for _, v in sorted(v.items(), key=lambda it: int(it[0]))]
try:
isequal = (old_data[k] == np.asarray(v)).all()
except Exception:
isequal = False
if not isequal:
self._update_column(k, v)
updated = True
if updated:
self._updating = True
try:
self.param.trigger('value')
finally:
self._updating = False
if 'indices' in events:
self._updating = True
try:
self._update_selection(events.pop('indices'))
finally:
self._updating = False
super(ReactiveData, self)._process_events(events) | |
REF_11_main.py | #!/usr/bin/env python
import webapp2
from google.appengine.api import app_identity
from google.appengine.api import mail | """Set Announcement in Memcache."""
header = self.request.headers.get('X-AppEngine-Cron', None)
if not header:
raise ValueError('attempt to access cron handler directly, '
'missing custom App Engine header')
ConferenceApi._cacheAnnouncement()
self.response.set_status(204)
class SendConfirmationEmailHandler(webapp2.RequestHandler):
def post(self):
"""Send email confirming Conference creation."""
header = self.request.headers.get('X-AppEngine-QueueName', None)
if not header:
raise ValueError('attempt to access task handler directly, '
'missing custom App Engine header')
mail.send_mail(
'noreply@%s.appspotmail.com' % (
app_identity.get_application_id()), # from
self.request.get('email'), # to
'You created a new Conference!', # subj
'Hi, you have created a following ' # body
'conference:\r\n\r\n%s' % self.request.get(
'conferenceInfo')
)
app = webapp2.WSGIApplication([
('/crons/set_announcement', SetAnnouncementHandler),
('/tasks/send_confirmation_email', SendConfirmationEmailHandler)
], debug=True) | from conference import ConferenceApi
class SetAnnouncementHandler(webapp2.RequestHandler):
def get(self): |
main.py | # Copyright (c) 2018, Xilinx, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import argparse
import json
import os
import torch
import numpy as np
from ocr import PytorchOCRTrainer
torch.backends.cudnn.enabled = False
torch.set_printoptions(precision=10)
class | (dict):
def __getattr__(self, name):
if name in self:
return self[name]
else:
raise AttributeError("No such attribute: " + name)
def __setattr__(self, name, value):
self[name] = value
def __delattr__(self, name):
if name in self:
del self[name]
else:
raise AttributeError("No such attribute: " + name)
def ascii_encode_dict(data):
ascii_encode = lambda x: x.encode('ascii')
return dict(map(ascii_encode, pair) if isinstance(pair[1], unicode) else pair for pair in data.items())
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='OCR training')
parser.add_argument('--params', '-p', default="default_trainer_params.json", help='Path to params JSON file. Default ignored when resuming.')
parser.add_argument('--experiments', '-e', default="experiments", help='Path for experiments. Ignored when resuming.')
parser.add_argument('--input', '-i', help='Path to input checkpoint.')
parser.add_argument('--pretrained_policy', default="RESUME", help='RESUME/RETRAIN.')
parser.add_argument('--init_bn_fc_fusion', default=False, action='store_true', help='Init BN FC fusion.')
parser.add_argument('--eval', default=False, action='store_true', help='Perform only evaluation on val dataset.')
parser.add_argument('--export', default=False, action='store_true', help='Perform only export of quantized weights.')
parser.add_argument('--no_cuda', default=False, action='store_true', help='Run on CPU.')
parser.add_argument('--export_test_image', default=False, action='store_true', help='Export pre-quantized and reshaped test image.')
parser.add_argument('--valid', default="db_files_uw3-500/valid.txt", help='Input path for val file.')
parser.add_argument('--sortedtrain', default="db_files_uw3-500/sortedTrain.txt", help='Input path for train file.')
parser.add_argument('--imgs', default="db_files_uw3-500/imgs", help='Input path for images dir.')
parser.add_argument('--dry_run', default=False, action='store_true', help='Do not write any output file.')
parser.add_argument('--simd_factor', default=1, type=int, help='SIMD factor for export.')
parser.add_argument('--pe', default=1, type=int, help='Number of PEs for export.')
#Overrides
parser.add_argument('--random_seed', type=int)
parser.add_argument('--batch_size', type=int)
parser.add_argument('--num_workers', type=int)
parser.add_argument('--layer_size', type=int)
parser.add_argument('--neuron_type', type=str)
parser.add_argument('--target_height', type=int)
parser.add_argument('--epochs', type=int)
parser.add_argument('--lr', type=float)
parser.add_argument('--lr_schedule', type=str)
parser.add_argument('--lr_step', type=int)
parser.add_argument('--lr_gamma', type=float)
parser.add_argument('--max_norm', type=float)
parser.add_argument('--seq_to_random_threshold', type=int)
parser.add_argument('--bidirectional', type=bool)
parser.add_argument('--reduce_bidirectional', type=str)
parser.add_argument('--recurrent_bias_enabled', type=bool)
parser.add_argument('--checkpoint_interval', type=int)
parser.add_argument('--recurrent_weight_bit_width', type=int)
parser.add_argument('--recurrent_weight_quantization', type=str)
parser.add_argument('--recurrent_bias_bit_width', type=int)
parser.add_argument('--recurrent_bias_quantization', type=str)
parser.add_argument('--recurrent_activation_bit_width', type=int)
parser.add_argument('--recurrent_activation_quantization', type=str)
parser.add_argument('--internal_activation_bit_width', type=int)
parser.add_argument('--fc_weight_bit_width', type=int)
parser.add_argument('--fc_weight_quantization', type=str)
parser.add_argument('--fc_bias_bit_width', type=int)
parser.add_argument('--fc_bias_quantization', type=str)
parser.add_argument('--quantize_input', type=bool)
parser.add_argument('--mask_padded', type=bool)
args = parser.parse_args()
#Set paths relative to main.py
path_args = ['params', 'experiments', 'input', 'valid', 'sortedtrain', 'imgs']
for path_arg in path_args:
path = getattr(args, path_arg)
if path is not None and not os.path.isabs(path):
abs_path = os.path.abspath(os.path.join(os.path.dirname(__file__), path))
setattr(args, path_arg, abs_path)
#Avoid creating new folders etc.
if args.eval or args.export or args.export_test_image:
args.dry_run = True
#force cpu when exporting weights
if args.export or args.export_test_image:
args.no_cuda = True
if args.input and args.pretrained_policy == "RESUME" and args.params == "default_trainer_params.json":
package = torch.load(args.input, map_location=lambda storage, loc: storage)
trainer_params = package['trainer_params']
else:
with open(args.params) as d:
trainer_params = json.load(d, object_hook=ascii_encode_dict)
trainer_params = objdict(trainer_params)
#Overrides
if args.epochs is not None:
trainer_params.epochs = args.epochs
if args.internal_activation_bit_width is not None:
trainer_params.internal_activation_bit_width = args.internal_activation_bit_width
trainer = PytorchOCRTrainer(trainer_params, args)
if args.export_test_image:
trainer.export_test_image(trainer_params.target_height)
exit(0)
if args.export:
trainer.export_model(args.simd_factor, args.pe)
exit(0)
if args.eval:
trainer.eval_model()
else:
trainer.train_model()
| objdict |
error.rs | pub type Errno = libc::c_int;
/// This error type for `Daemonize` `start` method.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct Error {
kind: ErrorKind,
}
/// This error type for `Daemonize` `start` method.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub enum ErrorKind {
Fork(Errno),
DetachSession(Errno),
GroupNotFound,
GroupContainsNul,
SetGroup(Errno),
UserNotFound,
UserContainsNul,
SetUser(Errno),
ChangeDirectory(Errno),
PathContainsNul,
OpenPidfile(Errno),
GetPidfileFlags(Errno),
SetPidfileFlags(Errno),
LockPidfile(Errno),
ChownPidfile(Errno),
OpenDevnull(Errno),
RedirectStreams(Errno),
CloseDevnull(Errno),
TruncatePidfile(Errno),
WritePid(Errno),
WritePidUnspecifiedError,
Chroot(Errno),
}
impl ErrorKind {
fn description(&self) -> &str {
match self {
ErrorKind::Fork(_) => "unable to fork",
ErrorKind::DetachSession(_) => "unable to create new session",
ErrorKind::GroupNotFound => "unable to resolve group name to group id",
ErrorKind::GroupContainsNul => "group option contains NUL",
ErrorKind::SetGroup(_) => "unable to set group",
ErrorKind::UserNotFound => "unable to resolve user name to user id",
ErrorKind::UserContainsNul => "user option contains NUL",
ErrorKind::SetUser(_) => "unable to set user",
ErrorKind::ChangeDirectory(_) => "unable to change directory",
ErrorKind::PathContainsNul => "pid_file option contains NUL",
ErrorKind::OpenPidfile(_) => "unable to open pid file",
ErrorKind::GetPidfileFlags(_) => "unable get pid file flags",
ErrorKind::SetPidfileFlags(_) => "unable set pid file flags",
ErrorKind::LockPidfile(_) => "unable to lock pid file",
ErrorKind::ChownPidfile(_) => "unable to chown pid file",
ErrorKind::OpenDevnull(_) => "unable to open /dev/null",
ErrorKind::RedirectStreams(_) => "unable to redirect standard streams to /dev/null",
ErrorKind::CloseDevnull(_) => "unable to close /dev/null",
ErrorKind::TruncatePidfile(_) => "unable to truncate pid file",
ErrorKind::WritePid(_) => "unable to write self pid to pid file",
ErrorKind::WritePidUnspecifiedError => {
"unable to write self pid to pid file due to unknown reason"
}
ErrorKind::Chroot(_) => "unable to chroot into directory",
}
}
fn errno(&self) -> Option<Errno> {
match self {
ErrorKind::Fork(errno) => Some(*errno),
ErrorKind::DetachSession(errno) => Some(*errno),
ErrorKind::GroupNotFound => None,
ErrorKind::GroupContainsNul => None,
ErrorKind::SetGroup(errno) => Some(*errno),
ErrorKind::UserNotFound => None,
ErrorKind::UserContainsNul => None,
ErrorKind::SetUser(errno) => Some(*errno),
ErrorKind::ChangeDirectory(errno) => Some(*errno),
ErrorKind::PathContainsNul => None,
ErrorKind::OpenPidfile(errno) => Some(*errno),
ErrorKind::GetPidfileFlags(errno) => Some(*errno),
ErrorKind::SetPidfileFlags(errno) => Some(*errno),
ErrorKind::LockPidfile(errno) => Some(*errno),
ErrorKind::ChownPidfile(errno) => Some(*errno),
ErrorKind::OpenDevnull(errno) => Some(*errno),
ErrorKind::RedirectStreams(errno) => Some(*errno),
ErrorKind::CloseDevnull(errno) => Some(*errno),
ErrorKind::TruncatePidfile(errno) => Some(*errno),
ErrorKind::WritePid(errno) => Some(*errno),
ErrorKind::WritePidUnspecifiedError => None,
ErrorKind::Chroot(errno) => Some(*errno),
}
}
}
impl std::fmt::Display for ErrorKind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.write_str(self.description())?;
if let Some(errno) = self.errno() {
write!(f, ", errno {}", errno)?
}
Ok(())
}
}
impl std::error::Error for ErrorKind {}
impl std::fmt::Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.kind)
}
}
impl std::error::Error for Error {}
impl From<ErrorKind> for Error {
fn from(kind: ErrorKind) -> Self {
Self { kind }
}
}
pub trait Num {
fn is_err(&self) -> bool;
}
impl Num for i8 {
fn is_err(&self) -> bool {
*self == -1
}
}
impl Num for i16 {
fn | (&self) -> bool {
*self == -1
}
}
impl Num for i32 {
fn is_err(&self) -> bool {
*self == -1
}
}
impl Num for i64 {
fn is_err(&self) -> bool {
*self == -1
}
}
impl Num for isize {
fn is_err(&self) -> bool {
*self == -1
}
}
pub fn check_err<N: Num, F: FnOnce(Errno) -> ErrorKind>(ret: N, f: F) -> Result<N, ErrorKind> {
if ret.is_err() {
Err(f(errno()))
} else {
Ok(ret)
}
}
pub fn errno() -> Errno {
std::io::Error::last_os_error()
.raw_os_error()
.expect("errno")
}
| is_err |
main.go | package main
import (
"fmt"
"log"
"time"
"github.com/enjuus/oriebot/models"
tb "github.com/tucnak/telebot"
)
// Env is the main struct being passed ot all commands
type Env struct {
db models.Datastore
bot *tb.Bot
LastFMAPIKey string
LastFMSecret string
OpenWeatherAPI string
YandexAPI string
ListOfAuth [2]int64
Ticker *time.Ticker
QuitCall chan struct{}
MainChannel *tb.Chat
}
func | () {
b, err := tb.NewBot(tb.Settings{
Token: TGToken,
Poller: &tb.LongPoller{Timeout: 10 * time.Second},
})
db, err := models.NewDB("bot.db")
if err != nil {
log.Panic(err)
}
ticker := time.NewTicker(60 * time.Minute)
quit := make(chan struct{})
env := &Env{db, b, LastFMAPIKey, LastFMSecret, OpenWeatherAPI, YandexAPI, listOfAuth, ticker, quit, &tb.Chat{}}
if err != nil {
log.Fatal(err)
return
}
fmt.Println("running")
b.Handle("/chat", env.HandleChatID)
b.Handle("/quote", env.HandleQuotes)
b.Handle("/lastfm", env.HandleLastFM)
b.Handle("/topalbums", env.HandleLastFMTopAlbums)
b.Handle("/weather", env.HandleWeather)
b.Handle("/uwu", env.HandleUWU)
b.Handle("/spurdo", env.HandleSpurdo)
b.Handle("/blog", env.HandleBlog)
b.Handle("/tl", env.HandleTranslate)
b.Handle("/decide", env.HandleDecide)
b.Handle("/turnips", env.HandleTurnips)
b.Handle("/terms", env.HandleTerms)
b.Handle("/term", env.HandleTerm)
b.Handle("/helth", env.HandleHelth)
b.Handle("/unhelth", env.HandleNoMoreHelth)
b.Handle("/starthelth", env.HandleStartHelth)
b.Handle("/stophelth", env.HandleStopHelth)
b.Handle(tb.OnText, env.HandleTermCount)
b.Start()
}
| main |
doc.go | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Code generated by protoc-gen-go_gapic. DO NOT EDIT.
// Package automl is an auto-generated package for the
// Cloud AutoML API.
//
// Train high-quality custom machine learning models with minimum effort and
// machine learning expertise.
//
// Use of Context
//
// The ctx passed to NewClient is used for authentication requests and
// for creating the underlying connection, but is not used for subsequent calls.
// Individual methods on the client use the ctx given to them.
//
// To close the open connection, use the Close() method.
//
// For information about setting deadlines, reusing contexts, and more
// please visit pkg.go.dev/cloud.google.com/go.
package automl // import "cloud.google.com/go/automl/apiv1"
import (
"context"
"os"
"runtime"
"strconv"
"strings"
"unicode"
"google.golang.org/api/option"
"google.golang.org/grpc/metadata"
)
// For more information on implementing a client constructor hook, see
// https://github.com/googleapis/google-cloud-go/wiki/Customizing-constructors.
type clientHookParams struct{}
type clientHook func(context.Context, clientHookParams) ([]option.ClientOption, error)
const versionClient = "20210506"
func insertMetadata(ctx context.Context, mds ...metadata.MD) context.Context {
out, _ := metadata.FromOutgoingContext(ctx)
out = out.Copy()
for _, md := range mds {
for k, v := range md {
out[k] = append(out[k], v...)
}
}
return metadata.NewOutgoingContext(ctx, out)
}
func checkDisableDeadlines() (bool, error) {
raw, ok := os.LookupEnv("GOOGLE_API_GO_EXPERIMENTAL_DISABLE_DEFAULT_DEADLINE")
if !ok {
return false, nil
}
b, err := strconv.ParseBool(raw)
return b, err
}
// DefaultAuthScopes reports the default set of authentication scopes to use with this package.
func DefaultAuthScopes() []string |
// versionGo returns the Go runtime version. The returned string
// has no whitespace, suitable for reporting in header.
func versionGo() string {
const develPrefix = "devel +"
s := runtime.Version()
if strings.HasPrefix(s, develPrefix) {
s = s[len(develPrefix):]
if p := strings.IndexFunc(s, unicode.IsSpace); p >= 0 {
s = s[:p]
}
return s
}
notSemverRune := func(r rune) bool {
return !strings.ContainsRune("0123456789.", r)
}
if strings.HasPrefix(s, "go1") {
s = s[2:]
var prerelease string
if p := strings.IndexFunc(s, notSemverRune); p >= 0 {
s, prerelease = s[:p], s[p:]
}
if strings.HasSuffix(s, ".") {
s += "0"
} else if strings.Count(s, ".") < 2 {
s += ".0"
}
if prerelease != "" {
s += "-" + prerelease
}
return s
}
return "UNKNOWN"
}
| {
return []string{
"https://www.googleapis.com/auth/cloud-platform",
}
} |
helpers.py | """ collection of functions that are useful for several classes but non-specific to any """
import slicer
import logging
def getBinaryLabelmapRepresentation(segmentationNode, segmentID: str):
segmentLabelmap = slicer.vtkOrientedImageData()
segmentationNode.GetBinaryLabelmapRepresentation(segmentID, segmentLabelmap)
return segmentLabelmap
def getSpecificHeartValveModelNodes(phases: list):
heartValveModelNodes = []
for phase in phases:
try:
heartValveModelNodes.extend(list(getValveModelNodesMatchingPhase(phase)))
except ValueError as exc:
logging.warning(exc)
return heartValveModelNodes
def getSpecificHeartValveModelNodesMatchingPhaseAndType(phases: list, valveType: str, sort:bool=True):
valveModels = []
for valveModel in getAllHeartValveModelNodes():
if valveModel.getValveType() == valveType and getValvePhaseShortName(valveModel) in phases:
valveModels.append(valveModel)
if sort:
return sorted(valveModels, key=lambda valveModel: phases.index(getValvePhaseShortName(valveModel)))
return valveModels
def getSpecificHeartValveMeasurementNodes(identifier):
  """Return measurement nodes whose preset reports the given quantification-results identifier."""
  quantificationLogic = slicer.modules.valvequantification.widgetRepresentation().self().logic
  result = []
  for node in getAllHeartValveMeasurementNodes():
    preset = quantificationLogic.getMeasurementPresetByMeasurementNode(node)
    if preset and preset.QUANTIFICATION_RESULTS_IDENTIFIER == identifier:
      result.append(node)
  return result
def getFirstValveModelNodeMatchingPhase(phase='MS'):
  """Return the first valve model whose cardiac cycle phase short name equals `phase`.

  Raises:
    ValueError: if no valve model matches the requested phase.
  """
  # Reuse the shared generator so the phase-matching logic lives in one place,
  # consistent with getFirstValveModelNodeMatchingPhaseAndType below.
  for valveModelNode in getValveModelNodesMatchingPhase(phase):
    return valveModelNode
  raise ValueError("Could not find valve for phase %s" % phase)
def getValveModelNodesMatchingPhase(phase):
  """Yield every valve model whose cardiac cycle phase short name equals `phase`."""
  yield from (model for model in getAllHeartValveModelNodes()
              if getValvePhaseShortName(model) == phase)
def getFirstValveModelNodeMatchingPhaseAndType(phase, valveType):
  """Return the first valve model matching both the phase and the valve type.

  Raises:
    ValueError: if no matching valve model exists.
  """
  for candidate in getValveModelNodesMatchingPhase(phase):
    if candidate.getValveType() != valveType:
      continue
    return candidate
  raise ValueError(f"Could not find valve with type {valveType} for phase {phase}")
def getValveModelNodesMatchingPhaseAndType(phase, valveType):
  """Return a list of all valve models matching both the phase and the valve type."""
  return [model for model in getValveModelNodesMatchingPhase(phase)
          if model.getValveType() == valveType]
def getAllHeartValveModelNodes():
  """Return a lazy iterable of valve model wrappers for every HeartValve node."""
  import HeartValves
  return (HeartValves.getValveModel(node) for node in getAllHeartValveNodes())
def getAllHeartValveNodes():
  """Return all scripted-module nodes tagged with ModuleName == 'HeartValve'."""
  return getAllModuleSpecificScriptableNodes('HeartValve')
def getAllHeartValveMeasurementNodes():
  """Return all scripted-module nodes tagged with ModuleName == 'HeartValveMeasurement'."""
  return getAllModuleSpecificScriptableNodes('HeartValveMeasurement')
def getAllModuleSpecificScriptableNodes(moduleName):
  """Return a lazy iterable of scripted-module nodes whose ModuleName attribute is `moduleName`."""
  scriptedNodes = slicer.util.getNodesByClass('vtkMRMLScriptedModuleNode')
  return (node for node in scriptedNodes
          if node.GetAttribute('ModuleName') == moduleName)
def getHeartValveMeasurementNode(phase):
  """Return the measurement node associated with exactly one phase equal to `phase`, or None."""
  for node in getAllHeartValveMeasurementNodes():
    shortNames = getMeasurementCardiacCyclePhaseShortNames(node)
    if len(shortNames) == 1 and shortNames[0] == phase:
      return node
def | (measurementNode):
import ValveQuantification
valveQuantificationLogic = ValveQuantification.ValveQuantificationLogic()
return valveQuantificationLogic.getMeasurementCardiacCyclePhaseShortNames(measurementNode)
def getAllFilesWithExtension(directory, extension, file_name_only=False):
  """Recursively collect files under `directory` whose names match '*<extension>'.

  Returns bare file names when `file_name_only` is True, full paths otherwise.
  """
  import os
  import fnmatch
  pattern = '*{}'.format(extension)
  collected = []
  for root, _dirnames, filenames in os.walk(directory):
    matched = fnmatch.filter(filenames, pattern)
    collected.extend(name if file_name_only else os.path.join(root, name) for name in matched)
  return collected
def isMRBFile(mrb_file):
  """Return True when `mrb_file` is an existing file whose name ends in '.mrb' (case-insensitive)."""
  import os
  existing_file = os.path.isfile(mrb_file)
  return existing_file and mrb_file.lower().endswith(".mrb")
def getValveModelForSegmentationNode(segmentationNode):
  """Return the valve model whose leaflet segmentation is `segmentationNode` (identity match), or None."""
  return next((valveModel for valveModel in getAllHeartValveModelNodes()
               if valveModel.getLeafletSegmentationNode() is segmentationNode), None)
def getValvePhaseShortName(valveModel):
cardiacPhase = valveModel.getCardiacCyclePhase()
cardiacCyclePhasePreset = valveModel.cardiacCyclePhasePresets[cardiacPhase]
return cardiacCyclePhasePreset['shortname'] | getMeasurementCardiacCyclePhaseShortNames |
lib.rs | //! [![github]](https://github.com/dtolnay/syn) [![crates-io]](https://crates.io/crates/syn) [![docs-rs]](crate)
//!
//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logoColor=white&logo=data:image/svg+xml;base64,PHN2ZyByb2xlPSJpbWciIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyIgdmlld0JveD0iMCAwIDUxMiA1MTIiPjxwYXRoIGZpbGw9IiNmNWY1ZjUiIGQ9Ik00ODguNiAyNTAuMkwzOTIgMjE0VjEwNS41YzAtMTUtOS4zLTI4LjQtMjMuNC0zMy43bC0xMDAtMzcuNWMtOC4xLTMuMS0xNy4xLTMuMS0yNS4zIDBsLTEwMCAzNy41Yy0xNC4xIDUuMy0yMy40IDE4LjctMjMuNCAzMy43VjIxNGwtOTYuNiAzNi4yQzkuMyAyNTUuNSAwIDI2OC45IDAgMjgzLjlWMzk0YzAgMTMuNiA3LjcgMjYuMSAxOS45IDMyLjJsMTAwIDUwYzEwLjEgNS4xIDIyLjEgNS4xIDMyLjIgMGwxMDMuOS01MiAxMDMuOSA1MmMxMC4xIDUuMSAyMi4xIDUuMSAzMi4yIDBsMTAwLTUwYzEyLjItNi4xIDE5LjktMTguNiAxOS45LTMyLjJWMjgzLjljMC0xNS05LjMtMjguNC0yMy40LTMzLjd6TTM1OCAyMTQuOGwtODUgMzEuOXYtNjguMmw4NS0zN3Y3My4zek0xNTQgMTA0LjFsMTAyLTM4LjIgMTAyIDM4LjJ2LjZsLTEwMiA0MS40LTEwMi00MS40di0uNnptODQgMjkxLjFsLTg1IDQyLjV2LTc5LjFsODUtMzguOHY3NS40em0wLTExMmwtMTAyIDQxLjQtMTAyLTQxLjR2LS42bDEwMi0zOC4yIDEwMiAzOC4ydi42em0yNDAgMTEybC04NSA0Mi41di03OS4xbDg1LTM4Ljh2NzUuNHptMC0xMTJsLTEwMiA0MS40LTEwMi00MS40di0uNmwxMDItMzguMiAxMDIgMzguMnYuNnoiPjwvcGF0aD48L3N2Zz4K
//!
//! <br>
//!
//! Syn is a parsing library for parsing a stream of Rust tokens into a syntax
//! tree of Rust source code.
//!
//! Currently this library is geared toward use in Rust procedural macros, but
//! contains some APIs that may be useful more generally.
//!
//! - **Data structures** — Syn provides a complete syntax tree that can
//! represent any valid Rust source code. The syntax tree is rooted at
//! [`syn::File`] which represents a full source file, but there are other
//! entry points that may be useful to procedural macros including
//! [`syn::Item`], [`syn::Expr`] and [`syn::Type`].
//!
//! - **Derives** — Of particular interest to derive macros is
//! [`syn::DeriveInput`] which is any of the three legal input items to a
//! derive macro. An example below shows using this type in a library that can
//! derive implementations of a user-defined trait.
//!
//! - **Parsing** — Parsing in Syn is built around [parser functions] with the
//! signature `fn(ParseStream) -> Result<T>`. Every syntax tree node defined
//! by Syn is individually parsable and may be used as a building block for
//! custom syntaxes, or you may dream up your own brand new syntax without
//! involving any of our syntax tree types.
//!
//! - **Location information** — Every token parsed by Syn is associated with a
//! `Span` that tracks line and column information back to the source of that
//! token. These spans allow a procedural macro to display detailed error
//! messages pointing to all the right places in the user's code. There is an
//! example of this below.
//!
//! - **Feature flags** — Functionality is aggressively feature gated so your
//! procedural macros enable only what they need, and do not pay in compile
//! time for all the rest.
//!
//! [`syn::File`]: File
//! [`syn::Item`]: Item
//! [`syn::Expr`]: Expr
//! [`syn::Type`]: Type
//! [`syn::DeriveInput`]: DeriveInput
//! [parser functions]: mod@parse
//!
//! <br>
//!
//! # Example of a derive macro
//!
//! The canonical derive macro using Syn looks like this. We write an ordinary
//! Rust function tagged with a `proc_macro_derive` attribute and the name of
//! the trait we are deriving. Any time that derive appears in the user's code,
//! the Rust compiler passes their data structure as tokens into our macro. We
//! get to execute arbitrary Rust code to figure out what to do with those
//! tokens, then hand some tokens back to the compiler to compile into the
//! user's crate.
//!
//! [`TokenStream`]: proc_macro::TokenStream
//!
//! ```toml
//! [dependencies]
//! syn = "1.0"
//! quote = "1.0"
//!
//! [lib]
//! proc-macro = true
//! ```
//!
//! ```
//! # extern crate proc_macro;
//! #
//! use proc_macro::TokenStream;
//! use quote::quote;
//! use syn::{parse_macro_input, DeriveInput};
//!
//! # const IGNORE_TOKENS: &str = stringify! {
//! #[proc_macro_derive(MyMacro)]
//! # };
//! pub fn my_macro(input: TokenStream) -> TokenStream {
//! // Parse the input tokens into a syntax tree
//! let input = parse_macro_input!(input as DeriveInput);
//!
//! // Build the output, possibly using quasi-quotation
//! let expanded = quote! {
//! // ...
//! };
//!
//! // Hand the output tokens back to the compiler
//! TokenStream::from(expanded)
//! }
//! ```
//!
//! The [`heapsize`] example directory shows a complete working implementation
//! of a derive macro. It works on any Rust compiler 1.31+. The example derives
//! a `HeapSize` trait which computes an estimate of the amount of heap memory
//! owned by a value.
//!
//! [`heapsize`]: https://github.com/dtolnay/syn/tree/master/examples/heapsize
//!
//! ```
//! pub trait HeapSize {
//! /// Total number of bytes of heap memory owned by `self`.
//! fn heap_size_of_children(&self) -> usize;
//! }
//! ```
//!
//! The derive macro allows users to write `#[derive(HeapSize)]` on data
//! structures in their program.
//!
//! ```
//! # const IGNORE_TOKENS: &str = stringify! {
//! #[derive(HeapSize)]
//! # };
//! struct Demo<'a, T: ?Sized> {
//! a: Box<T>,
//! b: u8,
//! c: &'a str,
//! d: String,
//! }
//! ```
//!
//! <p><br></p>
//!
//! # Spans and error reporting
//!
//! The token-based procedural macro API provides great control over where the
//! compiler's error messages are displayed in user code. Consider the error the
//! user sees if one of their field types does not implement `HeapSize`.
//!
//! ```
//! # const IGNORE_TOKENS: &str = stringify! {
//! #[derive(HeapSize)]
//! # };
//! struct Broken {
//! ok: String,
//! bad: std::thread::Thread,
//! }
//! ```
//!
//! By tracking span information all the way through the expansion of a
//! procedural macro as shown in the `heapsize` example, token-based macros in
//! Syn are able to trigger errors that directly pinpoint the source of the
//! problem.
//!
//! ```text
//! error[E0277]: the trait bound `std::thread::Thread: HeapSize` is not satisfied
//! --> src/main.rs:7:5
//! |
//! 7 | bad: std::thread::Thread,
//! | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `HeapSize` is not implemented for `Thread`
//! ```
//!
//! <br>
//!
//! # Parsing a custom syntax
//!
//! The [`lazy-static`] example directory shows the implementation of a
//! `functionlike!(...)` procedural macro in which the input tokens are parsed
//! using Syn's parsing API.
//!
//! [`lazy-static`]: https://github.com/dtolnay/syn/tree/master/examples/lazy-static
//!
//! The example reimplements the popular `lazy_static` crate from crates.io as a
//! procedural macro.
//!
//! ```
//! # macro_rules! lazy_static {
//! # ($($tt:tt)*) => {}
//! # }
//! #
//! lazy_static! {
//! static ref USERNAME: Regex = Regex::new("^[a-z0-9_-]{3,16}$").unwrap();
//! }
//! ```
//!
//! The implementation shows how to trigger custom warnings and error messages
//! on the macro input.
//!
//! ```text
//! warning: come on, pick a more creative name
//! --> src/main.rs:10:16
//! |
//! 10 | static ref FOO: String = "lazy_static".to_owned();
//! | ^^^
//! ```
//!
//! <br>
//!
//! # Testing
//!
//! When testing macros, we often care not just that the macro can be used
//! successfully but also that when the macro is provided with invalid input it
//! produces maximally helpful error messages. Consider using the [`trybuild`]
//! crate to write tests for errors that are emitted by your macro or errors
//! detected by the Rust compiler in the expanded code following misuse of the
//! macro. Such tests help avoid regressions from later refactors that
//! mistakenly make an error no longer trigger or be less helpful than it used
//! to be.
//!
//! [`trybuild`]: https://github.com/dtolnay/trybuild
//!
//! <br>
//!
//! # Debugging
//!
//! When developing a procedural macro it can be helpful to look at what the
//! generated code looks like. Use `cargo rustc -- -Zunstable-options
//! --pretty=expanded` or the [`cargo expand`] subcommand.
//!
//! [`cargo expand`]: https://github.com/dtolnay/cargo-expand
//!
//! To show the expanded code for some crate that uses your procedural macro,
//! run `cargo expand` from that crate. To show the expanded code for one of
//! your own test cases, run `cargo expand --test the_test_case` where the last
//! argument is the name of the test file without the `.rs` extension.
//!
//! This write-up by Brandon W Maister discusses debugging in more detail:
//! [Debugging Rust's new Custom Derive system][debugging].
//!
//! [debugging]: https://quodlibetor.github.io/posts/debugging-rusts-new-custom-derive-system/
//!
//! <br>
//!
//! # Optional features
//!
//! Syn puts a lot of functionality behind optional features in order to
//! optimize compile time for the most common use cases. The following features
//! are available.
//!
//! - **`derive`** *(enabled by default)* — Data structures for representing the
//! possible input to a derive macro, including structs and enums and types.
//! - **`full`** — Data structures for representing the syntax tree of all valid
//! Rust source code, including items and expressions.
//! - **`parsing`** *(enabled by default)* — Ability to parse input tokens into
//! a syntax tree node of a chosen type.
//! - **`printing`** *(enabled by default)* — Ability to print a syntax tree
//! node as tokens of Rust source code.
//! - **`visit`** — Trait for traversing a syntax tree.
//! - **`visit-mut`** — Trait for traversing and mutating in place a syntax
//! tree.
//! - **`fold`** — Trait for transforming an owned syntax tree.
//! - **`clone-impls`** *(enabled by default)* — Clone impls for all syntax tree
//! types.
//! - **`extra-traits`** — Debug, Eq, PartialEq, Hash impls for all syntax tree
//! types.
//! - **`proc-macro`** *(enabled by default)* — Runtime dependency on the
//! dynamic library libproc_macro from rustc toolchain.
// Syn types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/syn/1.0.53")]
#![cfg_attr(doc_cfg, feature(doc_cfg))]
#![deny(clippy::all, clippy::pedantic)]
// Ignored clippy lints.
#![allow(
clippy::blocks_in_if_conditions,
clippy::cognitive_complexity,
clippy::doc_markdown,
clippy::eval_order_dependence,
clippy::inherent_to_string,
clippy::large_enum_variant,
clippy::manual_non_exhaustive,
clippy::manual_strip,
clippy::match_like_matches_macro,
clippy::match_on_vec_items,
clippy::needless_doctest_main,
clippy::needless_pass_by_value,
clippy::never_loop,
clippy::suspicious_op_assign_impl,
clippy::too_many_arguments,
clippy::trivially_copy_pass_by_ref,
clippy::unnecessary_unwrap
)]
// Ignored clippy_pedantic lints.
#![allow(
clippy::cast_possible_truncation,
clippy::default_trait_access,
clippy::empty_enum,
clippy::expl_impl_clone_on_copy,
clippy::if_not_else,
clippy::items_after_statements,
clippy::match_same_arms,
clippy::missing_errors_doc,
clippy::module_name_repetitions,
clippy::must_use_candidate,
clippy::option_if_let_else,
clippy::shadow_unrelated,
clippy::similar_names,
clippy::single_match_else,
clippy::too_many_lines,
clippy::unseparated_literal_suffix,
clippy::use_self,
clippy::used_underscore_binding,
clippy::wildcard_imports
)]
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "proc-macro"
))]
extern crate proc_macro;
extern crate proc_macro2;
extern crate unicode_xid;
#[cfg(feature = "printing")]
extern crate quote;
#[macro_use]
mod macros;
// Not public API.
#[cfg(feature = "parsing")]
#[doc(hidden)]
#[macro_use]
pub mod group;
#[macro_use]
pub mod token;
mod ident;
pub use crate::ident::Ident;
#[cfg(any(feature = "full", feature = "derive"))]
mod attr;
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::attr::{
AttrStyle, Attribute, AttributeArgs, Meta, MetaList, MetaNameValue, NestedMeta,
};
mod bigint;
#[cfg(any(feature = "full", feature = "derive"))]
mod data;
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::data::{
Field, Fields, FieldsNamed, FieldsUnnamed, Variant, VisCrate, VisPublic, VisRestricted,
Visibility,
};
#[cfg(any(feature = "full", feature = "derive"))]
mod expr;
#[cfg(feature = "full")]
pub use crate::expr::{
Arm, FieldValue, GenericMethodArgument, Label, MethodTurbofish, RangeLimits,
};
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::expr::{
Expr, ExprArray, ExprAssign, ExprAssignOp, ExprAsync, ExprAwait, ExprBinary, ExprBlock,
ExprBox, ExprBreak, ExprCall, ExprCast, ExprClosure, ExprContinue, ExprField, ExprForLoop,
ExprGroup, ExprIf, ExprIndex, ExprLet, ExprLit, ExprLoop, ExprMacro, ExprMatch, ExprMethodCall,
ExprParen, ExprPath, ExprRange, ExprReference, ExprRepeat, ExprReturn, ExprStruct, ExprTry,
ExprTryBlock, ExprTuple, ExprType, ExprUnary, ExprUnsafe, ExprWhile, ExprYield, Index, Member,
};
#[cfg(any(feature = "full", feature = "derive"))]
mod generics;
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::generics::{
BoundLifetimes, ConstParam, GenericParam, Generics, LifetimeDef, PredicateEq,
PredicateLifetime, PredicateType, TraitBound, TraitBoundModifier, TypeParam, TypeParamBound,
WhereClause, WherePredicate,
};
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
pub use crate::generics::{ImplGenerics, Turbofish, TypeGenerics};
#[cfg(feature = "full")]
mod item;
#[cfg(feature = "full")]
pub use crate::item::{
FnArg, ForeignItem, ForeignItemFn, ForeignItemMacro, ForeignItemStatic, ForeignItemType,
ImplItem, ImplItemConst, ImplItemMacro, ImplItemMethod, ImplItemType, Item, ItemConst,
ItemEnum, ItemExternCrate, ItemFn, ItemForeignMod, ItemImpl, ItemMacro, ItemMacro2, ItemMod,
ItemStatic, ItemStruct, ItemTrait, ItemTraitAlias, ItemType, ItemUnion, ItemUse, Receiver,
Signature, TraitItem, TraitItemConst, TraitItemMacro, TraitItemMethod, TraitItemType, UseGlob,
UseGroup, UseName, UsePath, UseRename, UseTree,
};
#[cfg(feature = "full")]
mod file;
#[cfg(feature = "full")]
pub use crate::file::File;
mod lifetime;
pub use crate::lifetime::Lifetime;
mod lit;
pub use crate::lit::{
Lit, LitBool, LitByte, LitByteStr, LitChar, LitFloat, LitInt, LitStr, StrStyle,
};
#[cfg(any(feature = "full", feature = "derive"))]
mod mac;
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::mac::{Macro, MacroDelimiter};
#[cfg(any(feature = "full", feature = "derive"))]
mod derive;
#[cfg(feature = "derive")]
pub use crate::derive::{Data, DataEnum, DataStruct, DataUnion, DeriveInput};
#[cfg(any(feature = "full", feature = "derive"))]
mod op;
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::op::{BinOp, UnOp};
#[cfg(feature = "full")]
mod stmt;
#[cfg(feature = "full")]
pub use crate::stmt::{Block, Local, Stmt};
#[cfg(any(feature = "full", feature = "derive"))]
mod ty;
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::ty::{
Abi, BareFnArg, ReturnType, Type, TypeArray, TypeBareFn, TypeGroup, TypeImplTrait, TypeInfer,
TypeMacro, TypeNever, TypeParen, TypePath, TypePtr, TypeReference, TypeSlice, TypeTraitObject,
TypeTuple, Variadic,
};
#[cfg(feature = "full")]
mod pat;
#[cfg(feature = "full")]
pub use crate::pat::{
FieldPat, Pat, PatBox, PatIdent, PatLit, PatMacro, PatOr, PatPath, PatRange, PatReference,
PatRest, PatSlice, PatStruct, PatTuple, PatTupleStruct, PatType, PatWild,
};
#[cfg(any(feature = "full", feature = "derive"))]
mod path;
#[cfg(any(feature = "full", feature = "derive"))]
pub use crate::path::{
AngleBracketedGenericArguments, Binding, Constraint, GenericArgument,
ParenthesizedGenericArguments, Path, PathArguments, PathSegment, QSelf,
};
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod buffer;
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod ext;
pub mod punctuated;
#[cfg(all(any(feature = "full", feature = "derive"), feature = "extra-traits"))]
mod tt;
// Not public API except the `parse_quote!` macro.
#[cfg(feature = "parsing")]
#[doc(hidden)]
pub mod parse_quote;
// Not public API except the `parse_macro_input!` macro.
#[cfg(all(
not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
feature = "parsing",
feature = "proc-macro"
))]
#[doc(hidden)]
pub mod parse_macro_input;
#[cfg(all(feature = "parsing", feature = "printing"))]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "printing"))))]
pub mod spanned;
#[cfg(all(feature = "parsing", feature = "full"))]
mod whitespace;
mod gen {
/// Syntax tree traversal to walk a shared borrow of a syntax tree.
///
/// Each method of the [`Visit`] trait is a hook that can be overridden to
/// customize the behavior when visiting the corresponding type of node. By
/// default, every method recursively visits the substructure of the input
/// by invoking the right visitor method of each of its fields.
///
/// [`Visit`]: visit::Visit
///
/// ```
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
/// #
/// pub trait Visit<'ast> {
/// /* ... */
///
/// fn visit_expr_binary(&mut self, node: &'ast ExprBinary) {
/// visit_expr_binary(self, node);
/// }
///
/// /* ... */
/// # fn visit_attribute(&mut self, node: &'ast Attribute);
/// # fn visit_expr(&mut self, node: &'ast Expr);
/// # fn visit_bin_op(&mut self, node: &'ast BinOp);
/// }
///
/// pub fn visit_expr_binary<'ast, V>(v: &mut V, node: &'ast ExprBinary)
/// where
/// V: Visit<'ast> + ?Sized,
/// {
/// for attr in &node.attrs {
/// v.visit_attribute(attr);
/// }
/// v.visit_expr(&*node.left);
/// v.visit_bin_op(&node.op);
/// v.visit_expr(&*node.right);
/// }
///
/// /* ... */
/// ```
///
/// *This module is available only if Syn is built with the `"visit"` feature.*
///
/// <br>
///
/// # Example
///
/// This visitor will print the name of every freestanding function in the
/// syntax tree, including nested functions.
///
/// ```
/// // [dependencies]
/// // quote = "1.0"
/// // syn = { version = "1.0", features = ["full", "visit"] }
///
/// use quote::quote;
/// use syn::visit::{self, Visit};
/// use syn::{File, ItemFn};
///
/// struct FnVisitor;
///
/// impl<'ast> Visit<'ast> for FnVisitor {
/// fn visit_item_fn(&mut self, node: &'ast ItemFn) {
/// println!("Function with name={}", node.sig.ident);
///
/// // Delegate to the default impl to visit any nested functions.
/// visit::visit_item_fn(self, node);
/// }
/// }
///
/// fn main() {
/// let code = quote! {
/// pub fn f() {
/// fn g() {}
/// }
/// };
///
/// let syntax_tree: File = syn::parse2(code).unwrap();
/// FnVisitor.visit_file(&syntax_tree);
/// }
/// ```
///
/// The `'ast` lifetime on the input references means that the syntax tree
/// outlives the complete recursive visit call, so the visitor is allowed to
/// hold on to references into the syntax tree.
///
/// ```
/// use quote::quote;
/// use syn::visit::{self, Visit};
/// use syn::{File, ItemFn};
///
/// struct FnVisitor<'ast> {
/// functions: Vec<&'ast ItemFn>,
/// }
///
/// impl<'ast> Visit<'ast> for FnVisitor<'ast> {
/// fn visit_item_fn(&mut self, node: &'ast ItemFn) {
/// self.functions.push(node);
/// visit::visit_item_fn(self, node);
/// }
/// }
///
/// fn main() {
/// let code = quote! {
/// pub fn f() {
/// fn g() {}
/// }
/// };
///
/// let syntax_tree: File = syn::parse2(code).unwrap();
/// let mut visitor = FnVisitor { functions: Vec::new() };
/// visitor.visit_file(&syntax_tree);
/// for f in visitor.functions {
/// println!("Function with name={}", f.sig.ident);
/// }
/// }
/// ```
#[cfg(feature = "visit")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "visit")))]
#[rustfmt::skip]
pub mod visit;
/// Syntax tree traversal to mutate an exclusive borrow of a syntax tree in
/// place.
///
/// Each method of the [`VisitMut`] trait is a hook that can be overridden
/// to customize the behavior when mutating the corresponding type of node.
/// By default, every method recursively visits the substructure of the
/// input by invoking the right visitor method of each of its fields.
///
/// [`VisitMut`]: visit_mut::VisitMut
///
/// ```
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
/// #
/// pub trait VisitMut {
/// /* ... */
///
/// fn visit_expr_binary_mut(&mut self, node: &mut ExprBinary) {
/// visit_expr_binary_mut(self, node);
/// }
///
/// /* ... */
/// # fn visit_attribute_mut(&mut self, node: &mut Attribute);
/// # fn visit_expr_mut(&mut self, node: &mut Expr);
/// # fn visit_bin_op_mut(&mut self, node: &mut BinOp);
/// }
///
/// pub fn visit_expr_binary_mut<V>(v: &mut V, node: &mut ExprBinary)
/// where
/// V: VisitMut + ?Sized,
/// {
/// for attr in &mut node.attrs {
/// v.visit_attribute_mut(attr);
/// }
/// v.visit_expr_mut(&mut *node.left);
/// v.visit_bin_op_mut(&mut node.op);
/// v.visit_expr_mut(&mut *node.right);
/// }
///
/// /* ... */
/// ```
///
/// *This module is available only if Syn is built with the `"visit-mut"`
/// feature.*
///
/// <br>
///
/// # Example
///
    /// This mut visitor replaces occurrences of u256-suffixed integer literals
/// like `999u256` with a macro invocation `bigint::u256!(999)`.
///
/// ```
/// // [dependencies]
/// // quote = "1.0"
/// // syn = { version = "1.0", features = ["full", "visit-mut"] }
///
/// use quote::quote;
/// use syn::visit_mut::{self, VisitMut};
/// use syn::{parse_quote, Expr, File, Lit, LitInt};
///
/// struct BigintReplace;
///
/// impl VisitMut for BigintReplace {
/// fn visit_expr_mut(&mut self, node: &mut Expr) {
/// if let Expr::Lit(expr) = &node {
/// if let Lit::Int(int) = &expr.lit {
/// if int.suffix() == "u256" {
/// let digits = int.base10_digits();
/// let unsuffixed: LitInt = syn::parse_str(digits).unwrap();
/// *node = parse_quote!(bigint::u256!(#unsuffixed));
/// return;
/// }
/// }
/// }
///
/// // Delegate to the default impl to visit nested expressions.
/// visit_mut::visit_expr_mut(self, node);
/// }
/// }
///
/// fn main() {
/// let code = quote! {
/// fn main() {
/// let _ = 999u256;
/// }
/// };
///
/// let mut syntax_tree: File = syn::parse2(code).unwrap();
/// BigintReplace.visit_file_mut(&mut syntax_tree);
/// println!("{}", quote!(#syntax_tree));
/// }
/// ```
#[cfg(feature = "visit-mut")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "visit-mut")))]
#[rustfmt::skip]
pub mod visit_mut;
/// Syntax tree traversal to transform the nodes of an owned syntax tree.
///
/// Each method of the [`Fold`] trait is a hook that can be overridden to
/// customize the behavior when transforming the corresponding type of node.
/// By default, every method recursively visits the substructure of the
/// input by invoking the right visitor method of each of its fields.
///
/// [`Fold`]: fold::Fold
///
/// ```
/// # use syn::{Attribute, BinOp, Expr, ExprBinary};
/// #
/// pub trait Fold {
/// /* ... */
///
/// fn fold_expr_binary(&mut self, node: ExprBinary) -> ExprBinary {
/// fold_expr_binary(self, node)
/// }
///
/// /* ... */
/// # fn fold_attribute(&mut self, node: Attribute) -> Attribute;
/// # fn fold_expr(&mut self, node: Expr) -> Expr;
/// # fn fold_bin_op(&mut self, node: BinOp) -> BinOp;
/// }
///
/// pub fn fold_expr_binary<V>(v: &mut V, node: ExprBinary) -> ExprBinary
/// where
/// V: Fold + ?Sized,
/// {
/// ExprBinary {
/// attrs: node
/// .attrs
/// .into_iter()
/// .map(|attr| v.fold_attribute(attr))
/// .collect(),
/// left: Box::new(v.fold_expr(*node.left)),
/// op: v.fold_bin_op(node.op),
/// right: Box::new(v.fold_expr(*node.right)),
/// }
/// }
///
/// /* ... */
/// ```
///
/// *This module is available only if Syn is built with the `"fold"` feature.*
///
/// <br>
///
/// # Example
///
    /// This fold inserts parentheses to fully parenthesize any expression.
///
/// ```
/// // [dependencies]
/// // quote = "1.0"
/// // syn = { version = "1.0", features = ["fold", "full"] }
///
/// use quote::quote;
/// use syn::fold::{fold_expr, Fold};
/// use syn::{token, Expr, ExprParen};
///
/// struct ParenthesizeEveryExpr;
///
/// impl Fold for ParenthesizeEveryExpr {
/// fn fold_expr(&mut self, expr: Expr) -> Expr {
/// Expr::Paren(ExprParen {
/// attrs: Vec::new(),
/// expr: Box::new(fold_expr(self, expr)),
/// paren_token: token::Paren::default(),
/// })
/// }
/// }
///
/// fn main() {
/// let code = quote! { a() + b(1) * c.d };
/// let expr: Expr = syn::parse2(code).unwrap();
/// let parenthesized = ParenthesizeEveryExpr.fold_expr(expr);
/// println!("{}", quote!(#parenthesized));
///
/// // Output: (((a)()) + (((b)((1))) * ((c).d)))
/// }
/// ```
#[cfg(feature = "fold")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "fold")))]
#[rustfmt::skip]
pub mod fold;
#[cfg(feature = "clone-impls")]
#[rustfmt::skip]
mod clone;
#[cfg(feature = "extra-traits")]
#[rustfmt::skip]
mod eq;
#[cfg(feature = "extra-traits")]
#[rustfmt::skip]
mod hash;
#[cfg(feature = "extra-traits")]
#[rustfmt::skip]
mod debug;
#[cfg(any(feature = "full", feature = "derive"))]
#[path = "../gen_helper.rs"]
mod helper;
}
pub use crate::gen::*;
// Not public API.
#[doc(hidden)]
pub mod export;
mod custom_keyword;
mod custom_punctuation;
mod sealed;
mod span;
mod thread;
#[cfg(feature = "parsing")]
mod lookahead;
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub mod parse;
#[cfg(feature = "full")]
mod reserved;
#[cfg(all(any(feature = "full", feature = "derive"), feature = "parsing"))]
mod verbatim;
#[cfg(all(any(feature = "full", feature = "derive"), feature = "printing"))]
mod print;
////////////////////////////////////////////////////////////////////////////////
// NOTE(review): zero-sized, lowercase-named marker type; presumably referenced by
// generated or sealed-trait code elsewhere in the crate — confirm before removing.
#[allow(dead_code, non_camel_case_types)]
struct private;
// https://github.com/rust-lang/rust/issues/62830
#[cfg(feature = "parsing")]
mod rustdoc_workaround {
pub use crate::parse::{self as parse_module};
}
////////////////////////////////////////////////////////////////////////////////
mod error;
pub use crate::error::{Error, Result};
/// Parse tokens of source code into the chosen syntax tree node.
///
/// This is preferred over parsing a string because tokens are able to preserve
/// information about where in the user's code they were originally written (the
/// "span" of the token), possibly allowing the compiler to produce better error
/// messages.
///
/// This function parses a `proc_macro::TokenStream` which is the type used for
/// interop with the compiler in a procedural macro. To parse a
/// `proc_macro2::TokenStream`, use [`syn::parse2`] instead.
///
/// [`syn::parse2`]: parse2
///
/// *This function is available only if Syn is built with both the `"parsing"` and
/// `"proc-macro"` features.*
///
/// # Examples
///
/// ```
/// # extern crate proc_macro;
/// #
/// use proc_macro::TokenStream;
/// use quote::quote;
/// use syn::DeriveInput;
///
/// # const IGNORE_TOKENS: &str = stringify! {
/// #[proc_macro_derive(MyMacro)]
/// # };
/// pub fn my_macro(input: TokenStream) -> TokenStream {
/// // Parse the tokens into a syntax tree
/// let ast: DeriveInput = syn::parse(input).unwrap();
///
/// // Build the output, possibly using quasi-quotation
/// let expanded = quote! {
/// /* ... */
/// };
///
/// // Convert into a token stream and return it
/// expanded.into()
/// }
/// ```
#[cfg(all(
    not(all(target_arch = "wasm32", any(target_os = "unknown", target_os = "wasi"))),
    feature = "parsing",
    feature = "proc-macro"
))]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "proc-macro"))))]
pub fn parse<T: parse::Parse>(tokens: proc_macro::TokenStream) -> Result<T> {
    // Thin wrapper: hand `T`'s own parse function to the shared `Parser` entry point.
    parse::Parser::parse(T::parse, tokens)
}
/// Parse a proc-macro2 token stream into the chosen syntax tree node.
///
/// This function parses a `proc_macro2::TokenStream` which is commonly useful
/// when the input comes from a node of the Syn syntax tree, for example the
/// body tokens of a [`Macro`] node. When in a procedural macro parsing the
/// `proc_macro::TokenStream` provided by the compiler, use [`syn::parse`]
/// instead.
///
/// [`syn::parse`]: parse()
///
/// *This function is available only if Syn is built with the `"parsing"` feature.*
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse2<T: parse::Parse>(tokens: proc_macro2::TokenStream) -> Result<T> {
    // Same delegation as `parse`, but for `proc_macro2::TokenStream` input.
    parse::Parser::parse2(T::parse, tokens)
}
/// Parse a string of Rust code into the chosen syntax tree node.
///
/// *This function is available only if Syn is built with the `"parsing"` feature.*
///
/// # Hygiene
///
/// Every span in the resulting syntax tree will be set to resolve at the macro
/// call site.
///
/// # Examples
///
/// ```
/// use syn::{Expr, Result};
///
/// fn run() -> Result<()> {
/// let code = "assert_eq!(u8::max_value(), 255)";
/// let expr = syn::parse_str::<Expr>(code)?;
/// println!("{:#?}", expr);
/// Ok(())
/// }
/// #
/// # run().unwrap();
/// ```
#[cfg(feature = "parsing")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "parsing")))]
pub fn parse_str<T: parse::Parse>(s: &str) -> Result<T> {
parse::Parser::parse_str(T::parse, s)
}
// FIXME the name parse_file makes it sound like you might pass in a path to a
// file, rather than the content.
/// Parse the content of a file of Rust code.
///
/// This is different from `syn::parse_str::<File>(content)` in two ways:
///
/// - It discards a leading byte order mark `\u{FEFF}` if the file has one.
/// - It preserves the shebang line of the file, such as `#!/usr/bin/env rustx`.
///
/// If present, either of these would be an error using `from_str`.
///
/// *This function is available only if Syn is built with the `"parsing"` and
/// `"full"` features.*
///
/// # Examples
///
/// ```no_run
/// use std::error::Error;
/// use std::fs::File;
/// use std::io::Read;
///
/// fn run() -> Result<(), Box<Error>> {
/// let mut file = File::open("path/to/code.rs")?;
/// let mut content = String::new();
/// file.read_to_string(&mut content)?;
///
/// let ast = syn::parse_file(&content)?;
/// if let Some(shebang) = ast.shebang {
/// println!("{}", shebang);
/// }
/// println!("{} items", ast.items.len());
///
/// Ok(())
/// }
/// #
/// # run().unwrap();
/// ```
#[cfg(all(feature = "parsing", feature = "full"))]
#[cfg_attr(doc_cfg, doc(cfg(all(feature = "parsing", feature = "full"))))]
pub fn parse_file(mut content: &str) | File> {
// Strip the BOM if it is present
const BOM: &str = "\u{feff}";
if content.starts_with(BOM) {
content = &content[BOM.len()..];
}
let mut shebang = None;
if content.starts_with("#!") {
let rest = whitespace::skip(&content[2..]);
if !rest.starts_with('[') {
if let Some(idx) = content.find('\n') {
shebang = Some(content[..idx].to_string());
content = &content[idx..];
} else {
shebang = Some(content.to_string());
content = "";
}
}
}
let mut file: File = parse_str(content)?;
file.shebang = shebang;
Ok(file)
}
| -> Result< |
wordlebot.rs | use std::hash::Hash;
use std::{cmp, fs, iter};
use hashbrown::{HashMap, HashSet};
use itertools::Itertools;
use rayon::prelude::*;
use structopt::StructOpt;
fn _score<const N: usize>(guess: &Word<N>, answer: &Word<N>) -> Score<N> {
let len = answer.len();
assert_eq!(len, guess.len());
assert_eq!(len, answer.len());
let mut result: Score<N> = [0; N];
let mut remaining = *answer;
for (i, (g, a)) in guess.iter().zip(answer.iter()).enumerate() {
if g == a {
result[i] = 3;
remaining[i] = ' ';
}
}
for (i, g) in guess.iter().enumerate() {
if result[i] == 3 {
continue;
}
match remaining.iter().position(|v| v == g) {
Some(j) => {
result[i] = 2;
remaining[j] = ' ';
}
None => {
result[i] = 1;
}
}
}
result
}
impl<T: ?Sized> MyItertools for T where T: Iterator {}
trait MyItertools: Iterator {
fn fast_counts(self) -> HashMap<Self::Item, usize>
where
Self: Sized,
Self::Item: Eq + Hash,
{
let mut result = HashMap::new();
self.for_each(|item| *result.entry(item).or_insert(0) += 1);
result
}
}
fn _entropy<T>(distribution: &HashMap<T, usize>) -> u64 {
let denominator = distribution.values().sum::<usize>() as f64;
let result = distribution
.values()
.map(|v| {
let p = *v as f64 / denominator;
p * -f64::log2(p)
})
.sum::<f64>();
(1_000_000_000.0 * result) as u64
}
fn _min_surprise<T>(distribution: &HashMap<T, usize>) -> Option<u64> {
let numerator = *distribution.values().max()? as f64;
let denominator = distribution.values().sum::<usize>() as f64;
Some((1_000_000_000.0 * -f64::log2(numerator / denominator)) as u64)
}
#[derive(Debug)]
struct Constraint<const N: usize> {
permitted: [HashSet<char>; N],
lo: HashMap<char, usize>,
hi: HashMap<char, usize>,
}
impl<const N: usize> Constraint<N> {
fn new() -> Self {
let permitted = iter::repeat(
"abcdefghijklmnopqrstuvwxyz"
.chars()
.collect::<HashSet<char>>(),
)
.take(N)
.collect::<Vec<HashSet<char>>>()
.try_into()
.unwrap();
Self {
permitted,
lo: HashMap::new(),
hi: HashMap::new(),
}
}
fn from_clues(clues: &[(Word<N>, Score<N>)]) -> Self {
let mut result = Self::new();
if clues.is_empty() {
return result;
}
for (guess, score) in clues {
result.update(guess, score);
}
result
}
fn update(&mut self, guess: &Word<N>, score: &Score<N>) {
let mut required = HashSet::new();
for (i, (g, s)) in guess.iter().zip(score.iter()).enumerate() {
match s {
1 => {
self.permitted[i].remove(g);
if !required.contains(&g) {
for p in self.permitted.iter_mut() {
p.remove(g);
}
}
}
2 => {
self.permitted[i].remove(g);
required.insert(g);
}
3 => {
self.permitted[i].clear();
self.permitted[i].insert(*g);
required.insert(g);
}
_ => {
panic!("Invalid score {:?}", score);
}
}
}
let positive = guess
.iter()
.zip(score.iter())
.filter_map(|(g, s)| match s {
2 => Some(g),
3 => Some(g),
_ => None,
})
.fast_counts();
let negative = guess
.iter()
.zip(score.iter())
.filter_map(|(g, s)| match s {
1 => Some(g),
_ => None,
})
.fast_counts();
for (k, v) in positive {
let lo = self.lo.entry(*k).or_insert(0);
*lo = cmp::max(*lo, v);
if negative.contains_key(&k) {
let hi = self.hi.entry(*k).or_insert(5);
*hi = cmp::min(*hi, v);
}
}
}
fn permits(&self, answer: &Word<N>) -> bool {
for (a, p) in answer.iter().zip(&self.permitted) {
if !p.contains(a) {
return false;
}
}
let counts = answer.iter().fast_counts();
for (k, lo) in self.lo.iter() {
match counts.get(k) {
Some(v) if lo <= v => continue,
_ => return false, | }
for (k, hi) in self.hi.iter() {
match counts.get(k) {
Some(v) if v <= hi => continue,
_ => return false,
}
}
true
}
}
type Word<const N: usize> = [char; N];
type Score<const N: usize> = [u8; N];
struct Bot<const N: usize> {
allowed_guesses: Vec<Word<N>>,
allowed_answers: Vec<Word<N>>,
adversarial: bool,
cache: HashMap<Vec<(Word<N>, Score<N>)>, Word<N>>,
num_cache_hit: usize,
}
impl<const N: usize> Bot<N> {
fn new(guesses: Vec<Word<N>>, answers: Vec<Word<N>>, adversarial: bool) -> Self {
Self {
allowed_guesses: guesses
.into_iter()
.chain(answers.iter().cloned())
.unique()
.collect(),
allowed_answers: answers,
adversarial,
cache: HashMap::new(),
num_cache_hit: 0,
}
}
fn choice(&mut self, clues: &[(Word<N>, Score<N>)]) -> Option<Word<N>> {
if let Some(result) = self.cache.get(clues) {
self.num_cache_hit += 1;
return Some(*result);
}
let constraint = Constraint::from_clues(clues);
let plausible_answers: Vec<&Word<N>> = self
.allowed_answers
.iter()
.filter(|a| constraint.permits(a))
.collect();
// Before the ordering accounted for plausible answers this reduced the number of guesses.
// Now it is only an optimization and provides a 2-3x speedup.
let good_guesses: Vec<&Word<N>> = if plausible_answers.len() <= 3 {
plausible_answers.clone()
} else {
self.allowed_guesses.iter().collect()
};
let guesses: Vec<(u64, &Word<N>)> = match self.adversarial {
false => good_guesses
.into_par_iter()
.map(|guess| {
(
_entropy(
&plausible_answers
.iter()
.map(|answer| _score(guess, answer))
.fast_counts(),
),
guess,
)
})
.collect(),
true => good_guesses
.into_par_iter()
.map(|guess| {
(
_min_surprise(
&plausible_answers
.iter()
.map(|answer| _score(guess, answer))
.fast_counts(),
)
.expect("at least one answer"),
guess,
)
})
.collect(),
};
let plausible_answers: HashSet<&Word<N>> = plausible_answers.into_iter().collect();
let best = guesses
.into_iter()
.max_by_key(|(info, guess)| (*info, plausible_answers.contains(guess), *guess))?;
self.cache.insert(clues.to_vec(), *best.1);
Some(*(best.1))
}
}
fn _play<const N: usize>(bot: &mut Bot<N>, answer: &Word<N>) -> Vec<(Word<N>, Score<N>)> {
let mut clues: Vec<(Word<N>, Score<N>)> = Vec::new();
loop {
let choice = bot.choice(&clues).unwrap();
let score = _score(&choice, answer);
clues.push((choice, score));
if choice == *answer {
break;
}
}
clues
}
fn _histogram<const N: usize>(wordlist: &str, adversarial: bool) -> HashMap<usize, usize> {
let mut bot: Bot<N> = Bot::new(_guesses(wordlist), _answers(wordlist), adversarial);
let answers = bot.allowed_answers.clone();
answers
.iter()
.map(|answer| {
let clues = _play(&mut bot, answer);
clues.len()
})
.fast_counts()
.into_iter()
.collect()
}
fn _choice<const N: usize>(
wordlist: &str,
clues: &str,
adversarial: bool,
) -> Result<String, Box<dyn std::error::Error>> {
let mut bot: Bot<N> = Bot::new(_guesses(wordlist), _answers(wordlist), adversarial);
match bot.choice(&_parse_clues(clues)) {
Some(guess) => Ok(guess.iter().join("")),
None => Err("Could not find a best guess".into()),
}
}
#[derive(StructOpt)]
pub struct Cli {
wordlist: String,
#[structopt(default_value = "")]
clues: String,
#[structopt(long)]
adversarial: bool,
}
fn _word<const N: usize>(line: &str) -> Word<N> {
line.trim()
.chars()
.collect::<Vec<char>>()
.as_slice()
.try_into()
.unwrap()
}
fn _word_length(text: &str) -> usize {
text.lines().next().unwrap().len()
}
fn _answers<const N: usize>(text: &str) -> Vec<Word<N>> {
text.lines()
.take_while(|line| !line.is_empty())
.map(_word)
.collect()
}
fn _guesses<const N: usize>(text: &str) -> Vec<Word<N>> {
text.lines()
.filter(|line| !line.is_empty())
.map(_word)
.collect()
}
fn _parse_clues<const N: usize>(clues: &str) -> Vec<(Word<N>, Score<N>)> {
let mut result = Vec::new();
if clues.is_empty() {
return result;
}
for clue in clues.split(',') {
let clue = clue.split(':').collect::<Vec<&str>>();
let guess = _word(clue[0]);
let score = clue[1]
.chars()
.map(|c| match c {
'1' => 1,
'2' => 2,
'3' => 3,
_ => panic!("Expected [1-3] but found {}", c),
})
.collect::<Vec<u8>>()
.as_slice()
.try_into()
.unwrap();
result.push((guess, score));
}
result
}
fn _read_bot<const N: usize>(args: &Cli) -> Bot<N> {
let wordlist = fs::read_to_string(&args.wordlist).unwrap();
Bot::new(_guesses(&wordlist), _answers(&wordlist), args.adversarial)
}
fn _main(args: &Cli) -> Result<String, Box<dyn std::error::Error>> {
let wordlist = fs::read_to_string(&args.wordlist).unwrap();
let guess = match _word_length(&wordlist) {
5 => _choice::<5>(&wordlist, &args.clues, args.adversarial),
6 => _choice::<6>(&wordlist, &args.clues, args.adversarial),
_ => todo!(),
}?;
Ok(guess)
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
env_logger::init();
let args = Cli::from_args();
let output = _main(&args)?;
println!("{}", output);
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
fn print_stats(histogram: &HashMap<usize, usize>) {
let num_answer = histogram.values().sum::<usize>();
let num_guess = histogram.iter().map(|(k, v)| k * v).sum::<usize>();
let max_guess = *histogram.keys().max().unwrap();
println!(
"Histogram: {:?}",
histogram
.into_iter()
.sorted()
.collect::<Vec<(&usize, &usize)>>()
);
println!("Num answers: {}", num_answer);
println!("Num guesses: {}", num_guess);
println!("Avg guesses: {}", num_guess as f64 / num_answer as f64);
println!("Max guesses: {}", max_guess);
}
#[test]
fn all_5_letter_words_can_be_solved() {
let wordlist = fs::read_to_string("wordlists/small5.txt").unwrap();
let histogram = _histogram::<5>(&wordlist, false);
print_stats(&histogram);
}
#[test]
fn all_6_letter_words_can_be_solved() {
let wordlist = fs::read_to_string("wordlists/small6.txt").unwrap();
let histogram = _histogram::<6>(&wordlist, false);
print_stats(&histogram);
}
#[test]
fn main_works_on_5_letter_example() {
let output = _main(&Cli {
wordlist: "wordlists/small5.txt".into(),
adversarial: false,
clues: "soare:13121".into(),
})
.unwrap();
// The answer need not be robot but it has been in the past so this is just a
// lazy way to ensure the output is not garbage.
assert_eq!(output, "fitch");
}
#[test]
fn main_works_on_6_letter_example() {
let output = _main(&Cli {
wordlist: "wordlists/small6.txt".into(),
adversarial: false,
clues: "tories:131211".into(),
})
.unwrap();
// The answer need not be robot but it has been in the past so this is just a
// lazy way to ensure the output is not garbage.
assert_eq!(output, "domain");
}
} | } |
sidebar_header.examples.tsx | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React from 'react';
import { storiesOf } from '@storybook/react';
import { action } from '@storybook/addon-actions';
import { SidebarHeader } from '../sidebar_header';
const handlers = {
cloneNodes: action('cloneNodes'), | deleteNodes: action('deleteNodes'),
bringToFront: action('bringToFront'),
bringForward: action('bringForward'),
sendBackward: action('sendBackward'),
sendToBack: action('sendToBack'),
createCustomElement: action('createCustomElement'),
groupNodes: action('groupNodes'),
ungroupNodes: action('ungroupNodes'),
};
storiesOf('components/Sidebar/SidebarHeader', module)
.addDecorator(story => <div style={{ width: '300px' }}>{story()}</div>)
.add('default', () => <SidebarHeader title="Selected layer" {...handlers} />)
.add('without layer controls', () => (
<SidebarHeader title="Grouped element" showLayerControls={false} {...handlers} />
)); | copyNodes: action('copyNodes'),
cutNodes: action('cutNodes'),
pasteNodes: action('pasteNodes'), |
main.go | package main
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"crypto/sha256"
"encoding/json"
"flag"
"fmt"
"io/ioutil"
"net/http"
"os"
"strconv"
"github.com/btcsuite/btcutil/base58"
"golang.org/x/crypto/pbkdf2"
"github.com/matthewpi/privatebin/utils"
)
const (
specIterations = 100000
specKeySize = 256
specTagSize = 128
specAlgorithm = "aes"
specMode = "gcm"
specCompression = "none"
)
// PasteRequest .
type PasteRequest struct {
V int `json:"v"`
AData []interface{} `json:"adata"`
Meta PasteRequestMeta `json:"meta"`
CT string `json:"ct"`
}
// PasteRequestMeta .
type PasteRequestMeta struct {
Expire string `json:"expire"`
}
// PasteResponse .
type PasteResponse struct {
Status int `json:"status"`
ID string `json:"id"`
URL string `json:"url"`
DeleteToken string `json:"deletetoken"`
}
// PasteContent .
type PasteContent struct {
Paste string `json:"paste"`
}
// PasteSpec .
type PasteSpec struct {
IV string
Salt string
Iterations int
KeySize int
TagSize int
Algorithm string
Mode string
Compression string
}
// SpecArray .
func (spec *PasteSpec) SpecArray() []interface{} {
return []interface{}{
spec.IV,
spec.Salt,
spec.Iterations,
spec.KeySize,
spec.TagSize,
spec.Algorithm,
spec.Mode,
spec.Compression,
}
}
// PasteData .
type PasteData struct {
*PasteSpec
Data []byte
}
// adata .
func (paste *PasteData) adata() []interface{} {
return []interface{}{
paste.SpecArray(),
"plaintext",
0,
0,
}
}
func main() {
// Read from STDIN (Piped input)
input, err := ioutil.ReadAll(os.Stdin)
if err != nil {
panic(err)
}
// Remove extra line breaks to prevent PrivateBin from breaking.
if bytes.HasSuffix(input, []byte("\n")) {
input = input[:len(input)-1]
}
// Marshal the paste content to escape JSON characters.
pasteContent, err := json.Marshal(&PasteContent{Paste: utils.StripANSI(string(input))})
if err != nil {
panic(err)
}
var host string
var dry bool
var expire string
var key string
flag.StringVar(&host, "host", "https://privatebin.net", "private bin host url")
flag.BoolVar(&dry, "dry", false, "dry run. not send to the host, output json and hash")
flag.StringVar(&expire, "expire", "1day", "expire time, values[ 1day, 1week, 1month, never ]")
flag.StringVar(&key, "key", "", "hash key to encrypt")
flag.Parse()
var masterKey []byte
if key != "" {
masterKey = base58.Decode(key)
} else {
// Generate a master key for the paste.
master, err := utils.GenRandomBytes(32)
if err != nil {
panic(err)
}
masterKey = master
}
// Encrypt the paste data
pasteData, err := encrypt(masterKey, pasteContent)
if err != nil {
panic(err)
}
// Create a new Paste Request.
pasteRequest := &PasteRequest{
V: 2,
AData: pasteData.adata(),
Meta: PasteRequestMeta{
Expire: expire,
},
CT: utils.Base64(pasteData.Data),
}
// Get the Request Body.
body, err := json.Marshal(pasteRequest)
if err != nil {
panic(err)
}
if dry {
fmt.Printf("%v\n%v\n", base58.Encode(masterKey), bytes.NewBuffer(body))
return
}
// Create a new HTTP Client and HTTP Request.
client := &http.Client{}
req, err := http.NewRequest("POST", host, bytes.NewBuffer(body))
if err != nil {
panic(err)
}
// Set the request headers.
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
req.Header.Set("Content-Length", strconv.Itoa(len(body)))
req.Header.Set("X-Requested-With", "JSONHttpRequest")
// Run the http request.
res, err := client.Do(req)
if err != nil {
panic(err)
}
// Close the request body once we are done.
defer func() {
if err := res.Body.Close(); err != nil {
panic(err)
}
}()
// Read the response body.
response, err := ioutil.ReadAll(res.Body)
if err != nil {
panic(err)
}
// Decode the response.
pasteResponse := &PasteResponse{}
if err := json.Unmarshal(response, &pasteResponse); err != nil {
panic(err)
}
fmt.Printf("%s%s#%s\n", host, pasteResponse.URL, base58.Encode(masterKey))
}
func encrypt(master []byte, message []byte) (*PasteData, error) | {
// Generate a initialization vector.
iv, err := utils.GenRandomBytes(12)
if err != nil {
return nil, err
}
// Generate salt.
salt, err := utils.GenRandomBytes(8)
if err != nil {
return nil, err
}
// Create the Paste Data and generate a key.
paste := &PasteData{
PasteSpec: &PasteSpec{
IV: utils.Base64(iv),
Salt: utils.Base64(salt),
Iterations: specIterations,
KeySize: specKeySize,
TagSize: specTagSize,
Algorithm: specAlgorithm,
Mode: specMode,
Compression: specCompression,
},
}
key := pbkdf2.Key(master, salt, paste.Iterations, 32, sha256.New)
// Get the "adata" for the paste.
adata, err := json.Marshal(paste.adata())
if err != nil {
return nil, err
}
// Create a new Cipher
c, err := aes.NewCipher(key)
if err != nil {
return nil, err
}
// Create a new GCM.
gcm, err := cipher.NewGCM(c)
if err != nil {
return nil, err
}
// Sign the message.
data := gcm.Seal(nil, iv, message, adata)
// Update and return the paste data.
paste.Data = data
return paste, nil
} |
|
setter.rs | // Zinc, the bare metal stack for rust.
// Copyright 2014 Ben Gamari <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::iter::FromIterator;
use std::ops::Deref;
use syntax::ast;
use syntax::ptr::P;
use syntax::ext::base::ExtCtxt;
use syntax::codemap::DUMMY_SP;
use syntax::ext::build::AstBuilder;
use syntax::ext::quote::rt::ToTokens;
use syntax::parse::token;
use super::Builder;
use super::super::node;
use super::utils;
/// A visitor to build the field setters for primitive registers
pub struct BuildSetters<'a> {
builder: &'a mut Builder,
cx: &'a ExtCtxt<'a>,
}
impl<'a> BuildSetters<'a> {
pub fn new(builder: &'a mut Builder, cx: &'a ExtCtxt<'a>)
-> BuildSetters<'a> {
BuildSetters { builder: builder, cx: cx }
}
}
impl<'a> node::RegVisitor for BuildSetters<'a> {
fn visit_prim_reg<'b>(&'b mut self, path: &Vec<String>,
reg: &'b node::Reg, _width: &node::RegWidth, fields: &Vec<node::Field>)
{
if fields.iter().any(|f| f.access != node::Access::ReadOnly) {
let it = build_type(self.cx, path, reg, fields);
self.builder.push_item(it);
let it = build_drop(self.cx, path, reg, fields);
self.builder.push_item(it);
let it = build_impl(self.cx, path, reg, fields);
self.builder.push_item(it);
// Build Copy impl
let ty_name = utils::setter_name(self.cx, path);
let it = quote_item!(self.cx,
impl<'a> ::core::kinds::Copy for $ty_name<'a> {});
self.builder.push_item(it.unwrap());
}
}
}
fn build_type(cx: &ExtCtxt, path: &Vec<String>,
reg: &node::Reg, _fields: &Vec<node::Field>) -> P<ast::Item>
{
let packed_ty = utils::reg_primitive_type(cx, reg)
.expect("Unexpected non-primitive register");
let name = utils::setter_name(cx, path);
let reg_ty = cx.ty_ident(DUMMY_SP, utils::path_ident(cx, path));
let reg_doc = match reg.docstring {
Some(d) => token::get_ident(d.node).get().to_string(),
None => "no documentation".to_string(),
};
let docstring = format!("Update value of `{}` register: {}",
reg.name.node,
reg_doc);
let doc_attr = utils::doc_attribute(cx, utils::intern_string(cx, docstring));
let item = quote_item!(cx,
$doc_attr
#[allow(non_camel_case_types)]
pub struct $name<'a> {
value: $packed_ty,
mask: $packed_ty,
reg: &'a $reg_ty,
}
);
let mut item: ast::Item = item.unwrap().deref().clone();
item.span = reg.name.span;
P(item)
}
fn | <'a>(cx: &'a ExtCtxt, path: &Vec<String>)
-> P<ast::Item> {
let reg_ty: P<ast::Ty> =
cx.ty_ident(DUMMY_SP, utils::path_ident(cx, path));
let setter_ty: P<ast::Ty> = cx.ty_ident(DUMMY_SP,
utils::setter_name(cx, path));
let item = quote_item!(cx,
#[doc="Create a new updater"]
pub fn new(reg: &'a $reg_ty) -> $setter_ty {
$setter_ty {
value: 0,
mask: 0,
reg: reg,
}
});
item.unwrap()
}
fn build_drop(cx: &ExtCtxt, path: &Vec<String>,
reg: &node::Reg, fields: &Vec<node::Field>) -> P<ast::Item>
{
let setter_ty: P<ast::Ty> = cx.ty_ident(DUMMY_SP,
utils::setter_name(cx, path));
let unpacked_ty = utils::reg_primitive_type(cx, reg)
.expect("Unexpected non-primitive register");
// ensure we don't unintentionally clear a set-to-clear flag
let mut clear: u32 = 0;
for f in fields.iter() {
match f.access {
node::Access::SetToClear => {
let mask = 1 << (f.count.node * f.width) - 1;
clear |= mask;
},
_ => {},
}
}
// no need to read write-only registers
let wo_reg: bool = fields.iter().all(|f| f.access == node::Access::WriteOnly);
let initial_value =
if wo_reg {
quote_expr!(cx, 0)
} else {
quote_expr!(cx, self.reg.value.get())
};
let item = quote_item!(cx,
#[unsafe_destructor]
#[doc = "This performs the register update"]
impl<'a> Drop for $setter_ty<'a> {
fn drop(&mut self) {
let clear_mask: $unpacked_ty = $clear as $unpacked_ty;
if self.mask != 0 {
let v: $unpacked_ty = $initial_value & ! clear_mask & ! self.mask;
self.reg.value.set(self.value | v);
}
}
}
);
item.unwrap()
}
fn build_done(cx: &ExtCtxt) -> P<ast::Method>
{
quote_method!(cx,
#[doc="Commit changes to register. This is to allow chains of `set_*` \
invocations to be used as a statement."]
pub fn done(self) {}
)
}
fn build_impl(cx: &ExtCtxt, path: &Vec<String>, reg: &node::Reg,
fields: &Vec<node::Field>) -> P<ast::Item>
{
let new = build_new(cx, path);
let setter_ty: P<ast::Ty> = cx.ty_ident(
DUMMY_SP,
utils::setter_name(cx, path));
let methods: Vec<P<ast::Method>> =
FromIterator::from_iter(
fields.iter()
.filter_map(|field| build_field_fn(cx, path, reg, field)));
let done: P<ast::Method> = build_done(cx);
let impl_ = quote_item!(cx,
#[allow(dead_code)]
impl<'a> $setter_ty<'a> {
$new
$methods
$done
}
);
impl_.unwrap()
}
fn build_field_fn(cx: &ExtCtxt, path: &Vec<String>, reg: &node::Reg,
field: &node::Field) -> Option<P<ast::Method>>
{
match field.access {
node::Access::ReadOnly => None,
node::Access::SetToClear => Some(build_field_clear_fn(cx, path, reg, field)),
_ => Some(build_field_set_fn(cx, path, reg, field)),
}
}
/// Build a setter for a field
fn build_field_set_fn(cx: &ExtCtxt, path: &Vec<String>, reg: &node::Reg,
field: &node::Field) -> P<ast::Method>
{
let setter_ty = utils::setter_name(cx, path);
let unpacked_ty = utils::reg_primitive_type(cx, reg)
.expect("Unexpected non-primitive register");
let fn_name =
cx.ident_of((String::from_str("set_")+field.name.node.as_slice()).as_slice());
let field_ty: P<ast::Ty> =
cx.ty_path(utils::field_type_path(cx, path, reg, field));
let mask = utils::mask(cx, field);
let field_doc = match field.docstring {
Some(d) => token::get_ident(d.node).get().to_string(),
None => "no documentation".to_string(),
};
let docstring = format!("Set value of `{}` field: {}",
field.name.node,
field_doc);
let doc_attr = utils::doc_attribute(cx, utils::intern_string(cx, docstring));
if field.count.node == 1 {
let shift = utils::shift(cx, None, field);
quote_method!(cx,
$doc_attr
pub fn $fn_name<'b>(&'b mut self, new_value: $field_ty)
-> &'b mut $setter_ty<'a> {
self.value |= (self.value & ! $mask) | ((new_value as $unpacked_ty) & $mask) << $shift;
self.mask |= $mask << $shift;
self
}
)
} else {
let shift = utils::shift(cx, Some(quote_expr!(cx, idx)), field);
quote_method!(cx,
$doc_attr
pub fn $fn_name<'b>(&'b mut self, idx: uint, new_value: $field_ty)
-> &'b mut $setter_ty<'a> {
self.value |= (self.value & ! $mask) | ((new_value as $unpacked_ty) & $mask) << $shift;
self.mask |= $mask << $shift;
self
}
)
}
}
fn build_field_clear_fn(cx: &ExtCtxt, path: &Vec<String>,
_: &node::Reg, field: &node::Field) -> P<ast::Method>
{
let setter_ty = utils::setter_name(cx, path);
let fn_name =
cx.ident_of((String::from_str("clear_")+field.name.node.as_slice()).as_slice());
let mask = utils::mask(cx, field);
let field_doc = match field.docstring {
Some(d) => token::get_ident(d.node).get().to_string(),
None => "no documentation".to_string(),
};
let docstring = format!("Clear `{}` flag: {}",
field.name.node,
field_doc);
let doc_attr = utils::doc_attribute(cx, utils::intern_string(cx, docstring));
if field.count.node == 1 {
let shift = utils::shift(cx, None, field);
quote_method!(cx,
$doc_attr
pub fn $fn_name<'b>(&'b mut self) -> &'b mut $setter_ty<'a> {
self.value |= $mask << $shift;
self.mask |= $mask << $shift;
self
}
)
} else {
let shift = utils::shift(cx, Some(quote_expr!(cx, idx)), field);
quote_method!(cx,
$doc_attr
pub fn $fn_name<'b>(&'b mut self, idx: uint) -> &'b mut $setter_ty<'a> {
self.value |= $mask << $shift;
self.mask |= $mask << $shift;
self
}
)
}
}
| build_new |
__init__.py | """Top level command for Treadmill reports.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import click
import pandas as pd
import tabulate
from six.moves import urllib_parse
from treadmill import cli
from treadmill import context
from treadmill import plugin_manager
from treadmill import restclient
def fetch_report(cell_api, report_type, match=None, partition=None):
"""Fetch a report of the given type and return it as a DataFrame."""
api_urls = context.GLOBAL.cell_api(cell_api)
path = '/scheduler/{}'.format(report_type)
query = {}
if match:
query['match'] = match
if partition:
query['partition'] = partition | path += '?' + urllib_parse.urlencode(query)
response = restclient.get(api_urls, path).json()
return pd.DataFrame(response['data'], columns=response['columns'])
def print_report(frame):
"""Pretty-print the report."""
if cli.OUTPUT_FORMAT is None:
frame.replace(True, ' ', inplace=True)
frame.replace(False, 'X', inplace=True)
dict_ = frame.to_dict(orient='split')
del dict_['index']
cli.out(
tabulate.tabulate(
dict_['data'], dict_['columns'], tablefmt='simple'
)
)
cli.echo_green('\nX: designates the factor that prohibits scheduling '
'the instance on the given server')
elif cli.OUTPUT_FORMAT == 'yaml':
fmt = plugin_manager.load('treadmill.formatters', 'yaml')
cli.out(fmt.format(frame.to_dict(orient='records')))
elif cli.OUTPUT_FORMAT == 'json':
cli.out(frame.to_json(orient='records'))
elif cli.OUTPUT_FORMAT == 'csv':
cli.out(frame.to_csv(index=False))
else:
cli.out(tabulate.tabulate(frame, frame.columns, tablefmt='simple'))
def init():
"""Return top level command handler."""
@click.group(cls=cli.make_commands(__name__))
@click.option(
'--cell',
help='Treadmill cell',
envvar='TREADMILL_CELL',
callback=cli.handle_context_opt,
expose_value=False,
required=True
)
@click.option(
'--api',
help='Cell API URL',
metavar='URL',
envvar='TREADMILL_CELLAPI'
)
@click.pass_context
def run(ctx, api):
"""Report scheduler state."""
if not ctx.obj:
ctx.obj = {} # Doesn't seem to exist in testing
ctx.obj['api'] = api
return run |
if query: |
storage.rs | use std::cmp::{Ordering, PartialEq};
use std::ops::{Index, IndexMut};
use std::vec::Drain;
use serde::{Deserialize, Serialize};
use super::Row;
use crate::index::Line;
/// Maximum number of invisible lines before buffer is resized
const TRUNCATE_STEP: usize = 100;
/// A ring buffer for optimizing indexing and rotation.
///
/// The [`Storage::rotate`] and [`Storage::rotate_up`] functions are fast modular additions on the
/// internal [`zero`] field. As compared with [`slice::rotate_left`] which must rearrange items in
/// memory.
///
/// As a consequence, both [`Index`] and [`IndexMut`] are reimplemented for this type to account
/// for the zeroth element not always being at the start of the allocation.
///
/// Because certain [`Vec`] operations are no longer valid on this type, no [`Deref`]
/// implementation is provided. Anything from [`Vec`] that should be exposed must be done so
/// manually.
///
/// [`slice::rotate_left`]: https://doc.rust-lang.org/std/primitive.slice.html#method.rotate_left
/// [`Deref`]: std::ops::Deref
/// [`zero`]: #structfield.zero
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Storage<T> {
inner: Vec<Row<T>>,
/// Starting point for the storage of rows.
///
/// This value represents the starting line offset within the ring buffer. The value of this
/// offset may be larger than the `len` itself, and will wrap around to the start to form the
/// ring buffer. It represents the bottommost line of the terminal.
zero: usize,
/// An index separating the visible and scrollback regions.
visible_lines: Line,
/// Total number of lines currently active in the terminal (scrollback + visible)
///
/// Shrinking this length allows reducing the number of lines in the scrollback buffer without
/// having to truncate the raw `inner` buffer.
/// As long as `len` is bigger than `inner`, it is also possible to grow the scrollback buffer
/// without any additional insertions.
#[serde(default)]
len: usize,
}
impl<T: PartialEq> PartialEq for Storage<T> {
fn eq(&self, other: &Self) -> bool {
// Make sure length is equal
if self.inner.len() != other.inner.len() {
return false;
}
// Check which vec has the bigger zero
let (ref bigger, ref smaller) =
if self.zero >= other.zero { (self, other) } else { (other, self) };
// Calculate the actual zero offset
let bigger_zero = bigger.zero;
let smaller_zero = smaller.zero;
// Compare the slices in chunks
// Chunks:
// - Bigger zero to the end
// - Remaining lines in smaller zero vec
// - Beginning of smaller zero vec
//
// Example:
// Bigger Zero (6):
// 4 5 6 | 7 8 9 | 0 1 2 3
// C2 C2 C2 | C3 C3 C3 | C1 C1 C1 C1
// Smaller Zero (3):
// 7 8 9 | 0 1 2 3 | 4 5 6
// C3 C3 C3 | C1 C1 C1 C1 | C2 C2 C2
let len = self.inner.len();
bigger.inner[bigger_zero..]
== smaller.inner[smaller_zero..smaller_zero + (len - bigger_zero)]
&& bigger.inner[..bigger_zero - smaller_zero]
== smaller.inner[smaller_zero + (len - bigger_zero)..]
&& bigger.inner[bigger_zero - smaller_zero..bigger_zero]
== smaller.inner[..smaller_zero]
}
}
impl<T> Storage<T> {
#[inline]
pub fn with_capacity(lines: Line, template: Row<T>) -> Storage<T>
where
T: Clone,
{
// Initialize visible lines, the scrollback buffer is initialized dynamically
let inner = vec![template; lines.0];
Storage { inner, zero: 0, visible_lines: lines - 1, len: lines.0 }
}
/// Update the size of the scrollback history
pub fn update_history(&mut self, history_size: usize, template_row: Row<T>)
where
T: Clone,
{
let current_history = self.len - (self.visible_lines.0 + 1);
match history_size.cmp(¤t_history) {
Ordering::Greater => self.grow_lines(history_size - current_history, template_row),
Ordering::Less => self.shrink_lines(current_history - history_size),
_ => (),
}
}
/// Increase the number of lines in the buffer
pub fn grow_visible_lines(&mut self, next: Line, template_row: Row<T>)
where
T: Clone,
{
// Number of lines the buffer needs to grow
let growage = (next - (self.visible_lines + 1)).0;
self.grow_lines(growage, template_row);
// Update visible lines
self.visible_lines = next - 1;
}
/// Grow the number of lines in the buffer, filling new lines with the template
fn | (&mut self, growage: usize, template_row: Row<T>)
where
T: Clone,
{
// Only grow if there are not enough lines still hidden
let mut new_growage = 0;
if growage > (self.inner.len() - self.len) {
// Lines to grow additionally to invisible lines
new_growage = growage - (self.inner.len() - self.len);
// Split off the beginning of the raw inner buffer
let mut start_buffer = self.inner.split_off(self.zero);
// Insert new template rows at the end of the raw inner buffer
let mut new_lines = vec![template_row; new_growage];
self.inner.append(&mut new_lines);
// Add the start to the raw inner buffer again
self.inner.append(&mut start_buffer);
}
// Update raw buffer length and zero offset
self.zero += new_growage;
self.len += growage;
}
/// Decrease the number of lines in the buffer
pub fn shrink_visible_lines(&mut self, next: Line) {
// Shrink the size without removing any lines
let shrinkage = (self.visible_lines - (next - 1)).0;
self.shrink_lines(shrinkage);
// Update visible lines
self.visible_lines = next - 1;
}
// Shrink the number of lines in the buffer
pub fn shrink_lines(&mut self, shrinkage: usize) {
self.len -= shrinkage;
// Free memory
if self.inner.len() > self.len() + TRUNCATE_STEP {
self.truncate();
}
}
/// Truncate the invisible elements from the raw buffer
pub fn truncate(&mut self) {
self.inner.rotate_left(self.zero);
self.inner.truncate(self.len);
self.zero = 0;
}
/// Dynamically grow the storage buffer at runtime
pub fn initialize(&mut self, num_rows: usize, template_row: Row<T>)
where
T: Clone,
{
let mut new = vec![template_row; num_rows];
let mut split = self.inner.split_off(self.zero);
self.inner.append(&mut new);
self.inner.append(&mut split);
self.zero += num_rows;
self.len += num_rows;
}
#[inline]
pub fn len(&self) -> usize {
self.len
}
/// Compute actual index in underlying storage given the requested index.
#[inline]
fn compute_index(&self, requested: usize) -> usize {
debug_assert!(requested < self.len);
let zeroed = self.zero + requested;
// Use if/else instead of remainder here to improve performance.
//
// Requires `zeroed` to be smaller than `self.inner.len() * 2`,
// but both `self.zero` and `requested` are always smaller than `self.inner.len()`.
if zeroed >= self.inner.len() {
zeroed - self.inner.len()
} else {
zeroed
}
}
pub fn swap_lines(&mut self, a: Line, b: Line) {
let offset = self.inner.len() + self.zero + *self.visible_lines;
let a = (offset - *a) % self.inner.len();
let b = (offset - *b) % self.inner.len();
self.inner.swap(a, b);
}
/// Swap implementation for Row<T>.
///
/// Exploits the known size of Row<T> to produce a slightly more efficient
/// swap than going through slice::swap.
///
/// The default implementation from swap generates 8 movups and 4 movaps
/// instructions. This implementation achieves the swap in only 8 movups
/// instructions.
pub fn swap(&mut self, a: usize, b: usize) {
debug_assert_eq!(std::mem::size_of::<Row<T>>(), 32);
let a = self.compute_index(a);
let b = self.compute_index(b);
unsafe {
// Cast to a qword array to opt out of copy restrictions and avoid
// drop hazards. Byte array is no good here since for whatever
// reason LLVM won't optimized it.
let a_ptr = self.inner.as_mut_ptr().add(a) as *mut usize;
let b_ptr = self.inner.as_mut_ptr().add(b) as *mut usize;
// Copy 1 qword at a time
//
// The optimizer unrolls this loop and vectorizes it.
let mut tmp: usize;
for i in 0..4 {
tmp = *a_ptr.offset(i);
*a_ptr.offset(i) = *b_ptr.offset(i);
*b_ptr.offset(i) = tmp;
}
}
}
/// Rotate the grid, moving all lines up/down in history.
#[inline]
pub fn rotate(&mut self, count: isize) {
debug_assert!(count.abs() as usize <= self.inner.len());
let len = self.inner.len();
self.zero = (self.zero as isize + count + len as isize) as usize % self.inner.len();
}
/// Rotate the grid up, moving all existing lines down in history.
///
/// This is a faster, specialized version of [`rotate`].
#[inline]
pub fn rotate_up(&mut self, count: usize) {
self.zero = (self.zero + count) % self.inner.len();
}
/// Drain all rows in the grid.
pub fn drain(&mut self) -> Drain<'_, Row<T>> {
self.truncate();
self.inner.drain(..)
}
/// Update the raw storage buffer.
pub fn replace_inner(&mut self, vec: Vec<Row<T>>) {
self.len = vec.len();
self.inner = vec;
self.zero = 0;
}
}
impl<T> Index<usize> for Storage<T> {
type Output = Row<T>;
#[inline]
fn index(&self, index: usize) -> &Self::Output {
&self.inner[self.compute_index(index)]
}
}
impl<T> IndexMut<usize> for Storage<T> {
#[inline]
fn index_mut(&mut self, index: usize) -> &mut Self::Output {
let index = self.compute_index(index); // borrowck
&mut self.inner[index]
}
}
impl<T> Index<Line> for Storage<T> {
type Output = Row<T>;
#[inline]
fn index(&self, index: Line) -> &Self::Output {
let index = self.visible_lines - index;
&self[*index]
}
}
impl<T> IndexMut<Line> for Storage<T> {
#[inline]
fn index_mut(&mut self, index: Line) -> &mut Self::Output {
let index = self.visible_lines - index;
&mut self[*index]
}
}
#[cfg(test)]
mod test {
use crate::grid::row::Row;
use crate::grid::storage::Storage;
use crate::grid::GridCell;
use crate::index::{Column, Line};
impl GridCell for char {
fn is_empty(&self) -> bool {
*self == ' ' || *self == '\t'
}
fn is_wrap(&self) -> bool {
false
}
fn set_wrap(&mut self, _wrap: bool) {}
fn fast_eq(&self, other: Self) -> bool {
self == &other
}
}
#[test]
fn with_capacity() {
let storage = Storage::with_capacity(Line(3), Row::new(Column(0), &' '));
assert_eq!(storage.inner.len(), 3);
assert_eq!(storage.len, 3);
assert_eq!(storage.zero, 0);
assert_eq!(storage.visible_lines, Line(2));
}
#[test]
fn indexing() {
let mut storage = Storage::with_capacity(Line(3), Row::new(Column(0), &' '));
storage[0] = Row::new(Column(1), &'0');
storage[1] = Row::new(Column(1), &'1');
storage[2] = Row::new(Column(1), &'2');
assert_eq!(storage[0], Row::new(Column(1), &'0'));
assert_eq!(storage[1], Row::new(Column(1), &'1'));
assert_eq!(storage[2], Row::new(Column(1), &'2'));
storage.zero += 1;
assert_eq!(storage[0], Row::new(Column(1), &'1'));
assert_eq!(storage[1], Row::new(Column(1), &'2'));
assert_eq!(storage[2], Row::new(Column(1), &'0'));
}
#[test]
#[should_panic]
fn indexing_above_inner_len() {
let storage = Storage::with_capacity(Line(1), Row::new(Column(0), &' '));
let _ = &storage[2];
}
#[test]
fn rotate() {
let mut storage = Storage::with_capacity(Line(3), Row::new(Column(0), &' '));
storage.rotate(2);
assert_eq!(storage.zero, 2);
storage.shrink_lines(2);
assert_eq!(storage.len, 1);
assert_eq!(storage.inner.len(), 3);
assert_eq!(storage.zero, 2);
}
/// Grow the buffer one line at the end of the buffer
///
/// Before:
/// 0: 0 <- Zero
/// 1: 1
/// 2: -
/// After:
/// 0: -
/// 1: 0 <- Zero
/// 2: 1
/// 3: -
#[test]
fn grow_after_zero() {
// Setup storage area
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'-'),
],
zero: 0,
visible_lines: Line(2),
len: 3,
};
// Grow buffer
storage.grow_visible_lines(Line(4), Row::new(Column(1), &'-'));
// Make sure the result is correct
let expected = Storage {
inner: vec![
Row::new(Column(1), &'-'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'-'),
],
zero: 1,
visible_lines: Line(0),
len: 4,
};
assert_eq!(storage.inner, expected.inner);
assert_eq!(storage.zero, expected.zero);
assert_eq!(storage.len, expected.len);
}
/// Grow the buffer one line at the start of the buffer
///
/// Before:
/// 0: -
/// 1: 0 <- Zero
/// 2: 1
/// After:
/// 0: -
/// 1: -
/// 2: 0 <- Zero
/// 3: 1
#[test]
fn grow_before_zero() {
// Setup storage area
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'-'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
],
zero: 1,
visible_lines: Line(2),
len: 3,
};
// Grow buffer
storage.grow_visible_lines(Line(4), Row::new(Column(1), &'-'));
// Make sure the result is correct
let expected = Storage {
inner: vec![
Row::new(Column(1), &'-'),
Row::new(Column(1), &'-'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
],
zero: 2,
visible_lines: Line(0),
len: 4,
};
assert_eq!(storage.inner, expected.inner);
assert_eq!(storage.zero, expected.zero);
assert_eq!(storage.len, expected.len);
}
/// Shrink the buffer one line at the start of the buffer
///
/// Before:
/// 0: 2
/// 1: 0 <- Zero
/// 2: 1
/// After:
/// 0: 2 <- Hidden
/// 0: 0 <- Zero
/// 1: 1
#[test]
fn shrink_before_zero() {
// Setup storage area
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'2'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
],
zero: 1,
visible_lines: Line(2),
len: 3,
};
// Shrink buffer
storage.shrink_visible_lines(Line(2));
// Make sure the result is correct
let expected = Storage {
inner: vec![
Row::new(Column(1), &'2'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
],
zero: 1,
visible_lines: Line(0),
len: 2,
};
assert_eq!(storage.inner, expected.inner);
assert_eq!(storage.zero, expected.zero);
assert_eq!(storage.len, expected.len);
}
/// Shrink the buffer one line at the end of the buffer
///
/// Before:
/// 0: 0 <- Zero
/// 1: 1
/// 2: 2
/// After:
/// 0: 0 <- Zero
/// 1: 1
/// 2: 2 <- Hidden
#[test]
fn shrink_after_zero() {
// Setup storage area
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
],
zero: 0,
visible_lines: Line(2),
len: 3,
};
// Shrink buffer
storage.shrink_visible_lines(Line(2));
// Make sure the result is correct
let expected = Storage {
inner: vec![
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
],
zero: 0,
visible_lines: Line(0),
len: 2,
};
assert_eq!(storage.inner, expected.inner);
assert_eq!(storage.zero, expected.zero);
assert_eq!(storage.len, expected.len);
}
/// Shrink the buffer at the start and end of the buffer
///
/// Before:
/// 0: 4
/// 1: 5
/// 2: 0 <- Zero
/// 3: 1
/// 4: 2
/// 5: 3
/// After:
/// 0: 4 <- Hidden
/// 1: 5 <- Hidden
/// 2: 0 <- Zero
/// 3: 1
/// 4: 2 <- Hidden
/// 5: 3 <- Hidden
#[test]
fn shrink_before_and_after_zero() {
// Setup storage area
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'4'),
Row::new(Column(1), &'5'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
Row::new(Column(1), &'3'),
],
zero: 2,
visible_lines: Line(5),
len: 6,
};
// Shrink buffer
storage.shrink_visible_lines(Line(2));
// Make sure the result is correct
let expected = Storage {
inner: vec![
Row::new(Column(1), &'4'),
Row::new(Column(1), &'5'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
Row::new(Column(1), &'3'),
],
zero: 2,
visible_lines: Line(0),
len: 2,
};
assert_eq!(storage.inner, expected.inner);
assert_eq!(storage.zero, expected.zero);
assert_eq!(storage.len, expected.len);
}
/// Check that when truncating all hidden lines are removed from the raw buffer
///
/// Before:
/// 0: 4 <- Hidden
/// 1: 5 <- Hidden
/// 2: 0 <- Zero
/// 3: 1
/// 4: 2 <- Hidden
/// 5: 3 <- Hidden
/// After:
/// 0: 0 <- Zero
/// 1: 1
#[test]
fn truncate_invisible_lines() {
// Setup storage area
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'4'),
Row::new(Column(1), &'5'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
Row::new(Column(1), &'3'),
],
zero: 2,
visible_lines: Line(1),
len: 2,
};
// Truncate buffer
storage.truncate();
// Make sure the result is correct
let expected = Storage {
inner: vec![Row::new(Column(1), &'0'), Row::new(Column(1), &'1')],
zero: 0,
visible_lines: Line(1),
len: 2,
};
assert_eq!(storage.visible_lines, expected.visible_lines);
assert_eq!(storage.inner, expected.inner);
assert_eq!(storage.zero, expected.zero);
assert_eq!(storage.len, expected.len);
}
/// Truncate buffer only at the beginning
///
/// Before:
/// 0: 1
/// 1: 2 <- Hidden
/// 2: 0 <- Zero
/// After:
/// 0: 1
/// 0: 0 <- Zero
#[test]
fn truncate_invisible_lines_beginning() {
// Setup storage area
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
Row::new(Column(1), &'0'),
],
zero: 2,
visible_lines: Line(1),
len: 2,
};
// Truncate buffer
storage.truncate();
// Make sure the result is correct
let expected = Storage {
inner: vec![Row::new(Column(1), &'0'), Row::new(Column(1), &'1')],
zero: 0,
visible_lines: Line(1),
len: 2,
};
assert_eq!(storage.visible_lines, expected.visible_lines);
assert_eq!(storage.inner, expected.inner);
assert_eq!(storage.zero, expected.zero);
assert_eq!(storage.len, expected.len);
}
/// First shrink the buffer and then grow it again
///
/// Before:
/// 0: 4
/// 1: 5
/// 2: 0 <- Zero
/// 3: 1
/// 4: 2
/// 5: 3
/// After Shrinking:
/// 0: 4 <- Hidden
/// 1: 5 <- Hidden
/// 2: 0 <- Zero
/// 3: 1
/// 4: 2
/// 5: 3 <- Hidden
/// After Growing:
/// 0: 4
/// 1: 5
/// 2: -
/// 3: 0 <- Zero
/// 4: 1
/// 5: 2
/// 6: 3
#[test]
fn shrink_then_grow() {
// Setup storage area
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'4'),
Row::new(Column(1), &'5'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
Row::new(Column(1), &'3'),
],
zero: 2,
visible_lines: Line(0),
len: 6,
};
// Shrink buffer
storage.shrink_lines(3);
// Make sure the result after shrinking is correct
let shrinking_expected = Storage {
inner: vec![
Row::new(Column(1), &'4'),
Row::new(Column(1), &'5'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
Row::new(Column(1), &'3'),
],
zero: 2,
visible_lines: Line(0),
len: 3,
};
assert_eq!(storage.inner, shrinking_expected.inner);
assert_eq!(storage.zero, shrinking_expected.zero);
assert_eq!(storage.len, shrinking_expected.len);
// Grow buffer
storage.grow_lines(4, Row::new(Column(1), &'-'));
// Make sure the result after shrinking is correct
let growing_expected = Storage {
inner: vec![
Row::new(Column(1), &'4'),
Row::new(Column(1), &'5'),
Row::new(Column(1), &'-'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
Row::new(Column(1), &'3'),
],
zero: 3,
visible_lines: Line(0),
len: 7,
};
assert_eq!(storage.inner, growing_expected.inner);
assert_eq!(storage.zero, growing_expected.zero);
assert_eq!(storage.len, growing_expected.len);
}
#[test]
fn initialize() {
// Setup storage area
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'4'),
Row::new(Column(1), &'5'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
Row::new(Column(1), &'3'),
],
zero: 2,
visible_lines: Line(0),
len: 6,
};
// Initialize additional lines
storage.initialize(3, Row::new(Column(1), &'-'));
// Make sure the lines are present and at the right location
let shrinking_expected = Storage {
inner: vec![
Row::new(Column(1), &'4'),
Row::new(Column(1), &'5'),
Row::new(Column(1), &'-'),
Row::new(Column(1), &'-'),
Row::new(Column(1), &'-'),
Row::new(Column(1), &'0'),
Row::new(Column(1), &'1'),
Row::new(Column(1), &'2'),
Row::new(Column(1), &'3'),
],
zero: 5,
visible_lines: Line(0),
len: 9,
};
assert_eq!(storage.inner, shrinking_expected.inner);
assert_eq!(storage.zero, shrinking_expected.zero);
assert_eq!(storage.len, shrinking_expected.len);
}
#[test]
fn rotate_wrap_zero() {
let mut storage = Storage {
inner: vec![
Row::new(Column(1), &'-'),
Row::new(Column(1), &'-'),
Row::new(Column(1), &'-'),
],
zero: 2,
visible_lines: Line(0),
len: 3,
};
storage.rotate(2);
assert!(storage.zero < storage.inner.len());
}
}
| grow_lines |
util.rs | use std::io;
use std::str::FromStr;
use std::fmt::Debug;
pub fn input () -> String
{
let mut ret = String::new();
io::stdin().read_line(&mut ret).expect("Failed to read from stdin");
ret
}
pub fn parse_num<T>() -> T where T: FromStr, <T as FromStr>::Err: Debug {
let inp = input().trim().parse::<T>().unwrap();
inp
}
pub fn parse_line_of_nums<T>() -> Vec<T> where T: FromStr, <T as FromStr>::Err: Debug {
let inp = input()
.trim()
.split_ascii_whitespace() | } | .map(|i| i.parse::<T>().unwrap())
.collect::<Vec<T>>();
inp |
environment.prod.ts | export const environment = {
production: true,
SERVER_URL : 'http://localhost:2302/'
}; |
||
script.js | console.log("background script working");
var t = null;
// var l = null;
let a = {};
xp = location.href;
if (xp.includes("meet.google.com")) {
setTimeout(joinmeeting, 1000);
}
function joinmeeting() {
if (document.readyState == "complete") {
try {
// Mic Button
window.document.querySelectorAll(".U26fgb")[0].click();
// Webcam Button
window.document.querySelectorAll(".U26fgb")[1].click();
window.document.querySelector(".NPEfkd").click();
} catch (err) {
console.log("EROOR");
}
}
}
function rtntime() {
var myDate = new Date();
var hours = myDate.getHours();
var minutes = myDate.getMinutes();
if (hours < 10) { hours = "0" + hours };
if (minutes < 10) { minutes = "0" + minutes; }
var timetrigger = (hours + ":" + minutes);
return timetrigger;
}
chrome.runtime.onMessage.addListener(
function (request, sender, sendResponse) {
if (request.message === "tickbtnclicked") {
a = request.object;
console.log(a);
if (t != null) {
clearTimeout(t);
t = null;
}
// if (l != null) {
// clearTimeout(l);
// l = null;
// }
checker();
// if(a.checkbox=="true"){
// leavetime=a.time
// leaver();
// }
}
if (request.message === "stopbtnclicked") {
console.log("Stop btn clicked");
}
}
);
function | () {
location.replace(a.link);
joinmeeting();
setTimeout(checker, (60 - new Date().getSeconds()) * 1000);
console.log("check");
}
// function leaver() {
// try {
// if (document.querySelector(".gV3Svc>span").nextElementSibling.innerText < 10) {
// console.log("i did it");
// //leave btn
// try {
// window.document.querySelector(".FbBiwc").click();
// } catch {
// console.log("ERROR");
// }
// }
// } catch {
// console.log("Error");
// }
// } | checker |
test_util.py | # Copyright 2019 Graphcore Ltd.
from statistics import mean
import numpy as np
import os
import re
import subprocess
import sys
import time
"""Library of utility functions common between frameworks"""
def parse_results_for_speed(output, iter_tolerance, speed_tolerance):
"""Look for <iter number> sec/itr. <speed number> {other stuff}"""
found_a_result = False
for line in output.split("\n"):
matches = re.match(r"([\d.]+) +sec/itr. +([\d.]+)", line)
if matches:
found_a_result = True
iterations, speed = matches.groups()
iterations = float(iterations)
speed = float(speed)
_verify_model_numbers(
iter_tolerance, iterations, speed_tolerance, speed, line
)
if not found_a_result:
raise AssertionError("No results detected in this run")
def | (output, expected_accuracies, acc_tolerance):
"""Look for Accuracy=<accuracy>%"""
accuracies = []
for line in output.split("\n"):
if re.match(r" + Accuracy=+([\d.]+)%", line):
accuracy = float(re.match(r" + Accuracy=+([\d.]+)%", line).groups()[0])
accuracies.append(accuracy)
elif re.search(r"Validation accuracy", line):
accuracy_str = re.search(r"accuracy:\s(.*)", line).group(1)
accuracy = float(accuracy_str[:accuracy_str.rfind("%")])
accuracies.append(accuracy)
if len(accuracies) == 0:
raise AssertionError("No results detected in this run")
elif len(accuracies) != len(expected_accuracies):
raise AssertionError("Expected accuracies and parsed accuracies have"
" different lengths")
_verify_model_accuracies(accuracies, expected_accuracies, acc_tolerance)
def _verify_model_numbers(iter_tolerance, iterations,
speed_tolerance, speed, line):
iter_error = ""
speed_error = ""
# Verify iteration speed
if iterations > iter_tolerance[1]:
iter_error = ("The time per iteration has regressed above"
" the tolerance maximum: " +
str(iter_tolerance[1]))
elif iterations < iter_tolerance[0]:
iter_error = ("Time taken to compete an iteration was "
"suspiciously fast. Please verify the model"
" is operating correctly and tune tolerances"
" accordingly.")
# Verify item processing speed
if speed < speed_tolerance[0]:
speed_error = ("The number of items processed per second"
" has regressed below the tolerance: " +
str(speed_tolerance[0]))
elif speed > speed_tolerance[1]:
speed_error = ("The number of items processed per second"
" was suspiciously high. Please verify the"
" model is behaving correctly and tune"
" tolerances accordingly.")
if iter_error and speed_error:
sys.stderr.write("\n".join([line, iter_error, speed_error]))
raise AssertionError("Timings out of tolerance range")
elif iter_error or speed_error:
sys.stderr.write(line)
raise AssertionError(iter_error + speed_error)
def _verify_model_accuracies(accuracies, expected_accuracy, acc_tolerance):
"""Asserts a list of accuracies is within a list of expected accuracies
with a tolerance applied.
Args:
accuracies: A list of floats representing the accuracies (%) produced
by the model at each step.
expected_accuracy: A list of floats representing the expected
accuracies (%) produced by the model at each step.
acc_tolerance: A float representing a percentage tolerance applied on
top of the expected accuracies that the accuracies produced by
the model should sit within.
Raises:
Assertion Error: Accuracy produced by the model are not within
the expected limits.
"""
for iter_num in range(len(accuracies)):
exp_acc = expected_accuracy[iter_num]
exp_acc_str = (
"{0} = {1} +- {2} = [{3:.{5}f}, {4:.{5}f}]".format(
"Expected accuracy (%)".ljust(22),
exp_acc,
acc_tolerance,
exp_acc - acc_tolerance,
exp_acc + acc_tolerance,
2
)
)
acc = accuracies[iter_num]
acc_str = "{} = {:.{}f}".format(
"Accuracy (%)".ljust(22),
acc,
2
)
full_acc_str = "{}\n{}".format(acc_str, exp_acc_str)
if acc < exp_acc - acc_tolerance:
raise AssertionError(
"After iteration {}, the model is less accurate"
" than expected.\n"
"{}".format(iter_num + 1, full_acc_str)
)
elif acc > exp_acc + acc_tolerance:
raise AssertionError(
"After iteration {}, the model is producing an accuracy"
" that is suspiciously high and should be reviewed.\n"
"{}".format(iter_num + 1, full_acc_str)
)
def assert_result_equals_tensor_value(output, tensor):
"""Searches for a single tensor result in the first line of the output
Searches the first line of the string output for a line with format
'[array([3., 8.], dtype=float32)]' and asserts its equal to the numpy
tensor argument
Args:
output: String containing the string representation of a numpy
tensor
tensor: numpy tensor representing the expected result
Returns:
None
Raises:
Assertion Error: Output is not in correct format
Assertion Error: Output does not contain a string representation
of a numpy array
Assertion Error: Output numpy array does not equal the expected
numpy array
"""
# TODO - np representation over multiple lines
# TODO - large np array output
# TODO - multiple dimension np output
list_regex = r"^\[.*?\]$"
np_array_str_regex = r"array\(.*?, dtype=.*?\)$"
first_line = output.split("\n")[0]
if not re.match(list_regex, first_line):
raise AssertionError(
"Result not in expected string format."
" Expecting stringified list "
" eg. [array([3., 8.], dtype=float32)]"
)
contents = first_line[1:-1]
if not re.match(np_array_str_regex, contents):
raise AssertionError(
"Expecting numpy representation "
"array with dtype "
"eg. array([3., 8.], dtype=float32)"
)
assert contents == np.array_repr(tensor), (
"Output value {} does not "
"equal expected value {}".format(np.array_repr(contents), tensor)
)
def parse_results_for_ipus_used(output):
"""Finds the number of IPUs used in the model by looking for
string with format ' On 2 IPUs.' in output"""
shards_regex = r" On ([\d.]+) IPUs."
for line in output.split("\n"):
matches = re.match(shards_regex, line)
if matches:
shards = matches.group(1)
return int(shards)
raise AssertionError("Expecting line detailing IPU usage "
"eg. ' On 2 IPUs.'")
def assert_shards(output, expected_shards):
"""Verify the expected number of shards used were actually
used"""
actual_shards = parse_results_for_ipus_used(output)
assert actual_shards == expected_shards
def get_final_accuracy(output):
"""Find and return the accuracy reported in a test's output."""
result_regex = r"Accuracy=([\d.]+)\%"
result_list = parse_results_with_regex(output, result_regex)
result = result_list[0]
return result[-1]
def get_final_loss(output):
"""Find and return the loss reported in a test's output."""
result_regex = r"Loss=([\d.]+)"
result_list = parse_results_with_regex(output, result_regex)
result = result_list[0]
return result[-1]
def get_average_speeds(output):
"""Finds the average seconds/iteration and tokens/second
Args:
output: String representing the output of a test.
Returns:
A tuple where the first element is a float representing
the average iterations per second and the second the
average tokens processed per second
"""
result_regex = r"([\d.]+) +sec/itr. +([\d.]+)"
results = parse_results_with_regex(output, result_regex)
itr_sec_list = results[0]
tokens_sec_list = results[1]
return mean(itr_sec_list), mean(tokens_sec_list)
def parse_results_with_regex(output, regex):
"""Find and returns the regex matching results in output
Looks through the output line by line looking for a matching regex.
The function assembles a list of lists where each parent list is
the results for that position in the regex string and each item in
the child lists represents an order of the results found in the output
Args:
output: String representing the output of a test.
regex: Regex of result to find.
Returns:
A list of lists of floats. Parent list represents the result at each
position in the regex. Child list contains results received in the
order they were output.
Raises:
AssertionError: a line matching the regex could not be found in the
output
"""
results = []
for line in output.split("\n"):
matches = re.search(regex, line)
if matches:
number_of_results = matches.lastindex
if results == []:
results = [None] * number_of_results
for match_index in range(0, number_of_results):
result = float(matches.group(match_index + 1))
if results[match_index]:
results[match_index].append(result)
continue
results[match_index] = [result]
if results == []:
raise AssertionError("Regex {} not found in result".format(regex))
return results
def get_total_epochs(output):
"""Finds the number of epochs model has run through by looking for
string with format 'Epoch #3' in the models raw output"""
epochs = None
for line in output.split("\n"):
epoch_match = re.search(r"Epoch #([\d.]+)", line)
if epoch_match:
epochs = int(epoch_match.group(1))
if not epochs:
raise AssertionError("Epochs not found in output, eg. "
"Epoch #3")
return epochs
def assert_total_run_time(total_time, time_range):
"""Checks total run time is within the required range
Args:
total_time: float representing number of seconds the test took to
run
time_range: a tuple of floats where the first element is the minimum
time the test should run in in seconds and the second the
maximum
Raises:
AssertionError: if the total_time is not between the minimum time
and maximum time
"""
minimum_time = time_range[0]
maximum_time = time_range[1]
assert total_time >= minimum_time
assert total_time <= maximum_time
def assert_final_accuracy(output, minimum, maximum):
"""Gets the final accuracy given a raw model output and checks its value
is between the minimum and maximum
Args:
output: String representing the raw output of a model
minimum: a float representing a percentage (between 0.0% and 100%)
that is the minimum accuracy for the model after running
maximum: a float representing a percentage (between 0.0% and 100%)
that is the maximum accuracy for the model after running
Raises:
AssertionError: if the final accuracy is not between the maximum and
minimum percentages
"""
accuracy = get_final_accuracy(output)
assert accuracy >= minimum
assert accuracy <= maximum
def run_python_script_helper(cwd, script, **kwargs):
"""A function that given a path and python script name, runs the script
with kwargs as the command line arguments
Args:
cwd: string representing the directory of the python script
script: string representing the full name of the python script
kwargs: dictionary of string key and values that form the command
line arguments when the script is run.
Returns:
A string representing the raw output of the python script run
Raises:
AssertionError: if the final accuracy is not between the maximum and
minimum percentages
"""
py_version = "python{}".format(sys.version_info[0])
cmd = [py_version, script]
if kwargs:
args = [
str(item) for sublist in kwargs.items() for item in sublist if item != ""
]
cmd.extend(args)
out = subprocess.check_output(cmd, cwd=cwd, universal_newlines=True)
print(out)
return out
def run_test_helper(subprocess_function, total_run_time=None,
total_run_time_tolerance=0.1, **kwargs):
"""Helper function for running tests
Takes in testable parameters, runs the test and checks the relevant
parameters against test results
Args:
subprocess_function: the function that runs a subprocess of
the model in question
total_run_time_range: tuple float representing the expected
upper and lower bounds for the total time taken to run
the test
Returns:
A String representing the raw output of the models subprocess
Raises:
AssertionError: If the accuracy, time taken etc. are not within
the expected bounds
"""
start_time = time.time()
out = subprocess_function(**kwargs)
total_time = time.time() - start_time
if total_run_time:
total_run_time_range = range_from_tolerances(
total_run_time, total_run_time_tolerance
)
assert_total_run_time(total_time, total_run_time_range)
return out
def range_from_tolerances(value, tolerance):
"""Helper function that takes a value and applies the tolerance
Args:
value: a float representing the mean value to which the tolerance
will be applied
tolerance: a float representing a percentage (between 0.0 and 1.0)
which is applied symmetrically across the value argument
Returns:
A tuple of floats, the first element representing the tolerance
applied below the value (minimum) and the second above (maximum)
"""
return (
get_minimum_with_tolerance(value, tolerance),
get_maximum_with_tolerance(value, tolerance),
)
def get_minimum_with_tolerance(value, tolerance):
"""Helper function that takes a value and applies the tolerance
below the value
Args:
value: a float representing the mean value to which the tolerance
will be applied
tolerance: a float representing a percentage (between 0.0 and 1.0)
which is applied to the value argument
Returns:
A float representing the tolerance applied below the value (maximum)
"""
return value * (1 - tolerance)
def get_maximum_with_tolerance(value, tolerance):
"""Helper function that takes a value and applies the tolerance
above the value
Args:
value: a float representing the mean value to which the tolerance
will be applied
tolerance: a float representing a percentage (between 0.0 and 1.0)
which is applied to the value argument
Returns:
A float representing the tolerance applied above the value (minimum)
"""
return value * (1 + tolerance)
def check_data_exists(data_path, expected_files_list):
"""Helper function that checks the expected data exists in a directory
Args:
data_path: A string representing the directory of where the
data is expected to be
expected_files_list: a list of strings representing the expected
file names in the data_path directory
Returns:
A boolean which represents whether the expected files are found in
the data_path directory
"""
if os.path.exists(data_path):
for filename in expected_files_list:
if not os.path.isfile(os.path.join(data_path, filename)):
return False
return True
return False
| parse_results_for_accuracy |
cube.rs | use std::f32;
use std::fmt;
use na::Matrix4;
use na::Vector3;
use na::Vector4;
use primitives::Primitive;
use util::math;
use Hit;
use Ray;
#[derive(fmt::Debug)]
pub struct Cube
{
position: Vector4<f32>,
size: f32,
}
impl Cube
{
pub fn unit_cube() -> Self
{
Cube {
position: Vector4::new(0.0, 0.0, 0.0, 1.0),
size: 1.0
}
}
pub fn new(position: Vector3<f32>, size: f32) -> Self
{
Cube {
position: Vector4::new(position.x, position.y, position.z, 1.0),
size: size,
}
}
}
impl Primitive for Cube
{
fn hit(&self, ray: &Ray, transform: Matrix4<f32>) -> Option<Hit>
{
enum Faces
{
Front,
Back,
Top,
Bottom,
Left,
Right,
}
let point = transform * ray.point();
let origin = transform * ray.origin();
let ray_direction = point - origin;
let inv_direction = Vector4::repeat(1.0).component_div(&ray_direction);
let min = (self.position.x - origin.x) * inv_direction.x;
let max = (self.position.x + self.size - origin.x) * inv_direction.x;
let (mut t_min, mut face_min, mut t_max, mut face_max) = if inv_direction.x >= 0.0 {
(min, Faces::Left, max, Faces::Right)
} else {
(max, Faces::Right, min, Faces::Left)
};
let min = (self.position.y - origin.y) * inv_direction.y;
let max = (self.position.y + self.size - origin.y) * inv_direction.y;
let (ty_min, y_min_face, ty_max, y_max_face) = if inv_direction.y >= 0.0 {
(min, Faces::Bottom, max, Faces::Top)
} else {
(max, Faces::Top, min, Faces::Bottom)
};
if (t_min > ty_max) || (ty_min > t_max) {
return None;
}
if ty_min > t_min {
t_min = ty_min;
face_min = y_min_face;
}
if ty_max < t_max {
t_max = ty_max;
face_max = y_max_face;
}
let min = (self.position.z - origin.z) * inv_direction.z;
let max = (self.position.z + self.size - origin.z) * inv_direction.z;
let (tz_min, z_face_min, tz_max, z_face_max) = if inv_direction.z >= 0.0 {
(min, Faces::Back, max, Faces::Front)
} else {
(max, Faces::Front, min, Faces::Back)
};
if (t_min > tz_max) || (tz_min > t_max) {
return None;
}
if tz_min > t_min {
t_min = tz_min;
face_min = z_face_min;
}
if tz_max < t_max {
t_max = tz_max;
face_max = z_face_max;
}
let (intersect, face) = if math::far_from_zero_pos(t_min) {
(t_min, face_min)
} else if math::far_from_zero_pos(t_max) {
(t_max, face_max)
} else {
return None;
};
let local_normal = match face {
Faces::Right => Vector4::new(1.0, 0.0, 0.0, 0.0),
Faces::Left => Vector4::new(-1.0, 0.0, 0.0, 0.0),
Faces::Top => Vector4::new(0.0, 1.0, 0.0, 0.0),
Faces::Bottom => Vector4::new(0.0, -1.0, 0.0, 0.0),
Faces::Front => Vector4::new(0.0, 0.0, 1.0, 0.0),
Faces::Back => Vector4::new(0.0, 0.0, -1.0, 0.0),
};
let world_normal = math::transform_normals(local_normal, transform);
// TODO: UV value calculation
Some(Hit {
intersect: intersect,
normal: world_normal,
uv: (0.0, 0.0),
})
}
fn | (&self) -> (Vector4<f32>, Vector4<f32>)
{
(self.position, self.position.add_scalar(self.size))
}
}
| get_extents |
fluid_variationalform.py | #!/usr/bin/env python3
# Copyright (c) 2019-2022, Dr.-Ing. Marc Hirschvogel
# All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import ufl
# fluid mechanics variational forms class
# Principle of Virtual Power
# TeX: \delta \mathcal{P} = \delta \mathcal{P}_{\mathrm{kin}} + \delta \mathcal{P}_{\mathrm{int}} - \delta \mathcal{P}_{\mathrm{ext}} = 0, \quad \forall \; \delta\boldsymbol{v}
class variationalform:
def __init__(self, var_v, dv, var_p, dp, n=None):
self.var_v = var_v
self.var_p = var_p
self.dv = dv
self.dp = dp
self.n = n
### Kinetic virtual power
# TeX: \delta \mathcal{P}_{\mathrm{kin}} := \int\limits_{\Omega} \rho \left(\frac{\partial\boldsymbol{v}}{\partial t} + (\boldsymbol{\nabla}\otimes\boldsymbol{v})^{\mathrm{T}}\boldsymbol{v}\right) \cdot \delta\boldsymbol{v} \,\mathrm{d}v
def deltaP_kin(self, a, v, rho, ddomain, v_old=None):
if v_old is None:
return rho*ufl.dot(a + ufl.grad(v) * v, self.var_v)*ddomain
else:
return rho*ufl.dot(a + ufl.grad(v) * v_old, self.var_v)*ddomain
### Internal virtual power
# TeX: \delta \mathcal{P}_{\mathrm{int}} := \int\limits_{\Omega} \boldsymbol{\sigma} : \delta\boldsymbol{\gamma} \,\mathrm{d}v
def deltaP_int(self, sig, ddomain):
# TeX: \int\limits_{\Omega}\boldsymbol{\sigma} : \delta \boldsymbol{\gamma}\,\mathrm{d}v
var_gamma = 0.5*(ufl.grad(self.var_v).T + ufl.grad(self.var_v))
return ufl.inner(sig, var_gamma)*ddomain
def deltaP_int_pres(self, v, ddomain):
# TeX: \int\limits_{\Omega}\mathrm{div}\boldsymbol{v}\,\delta p\,\mathrm{d}v
return ufl.div(v)*self.var_p*ddomain
def residual_v_strong(self, a, v, rho, sig):
return rho*(a + ufl.grad(v) * v) - ufl.div(sig)
def residual_p_strong(self, v):
|
def f_inert(self, a, v, rho):
return rho*(a + ufl.grad(v) * v)
def f_viscous(self, sig):
return ufl.div(dev(sig))
### External virtual power
# Neumann load (Cauchy traction)
# TeX: \int\limits_{\Gamma} \hat{\boldsymbol{t}} \cdot \delta\boldsymbol{v} \,\mathrm{d}a
def deltaP_ext_neumann(self, func, dboundary):
return ufl.dot(func, self.var_v)*dboundary
# Neumann load in normal direction (Cauchy traction)
# TeX: \int\limits_{\Gamma} p\,\boldsymbol{n}\cdot\delta\boldsymbol{v}\;\mathrm{d}a
def deltaP_ext_neumann_normal(self, func, dboundary):
return func*ufl.dot(self.n, self.var_v)*dboundary
# Robin condition (dashpot)
# TeX: \int\limits_{\Gamma} c\,\boldsymbol{v}\cdot\delta\boldsymbol{v}\;\mathrm{d}a
def deltaP_ext_robin_dashpot(self, v, c, dboundary):
return -c*(ufl.dot(v, self.var_v)*dboundary)
# Robin condition (dashpot) in normal direction
# TeX: \int\limits_{\Gamma} (\boldsymbol{n}\otimes \boldsymbol{n})\,c\,\boldsymbol{v}\cdot\delta\boldsymbol{v}\;\mathrm{d}a
def deltaP_ext_robin_dashpot_normal(self, v, c_n, dboundary):
return -c_n*(ufl.dot(v, self.n)*ufl.dot(self.n, self.var_v)*dboundary)
### Flux coupling conditions
# flux
# TeX: \int\limits_{\Gamma} \boldsymbol{n}\cdot\boldsymbol{v}\;\mathrm{d}a
def flux(self, v, dboundary):
return ufl.dot(self.n, v)*dboundary
# surface - derivative of pressure load w.r.t. pressure
# TeX: \int\limits_{\Gamma} \boldsymbol{n}\cdot\delta\boldsymbol{v}\;\mathrm{d}a
def surface(self, dboundary):
return ufl.dot(self.n, self.var_v)*dboundary
| return ufl.div(v) |
nomeme.rs | //! A default implementation for generating memes with "No bitches?" Megamind
use anyhow::{bail, Result};
use async_trait::async_trait;
use either::Either;
use image::io::Reader as ImageReader; | use regex::Regex;
use rusttype::{Font, Scale};
use std::cmp::min;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use crate::utils;
use crate::{Mismatch, MrSnippet, Runner, RunnerOutput};
/// No meme handler. Contains a compiled regex because
/// compiling it again for every message is an animalistic practice
pub struct NoMeme {
regex: Regex,
}
impl NoMeme {
/// Create a new [`NoMeme`] handler. No Snippet?
pub fn new() -> Self {
Self {
regex: Regex::new(r"(?i)no\s+(.*)?\?").unwrap(),
}
}
}
#[async_trait]
impl MrSnippet for NoMeme {
fn dependencies(&self) -> Result<()> {
if Path::new("img/no.png").exists() {
Ok(())
} else {
bail!("img/no.png missing")
}
}
fn name(&self) -> &'static str {
"no meme"
}
async fn try_or_continue(&self, content: &str) -> Either<Runner, Mismatch> {
let text = if let Some(cap) = self.regex.captures_iter(content).next() {
cap.get(1).unwrap().as_str().to_string()
} else {
return Either::Right(Mismatch::Continue);
};
Either::Left(Runner::new("no meme", "no meme", || {
Box::pin(async move {
let path = utils::rand_path_with_extension(".png");
let mut img = ImageReader::open("img/no.png").unwrap().decode().unwrap();
let mut font = Vec::new();
File::open(std::env::var("FONT_PATH").unwrap())
.unwrap()
.read_to_end(&mut font)
.unwrap();
let font = Font::try_from_vec(font).unwrap();
let height = 40.0;
let scale = Scale {
x: height * 2.0,
y: height * 1.2,
};
// TODO: Make better
let mut text = format!("NO {}?", text.to_uppercase());
let mut y = 10;
while !text.is_empty() {
let e = min(text.len(), 17);
let k: String = text.drain(..e).collect();
let x = (17 - k.len()) / 2;
draw_text_mut(
&mut img,
Rgb::from([255u8, 255u8, 255u8]).to_rgba(),
(x as f32 * scale.y) as u32,
y,
scale,
&font,
&k,
);
y += scale.y as u32;
}
img.save(&path).unwrap();
Ok(RunnerOutput::WithFiles("".into(), vec![path.into()], true))
})
}))
}
} | use image::{Pixel, Rgb};
use imageproc::drawing::draw_text_mut; |
process.go | package triangle | "image"
"image/color"
"image/png"
"io"
"os"
"text/template"
"github.com/fogleman/gg"
)
const (
// WithoutWireframe - generates triangles without stroke
WithoutWireframe = iota
// WithWireframe - generates triangles with stroke
WithWireframe
// WireframeOnly - generates triangles only with wireframe
WireframeOnly
)
// Processor type with processing options
type Processor struct {
BlurRadius int
SobelThreshold int
PointsThreshold int
MaxPoints int
Wireframe int
Noise int
StrokeWidth float64
IsSolid bool
Grayscale bool
OutputToSVG bool
OutputInWeb bool
}
// Line defines the SVG line parameters.
type Line struct {
P0 Node
P1 Node
P2 Node
P3 Node
FillColor color.RGBA
StrokeColor color.RGBA
}
// Image extends the Processor struct.
type Image struct {
Processor
}
// SVG extends the Processor struct with the SVG parameters.
type SVG struct {
Width int
Height int
Title string
Lines []Line
Color color.RGBA
Description string
StrokeLineCap string
StrokeWidth float64
Processor
}
// Drawer interface which defines the Draw method.
// This method needs to be implemented by every struct which defines a Draw method.
// This is meant for code reusing and modularity. In our case the image can be triangulated as raster image or SVG.
type Drawer interface {
Draw(io.Reader, io.Writer) ([]Triangle, []Point, error)
}
// Draw triangulate the source image and output the result to an image file.
// It returns the number of triangles generated, the number of points and the error in case exists.
func (im *Image) Draw(input io.Reader, output io.Writer, closure func()) ([]Triangle, []Point, error) {
var srcImg *image.NRGBA
src, _, err := image.Decode(input)
if err != nil {
return nil, nil, err
}
width, height := src.Bounds().Dx(), src.Bounds().Dy()
ctx := gg.NewContext(width, height)
ctx.DrawRectangle(0, 0, float64(width), float64(height))
ctx.SetRGBA(1, 1, 1, 1)
ctx.Fill()
delaunay := &Delaunay{}
img := toNRGBA(src)
blur := StackBlur(img, uint32(im.BlurRadius))
gray := Grayscale(blur)
sobel := SobelFilter(gray, float64(im.SobelThreshold))
points := GetEdgePoints(sobel, im.PointsThreshold, im.MaxPoints)
triangles := delaunay.Init(width, height).Insert(points).GetTriangles()
if im.Grayscale {
srcImg = gray
} else {
srcImg = img
}
for _, t := range triangles {
p0, p1, p2 := t.Nodes[0], t.Nodes[1], t.Nodes[2]
ctx.Push()
ctx.MoveTo(float64(p0.X), float64(p0.Y))
ctx.LineTo(float64(p1.X), float64(p1.Y))
ctx.LineTo(float64(p2.X), float64(p2.Y))
ctx.LineTo(float64(p0.X), float64(p0.Y))
cx := float64(p0.X+p1.X+p2.X) * 0.33333
cy := float64(p0.Y+p1.Y+p2.Y) * 0.33333
j := ((int(cx) | 0) + (int(cy)|0)*width) * 4
r, g, b := srcImg.Pix[j], srcImg.Pix[j+1], srcImg.Pix[j+2]
var strokeColor color.RGBA
if im.IsSolid {
strokeColor = color.RGBA{R: 0, G: 0, B: 0, A: 255}
} else {
strokeColor = color.RGBA{R: r, G: g, B: b, A: 255}
}
switch im.Wireframe {
case WithoutWireframe:
ctx.SetFillStyle(gg.NewSolidPattern(color.RGBA{R: r, G: g, B: b, A: 255}))
ctx.FillPreserve()
ctx.Fill()
case WithWireframe:
ctx.SetFillStyle(gg.NewSolidPattern(color.RGBA{R: r, G: g, B: b, A: 255}))
ctx.SetStrokeStyle(gg.NewSolidPattern(color.RGBA{R: 0, G: 0, B: 0, A: 20}))
ctx.SetLineWidth(im.StrokeWidth)
ctx.FillPreserve()
ctx.StrokePreserve()
ctx.Stroke()
case WireframeOnly:
ctx.SetStrokeStyle(gg.NewSolidPattern(strokeColor))
ctx.SetLineWidth(im.StrokeWidth)
ctx.StrokePreserve()
ctx.Stroke()
}
ctx.Pop()
}
newimg := ctx.Image()
// Apply a noise on the final image. This will give it a more artistic look.
if im.Noise > 0 {
noisyImg := Noise(im.Noise, newimg, newimg.Bounds().Dx(), newimg.Bounds().Dy())
if err = png.Encode(output, noisyImg); err != nil {
return nil, nil, err
}
} else {
if err = png.Encode(output, newimg); err != nil {
return nil, nil, err
}
}
closure()
return triangles, points, err
}
// Draw triangulate the source image and output the result to an SVG file.
// It returns the number of triangles generated, the number of points and the error in case exists.
func (svg *SVG) Draw(input io.Reader, output io.Writer, closure func()) ([]Triangle, []Point, error) {
const SVGTemplate = `<?xml version="1.0" ?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg width="{{.Width}}px" height="{{.Height}}px" viewBox="0 0 {{.Width}} {{.Height}}"
xmlns="http://www.w3.org/2000/svg" version="1.1">
<title>{{.Title}}</title>
<desc>{{.Description}}</desc>
<!-- Points -->
<g stroke-linecap="{{.StrokeLineCap}}" stroke-width="{{.StrokeWidth}}">
{{range .Lines}}
<path
fill="rgba({{.FillColor.R}},{{.FillColor.G}},{{.FillColor.B}},{{.FillColor.A}})"
stroke="rgba({{.StrokeColor.R}},{{.StrokeColor.G}},{{.StrokeColor.B}},{{.StrokeColor.A}})"
d="M{{.P0.X}},{{.P0.Y}} L{{.P1.X}},{{.P1.Y}} L{{.P2.X}},{{.P2.Y}} L{{.P3.X}},{{.P3.Y}}"
/>
{{end}}</g>
</svg>`
var srcImg *image.NRGBA
var (
lines []Line
fillColor color.RGBA
strokeColor color.RGBA
)
src, _, err := image.Decode(input)
if err != nil {
return nil, nil, err
}
width, height := src.Bounds().Dx(), src.Bounds().Dy()
ctx := gg.NewContext(width, height)
ctx.DrawRectangle(0, 0, float64(width), float64(height))
ctx.SetRGBA(1, 1, 1, 1)
ctx.Fill()
delaunay := &Delaunay{}
img := toNRGBA(src)
blur := StackBlur(img, uint32(svg.BlurRadius))
gray := Grayscale(blur)
sobel := SobelFilter(gray, float64(svg.SobelThreshold))
points := GetEdgePoints(sobel, svg.PointsThreshold, svg.MaxPoints)
triangles := delaunay.Init(width, height).Insert(points).GetTriangles()
if svg.Grayscale {
srcImg = gray
} else {
srcImg = img
}
for _, t := range triangles {
p0, p1, p2 := t.Nodes[0], t.Nodes[1], t.Nodes[2]
cx := float64(p0.X+p1.X+p2.X) * 0.33333
cy := float64(p0.Y+p1.Y+p2.Y) * 0.33333
j := ((int(cx) | 0) + (int(cy)|0)*width) * 4
r, g, b := srcImg.Pix[j], srcImg.Pix[j+1], srcImg.Pix[j+2]
if svg.IsSolid {
strokeColor = color.RGBA{R: 0, G: 0, B: 0, A: 255}
} else {
strokeColor = color.RGBA{R: r, G: g, B: b, A: 255}
}
switch svg.Wireframe {
case WithoutWireframe, WithWireframe:
fillColor = color.RGBA{R: r, G: g, B: b, A: 255}
case WireframeOnly:
fillColor = color.RGBA{R: 255, G: 255, B: 255, A: 255}
}
lines = append(lines, []Line{
{
Node{p0.X, p0.Y},
Node{p1.X, p1.Y},
Node{p2.X, p2.Y},
Node{p0.X, p0.Y},
fillColor,
strokeColor,
},
}...)
}
svg.Width = width
svg.Height = height
svg.Lines = lines
tmpl := template.Must(template.New("svg").Parse(SVGTemplate))
if err := tmpl.Execute(output, svg); err != nil {
fmt.Printf("Error: %v\n", err)
os.Exit(1)
}
closure()
return triangles, points, err
}
// toNRGBA converts any image type to *image.NRGBA with min-point at (0, 0).
func toNRGBA(img image.Image) *image.NRGBA {
srcBounds := img.Bounds()
if srcBounds.Min.X == 0 && srcBounds.Min.Y == 0 {
if src0, ok := img.(*image.NRGBA); ok {
return src0
}
}
srcMinX := srcBounds.Min.X
srcMinY := srcBounds.Min.Y
dstBounds := srcBounds.Sub(srcBounds.Min)
dstW := dstBounds.Dx()
dstH := dstBounds.Dy()
dst := image.NewNRGBA(dstBounds)
switch src := img.(type) {
case *image.NRGBA:
rowSize := srcBounds.Dx() * 4
for dstY := 0; dstY < dstH; dstY++ {
di := dst.PixOffset(0, dstY)
si := src.PixOffset(srcMinX, srcMinY+dstY)
for dstX := 0; dstX < dstW; dstX++ {
copy(dst.Pix[di:di+rowSize], src.Pix[si:si+rowSize])
}
}
case *image.YCbCr:
for dstY := 0; dstY < dstH; dstY++ {
di := dst.PixOffset(0, dstY)
for dstX := 0; dstX < dstW; dstX++ {
srcX := srcMinX + dstX
srcY := srcMinY + dstY
siy := src.YOffset(srcX, srcY)
sic := src.COffset(srcX, srcY)
r, g, b := color.YCbCrToRGB(src.Y[siy], src.Cb[sic], src.Cr[sic])
dst.Pix[di+0] = r
dst.Pix[di+1] = g
dst.Pix[di+2] = b
dst.Pix[di+3] = 0xff
di += 4
}
}
default:
for dstY := 0; dstY < dstH; dstY++ {
di := dst.PixOffset(0, dstY)
for dstX := 0; dstX < dstW; dstX++ {
c := color.NRGBAModel.Convert(img.At(srcMinX+dstX, srcMinY+dstY)).(color.NRGBA)
dst.Pix[di+0] = c.R
dst.Pix[di+1] = c.G
dst.Pix[di+2] = c.B
dst.Pix[di+3] = c.A
di += 4
}
}
}
return dst
} |
import (
"fmt" |
admin_test.go | package data
import (
"context"
"reflect"
"testing"
"time"
"github.com/evergreen-ci/evergreen"
"github.com/evergreen-ci/evergreen/apimodels"
"github.com/evergreen-ci/evergreen/db"
"github.com/evergreen-ci/evergreen/mock"
"github.com/evergreen-ci/evergreen/model"
"github.com/evergreen-ci/evergreen/model/build"
"github.com/evergreen-ci/evergreen/model/event"
"github.com/evergreen-ci/evergreen/model/task"
"github.com/evergreen-ci/evergreen/model/user"
restModel "github.com/evergreen-ci/evergreen/rest/model"
"github.com/evergreen-ci/evergreen/testutil"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
)
type AdminDataSuite struct {
ctx Connector
env *mock.Environment
suite.Suite
}
func TestDataConnectorSuite(t *testing.T) {
s := new(AdminDataSuite)
s.ctx = &DBConnector{}
require.NoError(t, db.ClearCollections(evergreen.ConfigCollection, task.Collection, task.OldCollection, build.Collection, model.VersionCollection, event.AllLogCollection), "Error clearing collections")
b := &build.Build{
Id: "buildtest",
Status: evergreen.BuildStarted,
Version: "abc",
}
v := &model.Version{
Id: b.Version,
Status: evergreen.VersionStarted,
}
testTask1 := &task.Task{
Id: "taskToRestart",
Activated: false,
BuildId: b.Id,
Execution: 1,
Project: "sample",
StartTime: time.Date(2017, time.June, 12, 12, 0, 0, 0, time.Local),
Status: evergreen.TaskFailed,
Details: apimodels.TaskEndDetail{
Type: evergreen.CommandTypeTest,
},
}
testTask2 := &task.Task{
Id: "taskThatSucceeded",
Activated: false,
BuildId: b.Id,
Execution: 1,
Project: "sample",
StartTime: time.Date(2017, time.June, 12, 12, 0, 0, 0, time.Local),
Status: evergreen.TaskSucceeded,
}
testTask3 := &task.Task{
Id: "taskOutsideOfTimeRange",
Activated: false, | BuildId: b.Id,
Execution: 1,
Project: "sample",
StartTime: time.Date(2017, time.June, 11, 12, 0, 0, 0, time.Local),
Status: evergreen.TaskFailed,
}
p := &model.ProjectRef{
Identifier: "sample",
}
b.Tasks = []build.TaskCache{
{
Id: testTask1.Id,
},
{
Id: testTask2.Id,
},
{
Id: testTask3.Id,
},
}
require.NoError(t, b.Insert(), "error inserting documents")
require.NoError(t, v.Insert(), "error inserting documents")
require.NoError(t, testTask1.Insert(), "error inserting documents")
require.NoError(t, testTask2.Insert(), "error inserting documents")
require.NoError(t, testTask3.Insert(), "error inserting documents")
require.NoError(t, p.Insert(), "error inserting documents")
suite.Run(t, s)
}
func TestMockConnectorSuite(t *testing.T) {
s := new(AdminDataSuite)
s.ctx = &MockConnector{}
suite.Run(t, s)
}
func (s *AdminDataSuite) SetupSuite() {
s.env = &mock.Environment{}
s.Require().NoError(s.env.Configure(context.Background(), "", nil))
s.Require().NoError(s.env.Local.Start(context.Background()))
}
func (s *AdminDataSuite) TestSetAndGetSettings() {
u := &user.DBUser{Id: "user"}
testSettings := testutil.MockConfig()
// convert the DB model to an API model
restSettings := restModel.NewConfigModel()
err := restSettings.BuildFromService(testSettings)
s.NoError(err)
// try to set the DB model with this API model
oldSettings, err := evergreen.GetConfig()
s.NoError(err)
_, err = s.ctx.SetEvergreenSettings(restSettings, oldSettings, u, true)
s.NoError(err)
// read the settings and spot check values
settingsFromConnector, err := s.ctx.GetEvergreenSettings()
s.NoError(err)
s.EqualValues(testSettings.Banner, settingsFromConnector.Banner)
s.EqualValues(testSettings.ServiceFlags, settingsFromConnector.ServiceFlags)
s.EqualValues(evergreen.Important, testSettings.BannerTheme)
s.EqualValues(testSettings.Alerts.SMTP.From, settingsFromConnector.Alerts.SMTP.From)
s.EqualValues(testSettings.Alerts.SMTP.Port, settingsFromConnector.Alerts.SMTP.Port)
s.Equal(len(testSettings.Alerts.SMTP.AdminEmail), len(settingsFromConnector.Alerts.SMTP.AdminEmail))
s.EqualValues(testSettings.Amboy.Name, settingsFromConnector.Amboy.Name)
s.EqualValues(testSettings.Amboy.LocalStorage, settingsFromConnector.Amboy.LocalStorage)
s.EqualValues(testSettings.Api.HttpListenAddr, settingsFromConnector.Api.HttpListenAddr)
s.EqualValues(testSettings.AuthConfig.LDAP.URL, settingsFromConnector.AuthConfig.LDAP.URL)
s.EqualValues(testSettings.AuthConfig.Naive.Users[0].Username, settingsFromConnector.AuthConfig.Naive.Users[0].Username)
s.EqualValues(testSettings.AuthConfig.Github.ClientId, settingsFromConnector.AuthConfig.Github.ClientId)
s.Equal(len(testSettings.AuthConfig.Github.Users), len(settingsFromConnector.AuthConfig.Github.Users))
s.EqualValues(testSettings.HostInit.SSHTimeoutSeconds, settingsFromConnector.HostInit.SSHTimeoutSeconds)
s.EqualValues(testSettings.Jira.Username, settingsFromConnector.Jira.Username)
s.EqualValues(testSettings.LoggerConfig.DefaultLevel, settingsFromConnector.LoggerConfig.DefaultLevel)
s.EqualValues(testSettings.LoggerConfig.Buffer.Count, settingsFromConnector.LoggerConfig.Buffer.Count)
s.EqualValues(testSettings.Notify.SMTP.From, settingsFromConnector.Notify.SMTP.From)
s.EqualValues(testSettings.Notify.SMTP.Port, settingsFromConnector.Notify.SMTP.Port)
s.Equal(len(testSettings.Notify.SMTP.AdminEmail), len(settingsFromConnector.Notify.SMTP.AdminEmail))
s.EqualValues(testSettings.Providers.AWS.EC2Key, settingsFromConnector.Providers.AWS.EC2Key)
s.EqualValues(testSettings.Providers.Docker.APIVersion, settingsFromConnector.Providers.Docker.APIVersion)
s.EqualValues(testSettings.Providers.GCE.ClientEmail, settingsFromConnector.Providers.GCE.ClientEmail)
s.EqualValues(testSettings.Providers.OpenStack.IdentityEndpoint, settingsFromConnector.Providers.OpenStack.IdentityEndpoint)
s.EqualValues(testSettings.Providers.VSphere.Host, settingsFromConnector.Providers.VSphere.Host)
s.EqualValues(testSettings.RepoTracker.MaxConcurrentRequests, settingsFromConnector.RepoTracker.MaxConcurrentRequests)
s.EqualValues(testSettings.Scheduler.TaskFinder, settingsFromConnector.Scheduler.TaskFinder)
s.EqualValues(testSettings.ServiceFlags.HostInitDisabled, settingsFromConnector.ServiceFlags.HostInitDisabled)
s.EqualValues(testSettings.Slack.Level, settingsFromConnector.Slack.Level)
s.EqualValues(testSettings.Slack.Options.Channel, settingsFromConnector.Slack.Options.Channel)
s.EqualValues(testSettings.Splunk.Channel, settingsFromConnector.Splunk.Channel)
s.EqualValues(testSettings.Ui.HttpListenAddr, settingsFromConnector.Ui.HttpListenAddr)
// the tests below do not apply to the mock connector
if reflect.TypeOf(s.ctx).String() == "*data.MockConnector" {
return
}
// spot check events in the event log
events, err := event.FindAdmin(event.RecentAdminEvents(1000))
s.NoError(err)
foundAlertsEvent := false
foundFlagsEvent := false
foundProvidersEvent := false
foundRootEvent := false
foundUiEvent := false
for _, evt := range events {
s.Equal(event.EventTypeValueChanged, evt.EventType)
data := evt.Data.(*event.AdminEventData)
s.Equal(u.Id, data.User)
switch v := data.Changes.After.(type) {
case *evergreen.AlertsConfig:
foundAlertsEvent = true
s.Equal(testSettings.Alerts.SMTP.From, v.SMTP.From)
s.Equal(testSettings.Alerts.SMTP.Username, v.SMTP.Username)
case *evergreen.ServiceFlags:
foundFlagsEvent = true
s.Equal(testSettings.ServiceFlags.RepotrackerDisabled, v.RepotrackerDisabled)
case *evergreen.CloudProviders:
foundProvidersEvent = true
s.Equal(testSettings.Providers.AWS.EC2Key, v.AWS.EC2Key)
s.Equal(testSettings.Providers.GCE.ClientEmail, v.GCE.ClientEmail)
case *evergreen.Settings:
foundRootEvent = true
s.Equal(testSettings.ClientBinariesDir, v.ClientBinariesDir)
s.Equal(testSettings.Credentials, v.Credentials)
s.Equal(testSettings.SuperUsers, v.SuperUsers)
case *evergreen.UIConfig:
foundUiEvent = true
s.Equal(testSettings.Ui.Url, v.Url)
s.Equal(testSettings.Ui.CacheTemplates, v.CacheTemplates)
}
}
s.True(foundAlertsEvent)
s.True(foundFlagsEvent)
s.True(foundProvidersEvent)
s.True(foundRootEvent)
s.True(foundUiEvent)
// test that updating the model with nil values does not change them
newBanner := "new banner"
newExpansions := map[string]string{"newkey": "newval"}
newHostinit := restModel.APIHostInitConfig{
SSHTimeoutSeconds: 999,
}
updatedSettings := restModel.APIAdminSettings{
Banner: &newBanner,
Expansions: newExpansions,
HostInit: &newHostinit,
}
oldSettings, err = evergreen.GetConfig()
s.NoError(err)
_, err = s.ctx.SetEvergreenSettings(&updatedSettings, oldSettings, u, true)
s.NoError(err)
settingsFromConnector, err = s.ctx.GetEvergreenSettings()
s.NoError(err)
// new values should be set
s.EqualValues(newBanner, settingsFromConnector.Banner)
s.EqualValues(newExpansions, settingsFromConnector.Expansions)
s.EqualValues(newHostinit, settingsFromConnector.HostInit)
// old values should still be there
s.EqualValues(testSettings.ServiceFlags, settingsFromConnector.ServiceFlags)
s.EqualValues(evergreen.Important, testSettings.BannerTheme)
s.EqualValues(testSettings.Alerts.SMTP.From, settingsFromConnector.Alerts.SMTP.From)
s.EqualValues(testSettings.Alerts.SMTP.Port, settingsFromConnector.Alerts.SMTP.Port)
s.Equal(len(testSettings.Alerts.SMTP.AdminEmail), len(settingsFromConnector.Alerts.SMTP.AdminEmail))
s.EqualValues(testSettings.Amboy.Name, settingsFromConnector.Amboy.Name)
s.EqualValues(testSettings.Amboy.LocalStorage, settingsFromConnector.Amboy.LocalStorage)
s.EqualValues(testSettings.Api.HttpListenAddr, settingsFromConnector.Api.HttpListenAddr)
s.EqualValues(testSettings.AuthConfig.LDAP.URL, settingsFromConnector.AuthConfig.LDAP.URL)
s.EqualValues(testSettings.AuthConfig.Naive.Users[0].Username, settingsFromConnector.AuthConfig.Naive.Users[0].Username)
s.EqualValues(testSettings.AuthConfig.Github.ClientId, settingsFromConnector.AuthConfig.Github.ClientId)
s.Equal(len(testSettings.AuthConfig.Github.Users), len(settingsFromConnector.AuthConfig.Github.Users))
s.EqualValues(testSettings.Jira.Username, settingsFromConnector.Jira.Username)
s.EqualValues(testSettings.LoggerConfig.DefaultLevel, settingsFromConnector.LoggerConfig.DefaultLevel)
s.EqualValues(testSettings.LoggerConfig.Buffer.Count, settingsFromConnector.LoggerConfig.Buffer.Count)
s.EqualValues(testSettings.Notify.SMTP.From, settingsFromConnector.Notify.SMTP.From)
s.EqualValues(testSettings.Notify.SMTP.Port, settingsFromConnector.Notify.SMTP.Port)
s.Equal(len(testSettings.Notify.SMTP.AdminEmail), len(settingsFromConnector.Notify.SMTP.AdminEmail))
s.EqualValues(testSettings.Providers.AWS.EC2Key, settingsFromConnector.Providers.AWS.EC2Key)
s.EqualValues(testSettings.Providers.Docker.APIVersion, settingsFromConnector.Providers.Docker.APIVersion)
s.EqualValues(testSettings.Providers.GCE.ClientEmail, settingsFromConnector.Providers.GCE.ClientEmail)
s.EqualValues(testSettings.Providers.OpenStack.IdentityEndpoint, settingsFromConnector.Providers.OpenStack.IdentityEndpoint)
s.EqualValues(testSettings.Providers.VSphere.Host, settingsFromConnector.Providers.VSphere.Host)
s.EqualValues(testSettings.RepoTracker.MaxConcurrentRequests, settingsFromConnector.RepoTracker.MaxConcurrentRequests)
s.EqualValues(testSettings.Scheduler.TaskFinder, settingsFromConnector.Scheduler.TaskFinder)
s.EqualValues(testSettings.ServiceFlags.HostInitDisabled, settingsFromConnector.ServiceFlags.HostInitDisabled)
s.EqualValues(testSettings.Slack.Level, settingsFromConnector.Slack.Level)
s.EqualValues(testSettings.Slack.Options.Channel, settingsFromConnector.Slack.Options.Channel)
s.EqualValues(testSettings.Splunk.Channel, settingsFromConnector.Splunk.Channel)
s.EqualValues(testSettings.Ui.HttpListenAddr, settingsFromConnector.Ui.HttpListenAddr)
}
func (s *AdminDataSuite) TestRestart() {
startTime := time.Date(2017, time.June, 12, 11, 0, 0, 0, time.Local)
endTime := time.Date(2017, time.June, 12, 13, 0, 0, 0, time.Local)
userName := "user"
// test dry run
opts := model.RestartTaskOptions{
DryRun: true,
StartTime: startTime,
EndTime: endTime,
User: userName,
}
dryRunResp, err := s.ctx.RestartFailedTasks(s.env.LocalQueue(), opts)
s.NoError(err)
s.NotZero(len(dryRunResp.TasksRestarted))
s.Nil(dryRunResp.TasksErrored)
// test that restarting tasks successfully puts a job on the queue
opts.DryRun = false
_, err = s.ctx.RestartFailedTasks(s.env.LocalQueue(), opts)
s.NoError(err)
}
func (s *AdminDataSuite) TestGetBanner() {
u := &user.DBUser{Id: "me"}
s.NoError(s.ctx.SetAdminBanner("banner text", u))
s.NoError(s.ctx.SetBannerTheme(evergreen.Important, u))
text, theme, err := s.ctx.GetBanner()
s.NoError(err)
s.Equal("banner text", text)
s.Equal(evergreen.Important, theme)
} | |
pymethod.rs | // Copyright (c) 2017-present PyO3 Project and Contributors
use crate::konst::ConstSpec;
use crate::method::{FnArg, FnSpec, FnType, SelfType};
use crate::utils;
use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::{ext::IdentExt, spanned::Spanned};
pub enum PropertyType<'a> {
Descriptor(&'a syn::Field),
Function(&'a FnSpec<'a>),
}
pub fn gen_py_method(
cls: &syn::Type,
sig: &mut syn::Signature,
meth_attrs: &mut Vec<syn::Attribute>,
) -> syn::Result<TokenStream> {
check_generic(sig)?;
let spec = FnSpec::parse(sig, &mut *meth_attrs, true)?;
Ok(match &spec.tp {
FnType::Fn(self_ty) => impl_py_method_def(&spec, &impl_wrap(cls, &spec, self_ty, true)),
FnType::FnNew => impl_py_method_def_new(&spec, &impl_wrap_new(cls, &spec)),
FnType::FnCall(self_ty) => {
impl_py_method_def_call(&spec, &impl_wrap(cls, &spec, self_ty, false))
}
FnType::FnClass => impl_py_method_def_class(&spec, &impl_wrap_class(cls, &spec)),
FnType::FnStatic => impl_py_method_def_static(&spec, &impl_wrap_static(cls, &spec)),
FnType::ClassAttribute => {
impl_py_method_class_attribute(&spec, &impl_wrap_class_attribute(cls, &spec))
}
FnType::Getter(self_ty) => impl_py_getter_def(
&spec.python_name,
&spec.doc,
&impl_wrap_getter(cls, PropertyType::Function(&spec), self_ty)?,
),
FnType::Setter(self_ty) => impl_py_setter_def(
&spec.python_name,
&spec.doc,
&impl_wrap_setter(cls, PropertyType::Function(&spec), self_ty)?,
),
})
}
fn check_generic(sig: &syn::Signature) -> syn::Result<()> {
let err_msg = |typ| format!("a Python method can't have a generic {} parameter", typ);
for param in &sig.generics.params {
match param {
syn::GenericParam::Lifetime(_) => {}
syn::GenericParam::Type(_) => bail_spanned!(param.span() => err_msg("type")),
syn::GenericParam::Const(_) => bail_spanned!(param.span() => err_msg("const")),
}
}
Ok(())
}
pub fn gen_py_const(
cls: &syn::Type,
name: &syn::Ident,
attrs: &mut Vec<syn::Attribute>,
) -> syn::Result<Option<TokenStream>> {
let spec = ConstSpec::parse(name, attrs)?;
if spec.is_class_attr {
let wrapper = quote! {
fn __wrap(py: pyo3::Python<'_>) -> pyo3::PyObject {
pyo3::IntoPy::into_py(#cls::#name, py)
}
};
return Ok(Some(impl_py_const_class_attribute(&spec, &wrapper)));
}
Ok(None)
}
/// Generate function wrapper (PyCFunction, PyCFunctionWithKeywords)
pub fn impl_wrap(
cls: &syn::Type,
spec: &FnSpec<'_>,
self_ty: &SelfType,
noargs: bool,
) -> TokenStream {
let body = impl_call(cls, &spec);
let slf = self_ty.receiver(cls);
impl_wrap_common(cls, spec, noargs, slf, body)
}
fn impl_wrap_common(
cls: &syn::Type,
spec: &FnSpec<'_>,
noargs: bool,
slf: TokenStream,
body: TokenStream,
) -> TokenStream {
let python_name = &spec.python_name;
if spec.args.is_empty() && noargs {
quote! {
unsafe extern "C" fn __wrap(
_slf: *mut pyo3::ffi::PyObject,
_args: *mut pyo3::ffi::PyObject,
) -> *mut pyo3::ffi::PyObject
{
const _LOCATION: &'static str = concat!(
stringify!(#cls), ".", stringify!(#python_name), "()");
pyo3::callback_body_without_convert!(_py, {
#slf
pyo3::callback::convert(_py, #body)
})
}
}
} else {
let body = impl_arg_params(&spec, Some(cls), body);
quote! {
unsafe extern "C" fn __wrap(
_slf: *mut pyo3::ffi::PyObject,
_args: *mut pyo3::ffi::PyObject,
_kwargs: *mut pyo3::ffi::PyObject) -> *mut pyo3::ffi::PyObject
{
const _LOCATION: &'static str = concat!(
stringify!(#cls), ".", stringify!(#python_name), "()");
pyo3::callback_body_without_convert!(_py, {
#slf
let _args = _py.from_borrowed_ptr::<pyo3::types::PyTuple>(_args);
let _kwargs: Option<&pyo3::types::PyDict> = _py.from_borrowed_ptr_or_opt(_kwargs);
pyo3::callback::convert(_py, #body)
})
}
}
}
}
/// Generate function wrapper for protocol method (PyCFunction, PyCFunctionWithKeywords)
pub fn impl_proto_wrap(cls: &syn::Type, spec: &FnSpec<'_>, self_ty: &SelfType) -> TokenStream {
let python_name = &spec.python_name;
let cb = impl_call(cls, &spec);
let body = impl_arg_params(&spec, Some(cls), cb);
let slf = self_ty.receiver(cls);
quote! {
#[allow(unused_mut)]
unsafe extern "C" fn __wrap(
_slf: *mut pyo3::ffi::PyObject,
_args: *mut pyo3::ffi::PyObject,
_kwargs: *mut pyo3::ffi::PyObject) -> *mut pyo3::ffi::PyObject
{
const _LOCATION: &'static str = concat!(stringify!(#cls),".",stringify!(#python_name),"()");
pyo3::callback_body_without_convert!(_py, {
#slf
let _args = _py.from_borrowed_ptr::<pyo3::types::PyTuple>(_args);
let _kwargs: Option<&pyo3::types::PyDict> = _py.from_borrowed_ptr_or_opt(_kwargs);
pyo3::callback::convert(_py, #body)
})
}
}
}
/// Generate class method wrapper (PyCFunction, PyCFunctionWithKeywords)
pub fn impl_wrap_new(cls: &syn::Type, spec: &FnSpec<'_>) -> TokenStream {
let name = &spec.name;
let python_name = &spec.python_name;
let names: Vec<syn::Ident> = get_arg_names(&spec);
let cb = quote! { #cls::#name(#(#names),*) };
let body = impl_arg_params(spec, Some(cls), cb);
quote! {
#[allow(unused_mut)]
unsafe extern "C" fn __wrap(
subtype: *mut pyo3::ffi::PyTypeObject,
_args: *mut pyo3::ffi::PyObject,
_kwargs: *mut pyo3::ffi::PyObject) -> *mut pyo3::ffi::PyObject
{
use pyo3::type_object::PyTypeInfo;
use pyo3::callback::IntoPyCallbackOutput;
use std::convert::TryFrom;
const _LOCATION: &'static str = concat!(stringify!(#cls),".",stringify!(#python_name),"()");
pyo3::callback_body_without_convert!(_py, {
let _args = _py.from_borrowed_ptr::<pyo3::types::PyTuple>(_args);
let _kwargs: Option<&pyo3::types::PyDict> = _py.from_borrowed_ptr_or_opt(_kwargs);
let initializer: pyo3::PyClassInitializer::<#cls> = #body.convert(_py)?;
let cell = initializer.create_cell_from_subtype(_py, subtype)?;
Ok(cell as *mut pyo3::ffi::PyObject)
})
}
}
}
/// Generate class method wrapper (PyCFunction, PyCFunctionWithKeywords)
pub fn impl_wrap_class(cls: &syn::Type, spec: &FnSpec<'_>) -> TokenStream {
let name = &spec.name;
let python_name = &spec.python_name;
let names: Vec<syn::Ident> = get_arg_names(&spec);
let cb = quote! { #cls::#name(&_cls, #(#names),*) };
let body = impl_arg_params(spec, Some(cls), cb);
quote! {
#[allow(unused_mut)]
unsafe extern "C" fn __wrap(
_cls: *mut pyo3::ffi::PyObject,
_args: *mut pyo3::ffi::PyObject,
_kwargs: *mut pyo3::ffi::PyObject) -> *mut pyo3::ffi::PyObject
{
const _LOCATION: &'static str = concat!(stringify!(#cls),".",stringify!(#python_name),"()");
pyo3::callback_body_without_convert!(_py, {
let _cls = pyo3::types::PyType::from_type_ptr(_py, _cls as *mut pyo3::ffi::PyTypeObject);
let _args = _py.from_borrowed_ptr::<pyo3::types::PyTuple>(_args);
let _kwargs: Option<&pyo3::types::PyDict> = _py.from_borrowed_ptr_or_opt(_kwargs);
pyo3::callback::convert(_py, #body)
})
}
}
}
/// Generate static method wrapper (PyCFunction, PyCFunctionWithKeywords)
pub fn | (cls: &syn::Type, spec: &FnSpec<'_>) -> TokenStream {
let name = &spec.name;
let python_name = &spec.python_name;
let names: Vec<syn::Ident> = get_arg_names(&spec);
let cb = quote! { #cls::#name(#(#names),*) };
let body = impl_arg_params(spec, Some(cls), cb);
quote! {
#[allow(unused_mut)]
unsafe extern "C" fn __wrap(
_slf: *mut pyo3::ffi::PyObject,
_args: *mut pyo3::ffi::PyObject,
_kwargs: *mut pyo3::ffi::PyObject) -> *mut pyo3::ffi::PyObject
{
const _LOCATION: &'static str = concat!(stringify!(#cls),".",stringify!(#python_name),"()");
pyo3::callback_body_without_convert!(_py, {
let _args = _py.from_borrowed_ptr::<pyo3::types::PyTuple>(_args);
let _kwargs: Option<&pyo3::types::PyDict> = _py.from_borrowed_ptr_or_opt(_kwargs);
pyo3::callback::convert(_py, #body)
})
}
}
}
/// Generate a wrapper for initialization of a class attribute from a method
/// annotated with `#[classattr]`.
/// To be called in `pyo3::pyclass::initialize_type_object`.
pub fn impl_wrap_class_attribute(cls: &syn::Type, spec: &FnSpec<'_>) -> TokenStream {
let name = &spec.name;
let cb = quote! { #cls::#name() };
quote! {
fn __wrap(py: pyo3::Python<'_>) -> pyo3::PyObject {
pyo3::IntoPy::into_py(#cb, py)
}
}
}
fn impl_call_getter(cls: &syn::Type, spec: &FnSpec) -> syn::Result<TokenStream> {
let (py_arg, args) = split_off_python_arg(&spec.args);
ensure_spanned!(
args.is_empty(),
args[0].ty.span() => "getter function can only have one argument (of type pyo3::Python)"
);
let name = &spec.name;
let fncall = if py_arg.is_some() {
quote!(#cls::#name(_slf, _py))
} else {
quote!(#cls::#name(_slf))
};
Ok(fncall)
}
/// Generate a function wrapper called `__wrap` for a property getter
pub(crate) fn impl_wrap_getter(
cls: &syn::Type,
property_type: PropertyType,
self_ty: &SelfType,
) -> syn::Result<TokenStream> {
let (python_name, getter_impl) = match property_type {
PropertyType::Descriptor(field) => {
let name = field.ident.as_ref().unwrap();
(
name.unraw(),
quote!({
_slf.#name.clone()
}),
)
}
PropertyType::Function(spec) => (spec.python_name.clone(), impl_call_getter(cls, spec)?),
};
let slf = self_ty.receiver(cls);
Ok(quote! {
unsafe extern "C" fn __wrap(
_slf: *mut pyo3::ffi::PyObject, _: *mut std::os::raw::c_void) -> *mut pyo3::ffi::PyObject
{
const _LOCATION: &'static str = concat!(stringify!(#cls),".",stringify!(#python_name),"()");
pyo3::callback_body_without_convert!(_py, {
#slf
pyo3::callback::convert(_py, #getter_impl)
})
}
})
}
fn impl_call_setter(cls: &syn::Type, spec: &FnSpec) -> syn::Result<TokenStream> {
let (py_arg, args) = split_off_python_arg(&spec.args);
if args.is_empty() {
bail_spanned!(spec.name.span() => "setter function expected to have one argument");
} else if args.len() > 1 {
bail_spanned!(
args[1].ty.span() =>
"setter function can have at most two arguments ([pyo3::Python,] and value)"
);
}
let name = &spec.name;
let fncall = if py_arg.is_some() {
quote!(#cls::#name(_slf, _py, _val))
} else {
quote!(#cls::#name(_slf, _val))
};
Ok(fncall)
}
/// Generate a function wrapper called `__wrap` for a property setter
pub(crate) fn impl_wrap_setter(
cls: &syn::Type,
property_type: PropertyType,
self_ty: &SelfType,
) -> syn::Result<TokenStream> {
let (python_name, setter_impl) = match property_type {
PropertyType::Descriptor(field) => {
let name = field.ident.as_ref().unwrap();
(name.unraw(), quote!({ _slf.#name = _val; }))
}
PropertyType::Function(spec) => (spec.python_name.clone(), impl_call_setter(cls, spec)?),
};
let slf = self_ty.receiver(cls);
Ok(quote! {
#[allow(unused_mut)]
unsafe extern "C" fn __wrap(
_slf: *mut pyo3::ffi::PyObject,
_value: *mut pyo3::ffi::PyObject, _: *mut std::os::raw::c_void) -> std::os::raw::c_int
{
const _LOCATION: &'static str = concat!(stringify!(#cls),".",stringify!(#python_name),"()");
pyo3::callback_body_without_convert!(_py, {
#slf
let _value = _py.from_borrowed_ptr::<pyo3::types::PyAny>(_value);
let _val = pyo3::FromPyObject::extract(_value)?;
pyo3::callback::convert(_py, #setter_impl)
})
}
})
}
/// This function abstracts away some copied code and can propably be simplified itself
pub fn get_arg_names(spec: &FnSpec) -> Vec<syn::Ident> {
(0..spec.args.len())
.map(|pos| syn::Ident::new(&format!("arg{}", pos), Span::call_site()))
.collect()
}
fn impl_call(cls: &syn::Type, spec: &FnSpec<'_>) -> TokenStream {
let fname = &spec.name;
let names = get_arg_names(spec);
quote! { #cls::#fname(_slf, #(#names),*) }
}
pub fn impl_arg_params(
spec: &FnSpec<'_>,
self_: Option<&syn::Type>,
body: TokenStream,
) -> TokenStream {
if spec.args.is_empty() {
return quote! {
#body
};
}
let mut params = Vec::new();
for arg in spec.args.iter() {
if arg.py || spec.is_args(&arg.name) || spec.is_kwargs(&arg.name) {
continue;
}
let name = arg.name.unraw().to_string();
let kwonly = spec.is_kw_only(&arg.name);
let opt = arg.optional.is_some() || spec.default_value(&arg.name).is_some();
params.push(quote! {
pyo3::derive_utils::ParamDescription {
name: #name,
is_optional: #opt,
kw_only: #kwonly
}
});
}
let mut param_conversion = Vec::new();
let mut option_pos = 0;
for (idx, arg) in spec.args.iter().enumerate() {
param_conversion.push(impl_arg_param(&arg, &spec, idx, self_, &mut option_pos));
}
let (mut accept_args, mut accept_kwargs) = (false, false);
for s in spec.attrs.iter() {
use crate::pyfunction::Argument;
match s {
Argument::VarArgs(_) => accept_args = true,
Argument::KeywordArgs(_) => accept_kwargs = true,
_ => continue,
}
}
let num_normal_params = params.len();
// create array of arguments, and then parse
quote! {{
const PARAMS: &'static [pyo3::derive_utils::ParamDescription] = &[
#(#params),*
];
let mut output = [None; #num_normal_params];
let mut _args = _args;
let mut _kwargs = _kwargs;
let (_args, _kwargs) = pyo3::derive_utils::parse_fn_args(
Some(_LOCATION),
PARAMS,
_args,
_kwargs,
#accept_args,
#accept_kwargs,
&mut output
)?;
#(#param_conversion)*
#body
}}
}
/// Re option_pos: The option slice doesn't contain the py: Python argument, so the argument
/// index and the index in option diverge when using py: Python
fn impl_arg_param(
arg: &FnArg<'_>,
spec: &FnSpec<'_>,
idx: usize,
self_: Option<&syn::Type>,
option_pos: &mut usize,
) -> TokenStream {
let arg_name = syn::Ident::new(&format!("arg{}", idx), Span::call_site());
if arg.py {
return quote! {
let #arg_name = _py;
};
}
let ty = arg.ty;
let name = arg.name;
let transform_error = quote! {
|e| pyo3::derive_utils::argument_extraction_error(_py, stringify!(#name), e)
};
if spec.is_args(&name) {
return quote! {
let #arg_name = <#ty as pyo3::FromPyObject>::extract(_args.as_ref())
.map_err(#transform_error)?;
};
} else if spec.is_kwargs(&name) {
return quote! {
let #arg_name = _kwargs;
};
}
let arg_value = quote!(output[#option_pos]);
*option_pos += 1;
let default = match (spec.default_value(name), arg.optional.is_some()) {
(Some(default), true) if default.to_string() != "None" => quote! { Some(#default) },
(Some(default), _) => quote! { #default },
(None, true) => quote! { None },
(None, false) => quote! { panic!("Failed to extract required method argument") },
};
return if let syn::Type::Reference(tref) = arg.optional.as_ref().unwrap_or(&ty) {
let (tref, mut_) = preprocess_tref(tref, self_);
let (target_ty, borrow_tmp) = if arg.optional.is_some() {
// Get Option<&T> from Option<PyRef<T>>
(
quote! { Option<<#tref as pyo3::derive_utils::ExtractExt>::Target> },
if mut_.is_some() {
quote! { _tmp.as_deref_mut() }
} else {
quote! { _tmp.as_deref() }
},
)
} else {
// Get &T from PyRef<T>
(
quote! { <#tref as pyo3::derive_utils::ExtractExt>::Target },
quote! { &#mut_ *_tmp },
)
};
quote! {
let #mut_ _tmp: #target_ty = match #arg_value {
Some(_obj) => _obj.extract().map_err(#transform_error)?,
None => #default,
};
let #arg_name = #borrow_tmp;
}
} else {
quote! {
let #arg_name = match #arg_value {
Some(_obj) => _obj.extract().map_err(#transform_error)?,
None => #default,
};
}
};
/// Replace `Self`, remove lifetime and get mutability from the type
fn preprocess_tref(
tref: &syn::TypeReference,
self_: Option<&syn::Type>,
) -> (syn::TypeReference, Option<syn::token::Mut>) {
let mut tref = tref.to_owned();
if let Some(syn::Type::Path(tpath)) = self_ {
replace_self(&mut tref, &tpath.path);
}
tref.lifetime = None;
let mut_ = tref.mutability;
(tref, mut_)
}
/// Replace `Self` with the exact type name since it is used out of the impl block
fn replace_self(tref: &mut syn::TypeReference, self_path: &syn::Path) {
match &mut *tref.elem {
syn::Type::Reference(tref_inner) => replace_self(tref_inner, self_path),
syn::Type::Path(tpath) => {
if let Some(ident) = tpath.path.get_ident() {
if ident == "Self" {
tpath.path = self_path.to_owned();
}
}
}
_ => {}
}
}
}
pub fn impl_py_method_def(spec: &FnSpec, wrapper: &TokenStream) -> TokenStream {
let python_name = &spec.python_name;
let doc = &spec.doc;
if spec.args.is_empty() {
quote! {
pyo3::class::PyMethodDefType::Method({
#wrapper
pyo3::class::PyMethodDef::cfunction(
concat!(stringify!(#python_name), "\0"),
__wrap,
#doc
)
})
}
} else {
quote! {
pyo3::class::PyMethodDefType::Method({
#wrapper
pyo3::class::PyMethodDef::cfunction_with_keywords(
concat!(stringify!(#python_name), "\0"),
__wrap,
0,
#doc
)
})
}
}
}
pub fn impl_py_method_def_new(spec: &FnSpec, wrapper: &TokenStream) -> TokenStream {
let python_name = &spec.python_name;
let doc = &spec.doc;
quote! {
pyo3::class::PyMethodDefType::New({
#wrapper
pyo3::class::PyMethodDef::new_func(concat!(stringify!(#python_name), "\0"), __wrap, #doc)
})
}
}
pub fn impl_py_method_def_class(spec: &FnSpec, wrapper: &TokenStream) -> TokenStream {
let python_name = &spec.python_name;
let doc = &spec.doc;
quote! {
pyo3::class::PyMethodDefType::Class({
#wrapper
pyo3::class::PyMethodDef::cfunction_with_keywords(
concat!(stringify!(#python_name), "\0"),
__wrap,
pyo3::ffi::METH_CLASS,
#doc
)
})
}
}
pub fn impl_py_method_def_static(spec: &FnSpec, wrapper: &TokenStream) -> TokenStream {
let python_name = &spec.python_name;
let doc = &spec.doc;
quote! {
pyo3::class::PyMethodDefType::Static({
#wrapper
pyo3::class::PyMethodDef::cfunction_with_keywords(
concat!(stringify!(#python_name), "\0"),
__wrap,
pyo3::ffi::METH_STATIC,
#doc
)
})
}
}
pub fn impl_py_method_class_attribute(spec: &FnSpec<'_>, wrapper: &TokenStream) -> TokenStream {
let python_name = &spec.python_name;
quote! {
pyo3::class::PyMethodDefType::ClassAttribute({
#wrapper
pyo3::class::PyClassAttributeDef::new(concat!(stringify!(#python_name), "\0"), __wrap)
})
}
}
pub fn impl_py_const_class_attribute(spec: &ConstSpec, wrapper: &TokenStream) -> TokenStream {
let python_name = &spec.python_name;
quote! {
pyo3::class::PyMethodDefType::ClassAttribute({
#wrapper
pyo3::class::PyClassAttributeDef::new(concat!(stringify!(#python_name), "\0"), __wrap)
})
}
}
pub fn impl_py_method_def_call(spec: &FnSpec, wrapper: &TokenStream) -> TokenStream {
let python_name = &spec.python_name;
let doc = &spec.doc;
quote! {
pyo3::class::PyMethodDefType::Call({
#wrapper
pyo3::class::PyMethodDef::call_func(
concat!(stringify!(#python_name), "\0"),
__wrap,
pyo3::ffi::METH_STATIC,
#doc
)
})
}
}
pub(crate) fn impl_py_setter_def(
python_name: &syn::Ident,
doc: &syn::LitStr,
wrapper: &TokenStream,
) -> TokenStream {
quote! {
pyo3::class::PyMethodDefType::Setter({
#wrapper
pyo3::class::PySetterDef::new(concat!(stringify!(#python_name), "\0"), __wrap, #doc)
})
}
}
pub(crate) fn impl_py_getter_def(
python_name: &syn::Ident,
doc: &syn::LitStr,
wrapper: &TokenStream,
) -> TokenStream {
quote! {
pyo3::class::PyMethodDefType::Getter({
#wrapper
pyo3::class::PyGetterDef::new(concat!(stringify!(#python_name), "\0"), __wrap, #doc)
})
}
}
/// Split an argument of pyo3::Python from the front of the arg list, if present
fn split_off_python_arg<'a>(args: &'a [FnArg<'a>]) -> (Option<&FnArg>, &[FnArg]) {
if args
.get(0)
.map(|py| utils::is_python(&py.ty))
.unwrap_or(false)
{
(Some(&args[0]), &args[1..])
} else {
(None, args)
}
}
| impl_wrap_static |
JURISM-EXTRACT.py | #!/usr/bin/python
import os,sys,re,json
TESTING = ['ca', 'un.int']
def sortInfo(a, b):
|
class Courts():
def __init__(self, opt):
self.opt = opt
self.walk()
def checkFile(self, dirname):
ifh = open(os.path.join(dirname,'index.txt'))
while 1:
line = ifh.readline()
if not line:
ifh.close()
break
line = line.strip()
m = re.match("^\.\.\s+category::\s*(.*)$",line)
if m:
name = m.group(1)
ifh.close()
return name
m = re.match("^\.\.\s+court::", line)
if m:
ifh.close()
return None
ifh.close()
raise
def walk(self):
for dirname,dirs,files in os.walk('./data/courts'):
#if dirname == './data/courts': continue
path = os.path.join('jurism','/'.join(dirname.split('/')[3:]))
dlst = dirname.split(os.path.sep)
key = dlst[-1]
if self.opt.testing and len(dlst) > 3 and not dlst[3] in TESTING:
continue
name = self.checkFile(dirname)
if name == None:
continue
# name (not needed)
# key
# path
# immediate child key/name pairs
# count
childJurisdictions = []
for i in range(len(dirs)-1,-1,-1):
d = dirs[i]
subname = self.checkFile(os.path.join(dirname,d))
if subname == None:
dirs.pop(i)
continue
hasChildren = 0
for subchild in os.listdir(os.path.join(dirname,d)):
subchildPath = os.path.join(dirname,d,subchild)
if (os.path.isdir(subchildPath) and self.checkFile(subchildPath)):
hasChildren = 1
break
childJurisdictions.append([d, subname, hasChildren])
#if len(childJurisdictions) == 0:
# continue
# Produce one file for each hit which
# (1) is saved to the path
# (2) is named by the single-element key
# (3) contains the key, name and child count of each entry
try:
os.makedirs(path)
except:
pass
# Sort in reverse order (for stable output - reverse-order sort
# has not special significance)
childJurisdictions.sort(sortInfo)
open(os.path.join(path,'info.json'),'w+').write(json.dumps(childJurisdictions))
sys.stdout.write('.')
sys.stdout.flush()
newCountries = json.loads(open('./tools/country-names.json').read())
countries = json.loads(open('./jurism/info.json').read())
oldCountries = {}
for entry in countries:
oldCountries[entry[0]] = True
for key in newCountries:
if not oldCountries.has_key(key.lower()):
countries.append([key.lower(),newCountries[key],0])
open('./jurism/info.json', 'w+').write(json.dumps(countries))
if __name__ == '__main__':
from ConfigParser import ConfigParser
from optparse import OptionParser
os.environ['LANG'] = "en_US.UTF-8"
usage = '\n%prog [options]'
description="Writes minimal JSON expression of LRR jurisdiction data into source file."
parser = OptionParser(usage=usage,description=description,epilog="And that's all for now!")
parser.add_option("-t", "--t", dest="testing",
default=False,
action="store_true",
help='Output minimal test data only.')
(opt, args) = parser.parse_args()
Courts(opt)
| if a[0] > b[0]:
return -1
elif a[0] < b[0]:
return 1
else:
return 0 |
dominators.rs | //! Compute dominators of a control-flow graph.
//!
//! # The Dominance Relation
//!
//! In a directed graph with a root node **R**, a node **A** is said to *dominate* a
//! node **B** iff every path from **R** to **B** contains **A**.
//!
//! The node **A** is said to *strictly dominate* the node **B** iff **A** dominates
//! **B** and **A ≠ B**.
//!
//! The node **A** is said to be the *immediate dominator* of a node **B** iff it
//! strictly dominates **B** and there does not exist any node **C** where **A**
//! dominates **C** and **C** dominates **B**.
use std::cmp::Ordering;
use std::collections::{hash_map::Iter, HashMap, HashSet};
use std::hash::Hash;
use crate::visit::{DfsPostOrder, GraphBase, IntoNeighbors, Visitable, Walker};
/// The dominance relation for some graph and root.
#[derive(Debug, Clone)]
pub struct Dominators<N>
where
N: Copy + Eq + Hash,
{
root: N,
dominators: HashMap<N, N>,
}
impl<N> Dominators<N>
where
N: Copy + Eq + Hash,
{
/// Get the root node used to construct these dominance relations.
pub fn root(&self) -> N {
self.root
}
/// Get the immediate dominator of the given node.
///
/// Returns `None` for any node that is not reachable from the root, and for
/// the root itself.
pub fn immediate_dominator(&self, node: N) -> Option<N> {
if node == self.root {
None
} else {
self.dominators.get(&node).cloned()
}
}
| ///
/// If the given node is not reachable from the root, then `None` is
/// returned.
pub fn strict_dominators(&self, node: N) -> Option<DominatorsIter<N>> {
if self.dominators.contains_key(&node) {
Some(DominatorsIter {
dominators: self,
node: self.immediate_dominator(node),
})
} else {
None
}
}
/// Iterate over all of the given node's dominators (including the given
/// node itself).
///
/// If the given node is not reachable from the root, then `None` is
/// returned.
pub fn dominators(&self, node: N) -> Option<DominatorsIter<N>> {
if self.dominators.contains_key(&node) {
Some(DominatorsIter {
dominators: self,
node: Some(node),
})
} else {
None
}
}
/// Iterate over all nodes immediately dominated by the given node (not
/// including the given node itself).
pub fn immediately_dominated_by(&self, node: N) -> DominatedByIter<N> {
DominatedByIter {
iter: self.dominators.iter(),
node: node,
}
}
}
/// Iterator for a node's dominators.
pub struct DominatorsIter<'a, N>
where
N: 'a + Copy + Eq + Hash,
{
dominators: &'a Dominators<N>,
node: Option<N>,
}
impl<'a, N> Iterator for DominatorsIter<'a, N>
where
N: 'a + Copy + Eq + Hash,
{
type Item = N;
fn next(&mut self) -> Option<Self::Item> {
let next = self.node.take();
if let Some(next) = next {
self.node = self.dominators.immediate_dominator(next);
}
next
}
}
/// Iterator for nodes dominated by a given node.
pub struct DominatedByIter<'a, N>
where
N: 'a + Copy + Eq + Hash,
{
iter: Iter<'a, N, N>,
node: N,
}
impl<'a, N> Iterator for DominatedByIter<'a, N>
where
N: 'a + Copy + Eq + Hash,
{
type Item = N;
fn next(&mut self) -> Option<Self::Item> {
while let Some(next) = self.iter.next() {
if next.1 == &self.node {
return Some(*next.0);
}
}
None
}
}
/// The undefined dominator sentinel, for when we have not yet discovered a
/// node's dominator.
const UNDEFINED: usize = ::std::usize::MAX;
/// This is an implementation of the engineered ["Simple, Fast Dominance
/// Algorithm"][0] discovered by Cooper et al.
///
/// This algorithm is **O(|V|²)**, and therefore has slower theoretical running time
/// than the Lengauer-Tarjan algorithm (which is **O(|E| log |V|)**. However,
/// Cooper et al found it to be faster in practice on control flow graphs of up
/// to ~30,000 vertices.
///
/// [0]: http://www.cs.rice.edu/~keith/EMBED/dom.pdf
pub fn simple_fast<G>(graph: G, root: G::NodeId) -> Dominators<G::NodeId>
where
G: IntoNeighbors + Visitable,
<G as GraphBase>::NodeId: Eq + Hash,
{
let (post_order, predecessor_sets) = simple_fast_post_order(graph, root);
let length = post_order.len();
debug_assert!(length > 0);
debug_assert!(post_order.last() == Some(&root));
// From here on out we use indices into `post_order` instead of actual
// `NodeId`s wherever possible. This greatly improves the performance of
// this implementation, but we have to pay a little bit of upfront cost to
// convert our data structures to play along first.
// Maps a node to its index into `post_order`.
let node_to_post_order_idx: HashMap<_, _> = post_order
.iter()
.enumerate()
.map(|(idx, &node)| (node, idx))
.collect();
// Maps a node's `post_order` index to its set of predecessors's indices
// into `post_order` (as a vec).
let idx_to_predecessor_vec =
predecessor_sets_to_idx_vecs(&post_order, &node_to_post_order_idx, predecessor_sets);
let mut dominators = vec![UNDEFINED; length];
dominators[length - 1] = length - 1;
let mut changed = true;
while changed {
changed = false;
// Iterate in reverse post order, skipping the root.
for idx in (0..length - 1).rev() {
debug_assert!(post_order[idx] != root);
// Take the intersection of every predecessor's dominator set; that
// is the current best guess at the immediate dominator for this
// node.
let new_idom_idx = {
let mut predecessors = idx_to_predecessor_vec[idx]
.iter()
.filter(|&&p| dominators[p] != UNDEFINED);
let new_idom_idx = predecessors.next().expect(
"Because the root is initialized to dominate itself, and is the \
first node in every path, there must exist a predecessor to this \
node that also has a dominator",
);
predecessors.fold(*new_idom_idx, |new_idom_idx, &predecessor_idx| {
intersect(&dominators, new_idom_idx, predecessor_idx)
})
};
debug_assert!(new_idom_idx < length);
if new_idom_idx != dominators[idx] {
dominators[idx] = new_idom_idx;
changed = true;
}
}
}
// All done! Translate the indices back into proper `G::NodeId`s.
debug_assert!(!dominators.iter().any(|&dom| dom == UNDEFINED));
Dominators {
root,
dominators: dominators
.into_iter()
.enumerate()
.map(|(idx, dom_idx)| (post_order[idx], post_order[dom_idx]))
.collect(),
}
}
fn intersect(dominators: &[usize], mut finger1: usize, mut finger2: usize) -> usize {
loop {
match finger1.cmp(&finger2) {
Ordering::Less => finger1 = dominators[finger1],
Ordering::Greater => finger2 = dominators[finger2],
Ordering::Equal => return finger1,
}
}
}
fn predecessor_sets_to_idx_vecs<N>(
post_order: &[N],
node_to_post_order_idx: &HashMap<N, usize>,
mut predecessor_sets: HashMap<N, HashSet<N>>,
) -> Vec<Vec<usize>>
where
N: Copy + Eq + Hash,
{
post_order
.iter()
.map(|node| {
predecessor_sets
.remove(node)
.map(|predecessors| {
predecessors
.into_iter()
.map(|p| *node_to_post_order_idx.get(&p).unwrap())
.collect()
})
.unwrap_or_else(Vec::new)
})
.collect()
}
fn simple_fast_post_order<G>(
graph: G,
root: G::NodeId,
) -> (Vec<G::NodeId>, HashMap<G::NodeId, HashSet<G::NodeId>>)
where
G: IntoNeighbors + Visitable,
<G as GraphBase>::NodeId: Eq + Hash,
{
let mut post_order = vec![];
let mut predecessor_sets = HashMap::new();
for node in DfsPostOrder::new(graph, root).iter(graph) {
post_order.push(node);
for successor in graph.neighbors(node) {
predecessor_sets
.entry(successor)
.or_insert_with(HashSet::new)
.insert(node);
}
}
(post_order, predecessor_sets)
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_iter_dominators() {
let doms: Dominators<u32> = Dominators {
root: 0,
dominators: [(2, 1), (1, 0), (0, 0)].iter().cloned().collect(),
};
let all_doms: Vec<_> = doms.dominators(2).unwrap().collect();
assert_eq!(vec![2, 1, 0], all_doms);
assert_eq!(None::<()>, doms.dominators(99).map(|_| unreachable!()));
let strict_doms: Vec<_> = doms.strict_dominators(2).unwrap().collect();
assert_eq!(vec![1, 0], strict_doms);
assert_eq!(
None::<()>,
doms.strict_dominators(99).map(|_| unreachable!())
);
let dom_by: Vec<_> = doms.immediately_dominated_by(1).collect();
assert_eq!(vec![2], dom_by);
assert_eq!(None, doms.immediately_dominated_by(99).next());
}
} | /// Iterate over the given node's strict dominators. |
prefetch.go | package models
| } | type PrefetchRequest struct {
Timestamp string `json:"ts"` |
version.py | # Copyright 2008-2015 Nokia Networks
# Copyright 2016- Robot Framework Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import sys
# Version number typically updated by running `invoke set-version <version>`.
# Run `invoke --help set-version` or see tasks.py for details.
VERSION = '3.2.3.dev1'
def get_version(naked=False):
if naked:
return re.split('(a|b|rc|.dev)', VERSION)[0]
return VERSION
def get_full_version(program=None, naked=False):
version = '%s %s (%s %s on %s)' % (program or '',
get_version(naked),
get_interpreter(),
sys.version.split()[0],
sys.platform)
return version.strip()
def get_interpreter():
if sys.platform.startswith('java'):
return 'Jython'
if sys.platform == 'cli':
|
if 'PyPy' in sys.version:
return 'PyPy'
return 'Python'
| return 'IronPython' |
instrumental_test.go | package instrumental
import (
"bufio"
"net"
"net/textproto"
"sync"
"testing"
"time"
"github.com/influxdata/telegraf"
"github.com/influxdata/telegraf/metric"
"github.com/stretchr/testify/assert"
)
func TestWrite(t *testing.T) {
var wg sync.WaitGroup
wg.Add(1)
TCPServer(t, &wg)
i := Instrumental{
Host: "127.0.0.1",
APIToken: "abc123token",
Prefix: "my.prefix",
}
// Default to gauge
m1 := metric.New(
"mymeasurement",
map[string]string{"host": "192.168.0.1"},
map[string]interface{}{"myfield": float64(3.14)},
time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC),
)
m2 := metric.New(
"mymeasurement",
map[string]string{"host": "192.168.0.1", "metric_type": "set"},
map[string]interface{}{"value": float64(3.14)},
time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC),
)
metrics := []telegraf.Metric{m1, m2}
i.Write(metrics)
// Counter and Histogram are increments
m3 := metric.New(
"my_histogram",
map[string]string{"host": "192.168.0.1", "metric_type": "histogram"},
map[string]interface{}{"value": float64(3.14)},
time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC),
)
// We will modify metric names that won't be accepted by Instrumental
m4 := metric.New(
"bad_metric_name",
map[string]string{"host": "192.168.0.1:8888::123", "metric_type": "counter"},
map[string]interface{}{"value": 1},
time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC),
)
// We will drop metric values that won't be accepted by Instrumental
m5 := metric.New(
"bad_values",
map[string]string{"host": "192.168.0.1", "metric_type": "counter"},
map[string]interface{}{"value": "\" 3:30\""},
time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC),
)
m6 := metric.New(
"my_counter",
map[string]string{"host": "192.168.0.1", "metric_type": "counter"},
map[string]interface{}{"value": float64(3.14)},
time.Date(2010, time.November, 10, 23, 0, 0, 0, time.UTC),
)
metrics = []telegraf.Metric{m3, m4, m5, m6}
i.Write(metrics)
wg.Wait()
}
func TCPServer(t *testing.T, wg *sync.WaitGroup) {
tcpServer, _ := net.Listen("tcp", "127.0.0.1:8000")
go func() {
defer wg.Done()
conn, _ := tcpServer.Accept()
conn.SetDeadline(time.Now().Add(1 * time.Second))
reader := bufio.NewReader(conn)
tp := textproto.NewReader(reader)
hello, _ := tp.ReadLine()
assert.Equal(t, "hello version go/telegraf/1.1", hello)
auth, _ := tp.ReadLine()
assert.Equal(t, "authenticate abc123token", auth)
conn.Write([]byte("ok\nok\n"))
data1, _ := tp.ReadLine()
assert.Equal(t, "gauge my.prefix.192_168_0_1.mymeasurement.myfield 3.14 1289430000", data1)
data2, _ := tp.ReadLine()
assert.Equal(t, "gauge my.prefix.192_168_0_1.mymeasurement 3.14 1289430000", data2)
conn, _ = tcpServer.Accept()
conn.SetDeadline(time.Now().Add(1 * time.Second))
reader = bufio.NewReader(conn)
tp = textproto.NewReader(reader)
hello, _ = tp.ReadLine()
assert.Equal(t, "hello version go/telegraf/1.1", hello)
auth, _ = tp.ReadLine()
assert.Equal(t, "authenticate abc123token", auth)
conn.Write([]byte("ok\nok\n"))
data3, _ := tp.ReadLine()
assert.Equal(t, "increment my.prefix.192_168_0_1.my_histogram 3.14 1289430000", data3)
data4, _ := tp.ReadLine() | assert.Equal(t, "increment my.prefix.192_168_0_1.my_counter 3.14 1289430000", data5)
data6, _ := tp.ReadLine()
assert.Equal(t, "", data6)
conn.Close()
}()
} | assert.Equal(t, "increment my.prefix.192_168_0_1_8888_123.bad_metric_name 1 1289430000", data4)
data5, _ := tp.ReadLine() |
main.go | // Copyright 2020 Thomas.Hoehenleitner [at] seerose.net
// Use of this source code is governed by a license that can be found in the LICENSE file.
package main
import (
"fmt"
"io"
"math/rand"
"os"
"time"
"github.com/rokath/trice/internal/args"
)
var (
// do not initialize, goreleaser will handle that
version string
// do not initialize, goreleaser will handle that
commit string
// do not initialize, goreleaser will handle that
date string
)
// main is the entry point.
func main() |
// doit is the action.
func doit(w io.Writer) {
// inject values
args.Version = version
args.Commit = commit
args.Date = date
rand.Seed(time.Now().UnixNano())
err := args.Handler(w, os.Args)
if nil != err {
fmt.Fprintln(w, error.Error(err))
}
}
| {
doit(os.Stdout)
} |
scheduler.go | package app
import (
"context"
"flag"
"fmt"
"net/http"
"os"
"github.com/google/uuid"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/spf13/cobra"
"k8s.io/client-go/dynamic"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/clientcmd"
"k8s.io/client-go/tools/leaderelection"
"k8s.io/client-go/tools/leaderelection/resourcelock"
"k8s.io/klog/v2"
"github.com/karmada-io/karmada/cmd/scheduler/app/options"
karmadaclientset "github.com/karmada-io/karmada/pkg/generated/clientset/versioned"
"github.com/karmada-io/karmada/pkg/scheduler"
"github.com/karmada-io/karmada/pkg/version"
"github.com/karmada-io/karmada/pkg/version/sharedcommand"
)
// NewSchedulerCommand creates a *cobra.Command object with default parameters
func | (stopChan <-chan struct{}) *cobra.Command {
opts := options.NewOptions()
cmd := &cobra.Command{
Use: "karmada-scheduler",
Long: `The karmada scheduler binds resources to the clusters it manages.`,
Run: func(cmd *cobra.Command, args []string) {
if err := run(opts, stopChan); err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
},
}
opts.AddFlags(cmd.Flags())
cmd.AddCommand(sharedcommand.NewCmdVersion(os.Stdout, "karmada-scheduler"))
cmd.Flags().AddGoFlagSet(flag.CommandLine)
return cmd
}
func run(opts *options.Options, stopChan <-chan struct{}) error {
klog.Infof("karmada-scheduler version: %s", version.Get())
go serveHealthzAndMetrics(fmt.Sprintf("%s:%d", opts.BindAddress, opts.SecurePort))
restConfig, err := clientcmd.BuildConfigFromFlags(opts.Master, opts.KubeConfig)
if err != nil {
return fmt.Errorf("error building kubeconfig: %s", err.Error())
}
restConfig.QPS, restConfig.Burst = opts.KubeAPIQPS, opts.KubeAPIBurst
dynamicClientSet := dynamic.NewForConfigOrDie(restConfig)
karmadaClient := karmadaclientset.NewForConfigOrDie(restConfig)
kubeClientSet := kubernetes.NewForConfigOrDie(restConfig)
ctx, cancel := context.WithCancel(context.Background())
go func() {
<-stopChan
cancel()
}()
scheduler.Failover = opts.Failover
sched := scheduler.NewScheduler(dynamicClientSet, karmadaClient, kubeClientSet, opts)
if !opts.LeaderElection.LeaderElect {
sched.Run(ctx)
return fmt.Errorf("scheduler exited")
}
leaderElectionClient, err := kubernetes.NewForConfig(rest.AddUserAgent(restConfig, "leader-election"))
if err != nil {
return err
}
hostname, err := os.Hostname()
if err != nil {
return fmt.Errorf("unable to get hostname: %v", err)
}
// add a uniquifier so that two processes on the same host don't accidentally both become active
id := hostname + "_" + uuid.New().String()
rl, err := resourcelock.New(opts.LeaderElection.ResourceLock,
opts.LeaderElection.ResourceNamespace,
opts.LeaderElection.ResourceName,
leaderElectionClient.CoreV1(),
leaderElectionClient.CoordinationV1(),
resourcelock.ResourceLockConfig{
Identity: id,
})
if err != nil {
return fmt.Errorf("couldn't create resource lock: %v", err)
}
leaderelection.RunOrDie(ctx, leaderelection.LeaderElectionConfig{
Lock: rl,
LeaseDuration: opts.LeaderElection.LeaseDuration.Duration,
RenewDeadline: opts.LeaderElection.RenewDeadline.Duration,
RetryPeriod: opts.LeaderElection.RetryPeriod.Duration,
Callbacks: leaderelection.LeaderCallbacks{
OnStartedLeading: sched.Run,
OnStoppedLeading: func() {
klog.Fatalf("leaderelection lost")
},
},
})
return nil
}
func serveHealthzAndMetrics(address string) {
http.HandleFunc("/healthz", func(w http.ResponseWriter, _ *http.Request) {
w.WriteHeader(http.StatusOK)
_, _ = w.Write([]byte("ok"))
})
http.Handle("/metrics", promhttp.Handler())
klog.Fatal(http.ListenAndServe(address, nil))
}
| NewSchedulerCommand |
paginator.rs | // Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
/// Paginator for [`ListGatewayRoutes`](crate::operation::ListGatewayRoutes)
pub struct ListGatewayRoutesPaginator<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_gateway_routes_input::Builder,
}
impl<C, M, R> ListGatewayRoutesPaginator<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create a new paginator-wrapper
pub(crate) fn new(
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_gateway_routes_input::Builder,
) -> Self {
Self { handle, builder }
}
/// Set the page size
///
/// _Note: this method will override any previously set value for `limit`_
pub fn page_size(mut self, limit: i32) -> Self {
self.builder.limit = Some(limit);
self
}
/// Create a flattened paginator
///
/// This paginator automatically flattens results using `gateway_routes`. Queries to the underlying service
/// are dispatched lazily.
pub fn items(self) -> crate::paginator::ListGatewayRoutesPaginatorItems<C, M, R> {
crate::paginator::ListGatewayRoutesPaginatorItems(self)
}
/// Create the pagination stream
///
/// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::output::ListGatewayRoutesOutput,
aws_smithy_http::result::SdkError<crate::error::ListGatewayRoutesError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListGatewayRoutesInputOperationOutputAlias,
crate::output::ListGatewayRoutesOutput,
crate::error::ListGatewayRoutesError,
crate::input::ListGatewayRoutesInputOperationRetryAlias,
>,
{
// Move individual fields out of self for the borrow checker
let builder = self.builder;
let handle = self.handle;
aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
Box::pin(async move {
// Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
let mut input = match builder.build().map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(input) => input,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
loop {
let op = match input.make_operation(&handle.conf).await.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(op) => op,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
let resp = handle.client.call(op).await;
// If the input member is None or it was an error
let done = match resp {
Ok(ref resp) => {
let new_token = crate::lens::reflens_structure_crate_output_list_gateway_routes_output_next_token(resp);
let is_empty = new_token.map(|token| token.is_empty()).unwrap_or(true);
if !is_empty && new_token == input.next_token.as_ref() {
let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
return;
}
input.next_token = new_token.cloned();
is_empty
}
Err(_) => true,
};
if tx.send(resp).await.is_err() {
// receiving end was dropped
return;
}
if done {
return;
}
}
})
})
}
}
/// Paginator for [`ListMeshes`](crate::operation::ListMeshes)
pub struct ListMeshesPaginator<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_meshes_input::Builder,
}
impl<C, M, R> ListMeshesPaginator<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create a new paginator-wrapper
pub(crate) fn new(
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_meshes_input::Builder,
) -> Self {
Self { handle, builder }
}
/// Set the page size
///
/// _Note: this method will override any previously set value for `limit`_
pub fn page_size(mut self, limit: i32) -> Self {
self.builder.limit = Some(limit);
self
}
/// Create a flattened paginator
///
/// This paginator automatically flattens results using `meshes`. Queries to the underlying service
/// are dispatched lazily.
pub fn items(self) -> crate::paginator::ListMeshesPaginatorItems<C, M, R> {
crate::paginator::ListMeshesPaginatorItems(self)
}
/// Create the pagination stream
///
/// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::output::ListMeshesOutput,
aws_smithy_http::result::SdkError<crate::error::ListMeshesError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListMeshesInputOperationOutputAlias,
crate::output::ListMeshesOutput,
crate::error::ListMeshesError,
crate::input::ListMeshesInputOperationRetryAlias,
>,
{
// Move individual fields out of self for the borrow checker
let builder = self.builder;
let handle = self.handle;
aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
Box::pin(async move {
// Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
let mut input = match builder.build().map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(input) => input,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
loop {
let op = match input.make_operation(&handle.conf).await.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(op) => op,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
let resp = handle.client.call(op).await;
// If the input member is None or it was an error
let done = match resp {
Ok(ref resp) => {
let new_token = crate::lens::reflens_structure_crate_output_list_meshes_output_next_token(resp);
let is_empty = new_token.map(|token| token.is_empty()).unwrap_or(true);
if !is_empty && new_token == input.next_token.as_ref() {
let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
return;
}
input.next_token = new_token.cloned();
is_empty
}
Err(_) => true,
};
if tx.send(resp).await.is_err() {
// receiving end was dropped
return;
}
if done {
return;
}
}
})
})
}
}
/// Paginator for [`ListRoutes`](crate::operation::ListRoutes)
pub struct ListRoutesPaginator<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_routes_input::Builder,
}
impl<C, M, R> ListRoutesPaginator<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create a new paginator-wrapper
pub(crate) fn new(
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_routes_input::Builder,
) -> Self {
Self { handle, builder }
}
/// Set the page size
///
/// _Note: this method will override any previously set value for `limit`_
pub fn page_size(mut self, limit: i32) -> Self {
self.builder.limit = Some(limit);
self
}
/// Create a flattened paginator
///
/// This paginator automatically flattens results using `routes`. Queries to the underlying service
/// are dispatched lazily.
pub fn items(self) -> crate::paginator::ListRoutesPaginatorItems<C, M, R> {
crate::paginator::ListRoutesPaginatorItems(self)
}
/// Create the pagination stream
///
/// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::output::ListRoutesOutput,
aws_smithy_http::result::SdkError<crate::error::ListRoutesError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListRoutesInputOperationOutputAlias,
crate::output::ListRoutesOutput,
crate::error::ListRoutesError,
crate::input::ListRoutesInputOperationRetryAlias,
>,
{
// Move individual fields out of self for the borrow checker
let builder = self.builder;
let handle = self.handle;
aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
Box::pin(async move {
// Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
let mut input = match builder.build().map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(input) => input,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
loop {
let op = match input.make_operation(&handle.conf).await.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(op) => op,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
let resp = handle.client.call(op).await;
// If the input member is None or it was an error
let done = match resp {
Ok(ref resp) => {
let new_token = crate::lens::reflens_structure_crate_output_list_routes_output_next_token(resp);
let is_empty = new_token.map(|token| token.is_empty()).unwrap_or(true);
if !is_empty && new_token == input.next_token.as_ref() {
let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
return;
}
input.next_token = new_token.cloned();
is_empty
}
Err(_) => true,
};
if tx.send(resp).await.is_err() {
// receiving end was dropped
return;
}
if done {
return;
}
}
})
})
}
}
/// Paginator for [`ListTagsForResource`](crate::operation::ListTagsForResource)
pub struct ListTagsForResourcePaginator<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_tags_for_resource_input::Builder,
}
impl<C, M, R> ListTagsForResourcePaginator<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create a new paginator-wrapper
pub(crate) fn new(
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_tags_for_resource_input::Builder,
) -> Self {
Self { handle, builder }
}
/// Set the page size
///
/// _Note: this method will override any previously set value for `limit`_
pub fn page_size(mut self, limit: i32) -> Self {
self.builder.limit = Some(limit);
self
}
/// Create a flattened paginator
///
/// This paginator automatically flattens results using `tags`. Queries to the underlying service
/// are dispatched lazily.
pub fn items(self) -> crate::paginator::ListTagsForResourcePaginatorItems<C, M, R> {
crate::paginator::ListTagsForResourcePaginatorItems(self)
}
/// Create the pagination stream
///
/// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::output::ListTagsForResourceOutput,
aws_smithy_http::result::SdkError<crate::error::ListTagsForResourceError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListTagsForResourceInputOperationOutputAlias,
crate::output::ListTagsForResourceOutput,
crate::error::ListTagsForResourceError,
crate::input::ListTagsForResourceInputOperationRetryAlias,
>,
{
// Move individual fields out of self for the borrow checker
let builder = self.builder;
let handle = self.handle;
aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
Box::pin(async move {
// Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
let mut input = match builder.build().map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(input) => input,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
loop {
let op = match input.make_operation(&handle.conf).await.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(op) => op,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
let resp = handle.client.call(op).await;
// If the input member is None or it was an error
let done = match resp {
Ok(ref resp) => {
let new_token = crate::lens::reflens_structure_crate_output_list_tags_for_resource_output_next_token(resp);
let is_empty = new_token.map(|token| token.is_empty()).unwrap_or(true);
if !is_empty && new_token == input.next_token.as_ref() {
let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
return;
}
input.next_token = new_token.cloned();
is_empty
}
Err(_) => true,
};
if tx.send(resp).await.is_err() {
// receiving end was dropped
return;
}
if done {
return;
}
}
})
})
}
}
/// Paginator for [`ListVirtualGateways`](crate::operation::ListVirtualGateways)
pub struct ListVirtualGatewaysPaginator<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_virtual_gateways_input::Builder,
}
impl<C, M, R> ListVirtualGatewaysPaginator<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create a new paginator-wrapper
pub(crate) fn new(
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_virtual_gateways_input::Builder,
) -> Self {
Self { handle, builder }
}
/// Set the page size
///
/// _Note: this method will override any previously set value for `limit`_
pub fn page_size(mut self, limit: i32) -> Self {
self.builder.limit = Some(limit);
self
}
/// Create a flattened paginator
///
/// This paginator automatically flattens results using `virtual_gateways`. Queries to the underlying service
/// are dispatched lazily.
pub fn items(self) -> crate::paginator::ListVirtualGatewaysPaginatorItems<C, M, R> {
crate::paginator::ListVirtualGatewaysPaginatorItems(self)
}
/// Create the pagination stream
///
/// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::output::ListVirtualGatewaysOutput,
aws_smithy_http::result::SdkError<crate::error::ListVirtualGatewaysError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListVirtualGatewaysInputOperationOutputAlias,
crate::output::ListVirtualGatewaysOutput,
crate::error::ListVirtualGatewaysError,
crate::input::ListVirtualGatewaysInputOperationRetryAlias,
>,
{
// Move individual fields out of self for the borrow checker
let builder = self.builder;
let handle = self.handle;
aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
Box::pin(async move {
// Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
let mut input = match builder.build().map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(input) => input,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
loop {
let op = match input.make_operation(&handle.conf).await.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(op) => op,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
let resp = handle.client.call(op).await;
// If the input member is None or it was an error
let done = match resp {
Ok(ref resp) => {
let new_token = crate::lens::reflens_structure_crate_output_list_virtual_gateways_output_next_token(resp);
let is_empty = new_token.map(|token| token.is_empty()).unwrap_or(true);
if !is_empty && new_token == input.next_token.as_ref() {
let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
return;
}
input.next_token = new_token.cloned();
is_empty
}
Err(_) => true,
};
if tx.send(resp).await.is_err() {
// receiving end was dropped
return;
}
if done {
return;
}
}
})
})
}
}
/// Paginator for [`ListVirtualNodes`](crate::operation::ListVirtualNodes)
pub struct ListVirtualNodesPaginator<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_virtual_nodes_input::Builder,
}
impl<C, M, R> ListVirtualNodesPaginator<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create a new paginator-wrapper
pub(crate) fn new(
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_virtual_nodes_input::Builder,
) -> Self {
Self { handle, builder }
}
/// Set the page size
///
/// _Note: this method will override any previously set value for `limit`_
pub fn page_size(mut self, limit: i32) -> Self {
self.builder.limit = Some(limit);
self
}
/// Create a flattened paginator
///
/// This paginator automatically flattens results using `virtual_nodes`. Queries to the underlying service
/// are dispatched lazily.
pub fn items(self) -> crate::paginator::ListVirtualNodesPaginatorItems<C, M, R> {
crate::paginator::ListVirtualNodesPaginatorItems(self)
}
/// Create the pagination stream
///
/// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::output::ListVirtualNodesOutput,
aws_smithy_http::result::SdkError<crate::error::ListVirtualNodesError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListVirtualNodesInputOperationOutputAlias,
crate::output::ListVirtualNodesOutput,
crate::error::ListVirtualNodesError,
crate::input::ListVirtualNodesInputOperationRetryAlias,
>,
{
// Move individual fields out of self for the borrow checker
let builder = self.builder;
let handle = self.handle;
aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
Box::pin(async move {
// Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
let mut input = match builder.build().map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(input) => input,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
loop {
let op = match input.make_operation(&handle.conf).await.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(op) => op,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
let resp = handle.client.call(op).await;
// If the input member is None or it was an error
let done = match resp {
Ok(ref resp) => {
let new_token = crate::lens::reflens_structure_crate_output_list_virtual_nodes_output_next_token(resp);
let is_empty = new_token.map(|token| token.is_empty()).unwrap_or(true);
if !is_empty && new_token == input.next_token.as_ref() {
let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
return;
}
input.next_token = new_token.cloned();
is_empty
}
Err(_) => true,
};
if tx.send(resp).await.is_err() {
// receiving end was dropped
return;
}
if done {
return;
}
}
})
})
}
}
/// Paginator for [`ListVirtualRouters`](crate::operation::ListVirtualRouters)
pub struct ListVirtualRoutersPaginator<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_virtual_routers_input::Builder,
}
impl<C, M, R> ListVirtualRoutersPaginator<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create a new paginator-wrapper
pub(crate) fn new(
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_virtual_routers_input::Builder,
) -> Self {
Self { handle, builder }
}
/// Set the page size
///
/// _Note: this method will override any previously set value for `limit`_
pub fn page_size(mut self, limit: i32) -> Self {
self.builder.limit = Some(limit);
self
}
/// Create a flattened paginator
///
/// This paginator automatically flattens results using `virtual_routers`. Queries to the underlying service
/// are dispatched lazily.
pub fn items(self) -> crate::paginator::ListVirtualRoutersPaginatorItems<C, M, R> {
crate::paginator::ListVirtualRoutersPaginatorItems(self)
}
/// Create the pagination stream
///
/// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::output::ListVirtualRoutersOutput,
aws_smithy_http::result::SdkError<crate::error::ListVirtualRoutersError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListVirtualRoutersInputOperationOutputAlias,
crate::output::ListVirtualRoutersOutput,
crate::error::ListVirtualRoutersError,
crate::input::ListVirtualRoutersInputOperationRetryAlias,
>,
{
// Move individual fields out of self for the borrow checker
let builder = self.builder;
let handle = self.handle;
aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
Box::pin(async move {
// Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
let mut input = match builder.build().map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(input) => input,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
loop {
let op = match input.make_operation(&handle.conf).await.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(op) => op,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
let resp = handle.client.call(op).await;
// If the input member is None or it was an error
let done = match resp {
Ok(ref resp) => {
let new_token = crate::lens::reflens_structure_crate_output_list_virtual_routers_output_next_token(resp);
let is_empty = new_token.map(|token| token.is_empty()).unwrap_or(true);
if !is_empty && new_token == input.next_token.as_ref() {
let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
return;
}
input.next_token = new_token.cloned();
is_empty
}
Err(_) => true,
};
if tx.send(resp).await.is_err() {
// receiving end was dropped
return;
}
if done {
return;
}
}
})
})
}
}
/// Paginator for [`ListVirtualServices`](crate::operation::ListVirtualServices)
pub struct ListVirtualServicesPaginator<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
> {
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_virtual_services_input::Builder,
}
impl<C, M, R> ListVirtualServicesPaginator<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create a new paginator-wrapper
pub(crate) fn new(
handle: std::sync::Arc<crate::client::Handle<C, M, R>>,
builder: crate::input::list_virtual_services_input::Builder,
) -> Self {
Self { handle, builder }
}
/// Set the page size
///
/// _Note: this method will override any previously set value for `limit`_
pub fn page_size(mut self, limit: i32) -> Self |
/// Create a flattened paginator
///
/// This paginator automatically flattens results using `virtual_services`. Queries to the underlying service
/// are dispatched lazily.
pub fn items(self) -> crate::paginator::ListVirtualServicesPaginatorItems<C, M, R> {
crate::paginator::ListVirtualServicesPaginatorItems(self)
}
/// Create the pagination stream
///
/// _Note:_ No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next)).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::output::ListVirtualServicesOutput,
aws_smithy_http::result::SdkError<crate::error::ListVirtualServicesError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListVirtualServicesInputOperationOutputAlias,
crate::output::ListVirtualServicesOutput,
crate::error::ListVirtualServicesError,
crate::input::ListVirtualServicesInputOperationRetryAlias,
>,
{
// Move individual fields out of self for the borrow checker
let builder = self.builder;
let handle = self.handle;
aws_smithy_async::future::fn_stream::FnStream::new(move |tx| {
Box::pin(async move {
// Build the input for the first time. If required fields are missing, this is where we'll produce an early error.
let mut input = match builder.build().map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(input) => input,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
loop {
let op = match input.make_operation(&handle.conf).await.map_err(|err| {
aws_smithy_http::result::SdkError::ConstructionFailure(err.into())
}) {
Ok(op) => op,
Err(e) => {
let _ = tx.send(Err(e)).await;
return;
}
};
let resp = handle.client.call(op).await;
// If the input member is None or it was an error
let done = match resp {
Ok(ref resp) => {
let new_token = crate::lens::reflens_structure_crate_output_list_virtual_services_output_next_token(resp);
let is_empty = new_token.map(|token| token.is_empty()).unwrap_or(true);
if !is_empty && new_token == input.next_token.as_ref() {
let _ = tx.send(Err(aws_smithy_http::result::SdkError::ConstructionFailure("next token did not change, aborting paginator. This indicates an SDK or AWS service bug.".into()))).await;
return;
}
input.next_token = new_token.cloned();
is_empty
}
Err(_) => true,
};
if tx.send(resp).await.is_err() {
// receiving end was dropped
return;
}
if done {
return;
}
}
})
})
}
}
/// Flattened paginator for `ListGatewayRoutesPaginator`
///
/// This is created with [`.items()`](ListGatewayRoutesPaginator::items)
pub struct ListGatewayRoutesPaginatorItems<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
>(ListGatewayRoutesPaginator<C, M, R>);
impl<C, M, R> ListGatewayRoutesPaginatorItems<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create the pagination stream
///
/// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
///
/// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::model::GatewayRouteRef,
aws_smithy_http::result::SdkError<crate::error::ListGatewayRoutesError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListGatewayRoutesInputOperationOutputAlias,
crate::output::ListGatewayRoutesOutput,
crate::error::ListGatewayRoutesError,
crate::input::ListGatewayRoutesInputOperationRetryAlias,
>,
{
aws_smithy_async::future::fn_stream::TryFlatMap::new(self.0.send()).flat_map(|page| {
crate::lens::lens_structure_crate_output_list_gateway_routes_output_gateway_routes(page)
.unwrap_or_default()
.into_iter()
})
}
}
/// Flattened paginator for `ListMeshesPaginator`
///
/// This is created with [`.items()`](ListMeshesPaginator::items)
pub struct ListMeshesPaginatorItems<
C = aws_smithy_client::erase::DynConnector,
M = crate::middleware::DefaultMiddleware,
R = aws_smithy_client::retry::Standard,
>(ListMeshesPaginator<C, M, R>);
impl<C, M, R> ListMeshesPaginatorItems<C, M, R>
where
C: aws_smithy_client::bounds::SmithyConnector,
M: aws_smithy_client::bounds::SmithyMiddleware<C>,
R: aws_smithy_client::retry::NewRequestPolicy,
{
/// Create the pagination stream
///
/// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
///
/// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
pub fn send(
self,
) -> impl tokio_stream::Stream<
Item = std::result::Result<
crate::model::MeshRef,
aws_smithy_http::result::SdkError<crate::error::ListMeshesError>,
>,
> + Unpin
where
R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
crate::input::ListMeshesInputOperationOutputAlias,
crate::output::ListMeshesOutput,
crate::error::ListMeshesError,
crate::input::ListMeshesInputOperationRetryAlias,
>,
{
aws_smithy_async::future::fn_stream::TryFlatMap::new(self.0.send()).flat_map(|page| {
crate::lens::lens_structure_crate_output_list_meshes_output_meshes(page)
.unwrap_or_default()
.into_iter()
})
}
}
/// Flattened paginator for `ListRoutesPaginator`
///
/// This is created with [`.items()`](ListRoutesPaginator::items)
pub struct ListRoutesPaginatorItems<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
>(ListRoutesPaginator<C, M, R>);
impl<C, M, R> ListRoutesPaginatorItems<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create the pagination stream
    ///
    /// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
    ///
    /// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::model::RouteRef,
            aws_smithy_http::result::SdkError<crate::error::ListRoutesError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListRoutesInputOperationOutputAlias,
            crate::output::ListRoutesOutput,
            crate::error::ListRoutesError,
            crate::input::ListRoutesInputOperationRetryAlias,
        >,
    {
        // Stream whole pages from the wrapped paginator, then flatten each
        // page into its individual `RouteRef` entries; a page with no items
        // contributes an empty iterator.
        let page_stream = self.0.send();
        aws_smithy_async::future::fn_stream::TryFlatMap::new(page_stream).flat_map(|page| {
            let refs = crate::lens::lens_structure_crate_output_list_routes_output_routes(page)
                .unwrap_or_default();
            refs.into_iter()
        })
    }
}
/// Flattened paginator for `ListTagsForResourcePaginator`
///
/// This is created with [`.items()`](ListTagsForResourcePaginator::items)
pub struct ListTagsForResourcePaginatorItems<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
>(ListTagsForResourcePaginator<C, M, R>);
impl<C, M, R> ListTagsForResourcePaginatorItems<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create the pagination stream
    ///
    /// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
    ///
    /// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::model::TagRef,
            aws_smithy_http::result::SdkError<crate::error::ListTagsForResourceError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListTagsForResourceInputOperationOutputAlias,
            crate::output::ListTagsForResourceOutput,
            crate::error::ListTagsForResourceError,
            crate::input::ListTagsForResourceInputOperationRetryAlias,
        >,
    {
        // Stream whole pages from the wrapped paginator, then flatten each
        // page into its individual `TagRef` entries; a page with no items
        // contributes an empty iterator.
        let page_stream = self.0.send();
        aws_smithy_async::future::fn_stream::TryFlatMap::new(page_stream).flat_map(|page| {
            let refs =
                crate::lens::lens_structure_crate_output_list_tags_for_resource_output_tags(page)
                    .unwrap_or_default();
            refs.into_iter()
        })
    }
}
/// Flattened paginator for `ListVirtualGatewaysPaginator`
///
/// This is created with [`.items()`](ListVirtualGatewaysPaginator::items)
pub struct ListVirtualGatewaysPaginatorItems<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
>(ListVirtualGatewaysPaginator<C, M, R>);
impl<C, M, R> ListVirtualGatewaysPaginatorItems<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create the pagination stream
    ///
    /// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
    ///
    /// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::model::VirtualGatewayRef,
            aws_smithy_http::result::SdkError<crate::error::ListVirtualGatewaysError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListVirtualGatewaysInputOperationOutputAlias,
            crate::output::ListVirtualGatewaysOutput,
            crate::error::ListVirtualGatewaysError,
            crate::input::ListVirtualGatewaysInputOperationRetryAlias,
        >,
    {
        // Stream whole pages from the wrapped paginator, then flatten each
        // page into its individual `VirtualGatewayRef` entries; a page with
        // no items contributes an empty iterator.
        let page_stream = self.0.send();
        aws_smithy_async::future::fn_stream::TryFlatMap::new(page_stream).flat_map(|page| {
            let refs =
                crate::lens::lens_structure_crate_output_list_virtual_gateways_output_virtual_gateways(
                    page,
                )
                .unwrap_or_default();
            refs.into_iter()
        })
    }
}
/// Flattened paginator for `ListVirtualNodesPaginator`
///
/// This is created with [`.items()`](ListVirtualNodesPaginator::items)
pub struct ListVirtualNodesPaginatorItems<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
>(ListVirtualNodesPaginator<C, M, R>);
impl<C, M, R> ListVirtualNodesPaginatorItems<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create the pagination stream
    ///
    /// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
    ///
    /// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::model::VirtualNodeRef,
            aws_smithy_http::result::SdkError<crate::error::ListVirtualNodesError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListVirtualNodesInputOperationOutputAlias,
            crate::output::ListVirtualNodesOutput,
            crate::error::ListVirtualNodesError,
            crate::input::ListVirtualNodesInputOperationRetryAlias,
        >,
    {
        // Stream whole pages from the wrapped paginator, then flatten each
        // page into its individual `VirtualNodeRef` entries; a page with no
        // items contributes an empty iterator.
        let page_stream = self.0.send();
        aws_smithy_async::future::fn_stream::TryFlatMap::new(page_stream).flat_map(|page| {
            let refs =
                crate::lens::lens_structure_crate_output_list_virtual_nodes_output_virtual_nodes(
                    page,
                )
                .unwrap_or_default();
            refs.into_iter()
        })
    }
}
/// Flattened paginator for `ListVirtualRoutersPaginator`
///
/// This is created with [`.items()`](ListVirtualRoutersPaginator::items)
pub struct ListVirtualRoutersPaginatorItems<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
>(ListVirtualRoutersPaginator<C, M, R>);
impl<C, M, R> ListVirtualRoutersPaginatorItems<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create the pagination stream
    ///
    /// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
    ///
    /// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::model::VirtualRouterRef,
            aws_smithy_http::result::SdkError<crate::error::ListVirtualRoutersError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListVirtualRoutersInputOperationOutputAlias,
            crate::output::ListVirtualRoutersOutput,
            crate::error::ListVirtualRoutersError,
            crate::input::ListVirtualRoutersInputOperationRetryAlias,
        >,
    {
        // Stream whole pages from the wrapped paginator, then flatten each
        // page into its individual `VirtualRouterRef` entries; a page with
        // no items contributes an empty iterator.
        let page_stream = self.0.send();
        aws_smithy_async::future::fn_stream::TryFlatMap::new(page_stream).flat_map(|page| {
            let refs =
                crate::lens::lens_structure_crate_output_list_virtual_routers_output_virtual_routers(
                    page,
                )
                .unwrap_or_default();
            refs.into_iter()
        })
    }
}
/// Flattened paginator for `ListVirtualServicesPaginator`
///
/// This is created with [`.items()`](ListVirtualServicesPaginator::items)
pub struct ListVirtualServicesPaginatorItems<
    C = aws_smithy_client::erase::DynConnector,
    M = crate::middleware::DefaultMiddleware,
    R = aws_smithy_client::retry::Standard,
>(ListVirtualServicesPaginator<C, M, R>);
impl<C, M, R> ListVirtualServicesPaginatorItems<C, M, R>
where
    C: aws_smithy_client::bounds::SmithyConnector,
    M: aws_smithy_client::bounds::SmithyMiddleware<C>,
    R: aws_smithy_client::retry::NewRequestPolicy,
{
    /// Create the pagination stream
    ///
    /// _Note: No requests will be dispatched until the stream is used (eg. with [`.next().await`](tokio_stream::StreamExt::next))._
    ///
    /// To read the entirety of the paginator, use [`.collect::<Result<Vec<_>, _>()`](tokio_stream::StreamExt::collect).
    pub fn send(
        self,
    ) -> impl tokio_stream::Stream<
        Item = std::result::Result<
            crate::model::VirtualServiceRef,
            aws_smithy_http::result::SdkError<crate::error::ListVirtualServicesError>,
        >,
    > + Unpin
    where
        R::Policy: aws_smithy_client::bounds::SmithyRetryPolicy<
            crate::input::ListVirtualServicesInputOperationOutputAlias,
            crate::output::ListVirtualServicesOutput,
            crate::error::ListVirtualServicesError,
            crate::input::ListVirtualServicesInputOperationRetryAlias,
        >,
    {
        // Stream whole pages from the wrapped paginator, then flatten each
        // page into its individual `VirtualServiceRef` entries; a page with
        // no items contributes an empty iterator.
        let page_stream = self.0.send();
        aws_smithy_async::future::fn_stream::TryFlatMap::new(page_stream).flat_map(|page| {
            let refs =
                crate::lens::lens_structure_crate_output_list_virtual_services_output_virtual_services(
                    page,
                )
                .unwrap_or_default();
            refs.into_iter()
        })
    }
}
| {
self.builder.limit = Some(limit);
self
} |
test_handler.py | import json
import pytest
import os
import sys
abs_path = os.path.dirname(os.path.abspath(__file__))
sys.path.append(f'{abs_path}/../..')
sys.path.append(f'{abs_path}/../../..')
print(sys.path[-1])
from moto import mock_dynamodb2
from redirect_handler import app
import boto_utils
from constants import TABLE_NAME
import boto3
@pytest.fixture()
def apigw_event():
    """Load the sample API Gateway redirect event used by these tests."""
    event_path = './events/redirect_simple_event.json'
    with open(event_path) as event_file:
        return json.load(event_file)
def test_lambda_handler(apigw_event):
    """End-to-end test of the redirect handler against a mocked DynamoDB table.

    Seeds the mocked table with a known hash entry ('1234567' ->
    https://example.com), then asserts that a valid hash produces a 302
    redirect with a location header, and that truncated or garbage hashes
    produce a "miss" status code (204 or 206).
    """
    # Note put must work. You should have a test entry in your DB under the
    # entry '1234567' for you to pass this test
    @mock_dynamodb2
    def mock_events():
        dynamodb = boto3.resource('dynamodb')
        dynamodb.create_table(
            TableName=TABLE_NAME,
            KeySchema=[
                {
                    'AttributeName': 'redirect_url',
                    'KeyType': 'HASH'
                },
            ],
            AttributeDefinitions=[
                {
                    'AttributeName': 'redirect_url',
                    'AttributeType': 'S'
                }
            ],
            ProvisionedThroughput={
                'ReadCapacityUnits': 5,
                'WriteCapacityUnits': 5
            }
        )
        boto_utils.put('https://example.com', '1234567', '', '')
    mock_events()
    # Known hash -> redirect with a target location
    ret = app.lambda_handler(apigw_event, '')
    assert ret['statusCode'] == 302
    assert 'location' in ret['headers']
    # Unknown hashes (truncated or garbage) -> one of the "miss" codes
    failed_codes = {206, 204}
    apigw_event['pathParameters']['hash'] = apigw_event['pathParameters']['hash'][:-1]
    ret = app.lambda_handler(apigw_event, '')
    assert ret['statusCode'] in failed_codes
    apigw_event['pathParameters']['hash'] = 'garbage'
    ret = app.lambda_handler(apigw_event, '')
    assert ret['statusCode'] in failed_codes
least_cost_xmission.py | # -*- coding: utf-8 -*-
"""
Module to compute least cost xmission paths, distances, and costs one or
more SC points
"""
from concurrent.futures import as_completed
import geopandas as gpd
import json
import logging
import numpy as np
import os
import pandas as pd
from pyproj.crs import CRS
import rasterio
from scipy.spatial import cKDTree
from shapely.geometry import Point
import time
from reV.handlers.exclusions import ExclusionLayers
from reV.supply_curve.points import SupplyCurveExtent
from rex.utilities.execution import SpawnProcessPool
from rex.utilities.loggers import log_mem
from reVX.least_cost_xmission.config import (TRANS_LINE_CAT, LOAD_CENTER_CAT,
SINK_CAT, SUBSTATION_CAT)
from reVX.least_cost_xmission.least_cost_paths import LeastCostPaths
from reVX.least_cost_xmission.trans_cap_costs import TransCapCosts
logger = logging.getLogger(__name__)
class LeastCostXmission(LeastCostPaths):
"""
Compute Least Cost tie-line paths and full transmission cap cost
for all possible connections to all supply curve points
-
"""
REQUIRED_LAYRES = ['transmission_barrier', 'ISO_regions']
    def __init__(self, cost_fpath, features_fpath, resolution=128,
                 xmission_config=None):
        """
        Parameters
        ----------
        cost_fpath : str
            Path to h5 file with cost rasters and other required layers
        features_fpath : str
            Path to geopackage with transmission features
        resolution : int, optional
            SC point resolution, by default 128
        xmission_config : str | dict | XmissionConfig, optional
            Path to Xmission config .json, dictionary of Xmission config
            .jsons, or preloaded XmissionConfig objects, by default None
        """
        # Fail fast if the cost h5 is missing any required layer
        self._check_layers(cost_fpath)
        self._config = TransCapCosts._parse_config(
            xmission_config=xmission_config)
        (self._sc_points, self._features,
         self._sub_lines_mapping, self._shape) =\
            self._map_to_costs(cost_fpath, features_fpath,
                               resolution=resolution)
        self._cost_fpath = cost_fpath
        # Sink KD-tree and sink coordinates are built lazily by the
        # sink_tree / sink_coords properties
        self._tree = None
        self._sink_coords = None
        # Minimum allowed tie-line length: half the SC cell edge length.
        # NOTE(review): the 0.09 factor presumably converts 90 m pixels to
        # km -- confirm against the cost raster resolution
        self._min_line_len = (resolution * 0.09) / 2
        logger.debug('{} initialized'.format(self))
def __repr__(self):
msg = ("{} to be computed for {} sc_points and {} features"
.format(self.__class__.__name__,
len(self.sc_points),
len(self.features)))
return msg
@property
def sc_points(self):
"""
Table of supply curve points
Returns
-------
gpd.GeoDataFrame
"""
return self._sc_points
@property
def features(self):
"""
Table of features to compute paths for
Returns
-------
pandas.DataFrame
"""
return self._features
@property
def | (self):
"""
Series mapping substations to the transmission lines connected
to each substation
Returns
-------
pandas.Series
"""
return self._sub_lines_mapping
@property
def sink_coords(self):
"""
Inf sink coordinates (row, col)
Returns
-------
ndarray
"""
if self._sink_coords is None:
mask = self.features['category'] == SINK_CAT
self._sink_coords = self.features.loc[mask, ['row', 'col']].values
return self._sink_coords
@property
def sink_tree(self):
"""
cKDTree for infinite sinks
Returns
-------
cKDTree
"""
if self._tree is None:
self._tree = cKDTree(self.sink_coords)
return self._tree
    @staticmethod
    def _load_trans_feats(features_fpath):
        """
        Load existing transmission features from disk. Substations will be
        loaded from cache file if it exists

        Parameters
        ----------
        features_fpath : str
            Path to geopackage with trans features

        Returns
        -------
        features : gpd.GeoDataFrame
            DataFrame of transmission features
        sub_line_map : pandas.Series
            Mapping of sub-station trans_gid to connected tranmission line
            trans_gids
        """
        logger.debug('Loading transmission features')
        features = gpd.read_file(features_fpath)
        features = features.drop(columns=['bgid', 'egid', 'cap_left'],
                                 errors='ignore')
        mapping = {'gid': 'trans_gid', 'trans_gids': 'trans_line_gids'}
        features = features.rename(columns=mapping)
        features['min_volts'] = 0
        features['max_volts'] = 0
        # Transmission lines: min/max voltage is the line's own voltage
        mask = features['category'] == TRANS_LINE_CAT
        voltage = features.loc[mask, 'voltage'].values
        features.loc[mask, 'min_volts'] = voltage
        features.loc[mask, 'max_volts'] = voltage
        # Load Center and Sinks accept any voltage
        mask = features['category'].isin([LOAD_CENTER_CAT, SINK_CAT])
        features.loc[mask, 'min_volts'] = 1
        features.loc[mask, 'max_volts'] = 9999
        # Substations inherit the min/max voltage of their connected lines;
        # those whose best line is below 69 kV are flagged for removal.
        sub_lines_map = {}
        mask = features['category'] == SUBSTATION_CAT
        bad_subs = np.zeros(len(features), dtype=bool)
        for idx, row in features.loc[mask].iterrows():
            gid = row['trans_gid']
            lines = row['trans_line_gids']
            # trans_line_gids may be serialized as a JSON string in the gpkg
            if isinstance(lines, str):
                lines = json.loads(lines)
            sub_lines_map[gid] = lines
            lines_mask = features['trans_gid'].isin(lines)
            voltage = features.loc[lines_mask, 'voltage'].values
            if np.max(voltage) >= 69:
                features.loc[idx, 'min_volts'] = np.min(voltage)
                features.loc[idx, 'max_volts'] = np.max(voltage)
            else:
                # NOTE(review): positional flagging assumes the freshly-read
                # GeoDataFrame has a default RangeIndex -- confirm
                bad_subs[idx] = True
        if any(bad_subs):
            msg = ("The following sub-stations do not have the minimum "
                   "required voltage of 69 kV and will be dropped:\n{}"
                   .format(features.loc[bad_subs, 'trans_gid']))
            logger.warning(msg)
            features = features.loc[~bad_subs].reset_index(drop=True)
        return features, pd.Series(sub_lines_map)
    @staticmethod
    def _create_sc_points(cost_fpath, resolution=128):
        """
        Load SC points, covert row/col to array wide, and determine x/y for
        reV projection

        Parameters
        ----------
        cost_fpath : str
            Path to h5 file with cost rasters and other required layers
        resolution : int, optional
            SC point resolution, by default 128

        Returns
        -------
        sc_points : gpd.GeoDataFrame
            SC points
        """
        logger.debug('Loading Supply Curve Points')
        sce = SupplyCurveExtent(cost_fpath, resolution=resolution)
        sc_points = sce.points.rename(columns={'row_ind': 'sc_row_ind',
                                               'col_ind': 'sc_col_ind'})
        shape = sce.excl_shape
        sc_points['sc_point_gid'] = sc_points.index.values
        # Place each SC point at its cell center in exclusion-raster indices,
        # clamping cells that extend past the raster edge
        row = np.round(sc_points['sc_row_ind'] * resolution + resolution / 2)
        row = np.where(row >= shape[0], shape[0] - 1, row)
        sc_points['row'] = row.astype(int)
        col = np.round(sc_points['sc_col_ind'] * resolution + resolution / 2)
        col = np.where(col >= shape[1], shape[1] - 1, col)
        sc_points['col'] = col.astype(int)
        return sc_points
    @staticmethod
    def _get_feature_cost_indices(features, crs, transform, shape):
        """
        Map features to cost row, col indicies using rasterio transform

        Parameters
        ----------
        features : gpd.GeoDataFrame
            GeoDataFrame of features to map to cost raster
        crs : pyproj.crs.CRS
            CRS of cost raster
        transform : raster.Affine
            Transform of cost raster
        shape : tuple
            Cost raster shape

        Returns
        -------
        row : ndarray
            Vector of row indicies for each feature
        col : ndarray
            Vector of col indicies for each features
        mask : ndarray
            Boolean mask of features with indicies outside of cost raster
        """
        row, col, mask = super(LeastCostXmission,
                               LeastCostXmission)._get_feature_cost_indices(
            features, crs, transform, shape)
        # Transmission lines can extend beyond the cost raster; keep them
        # in-bounds by forcing their mask True and clamping their indices to
        # the raster edges instead of dropping them.
        t_lines = features['category'] == TRANS_LINE_CAT
        mask |= t_lines
        row[t_lines] = np.where(row[t_lines] >= 0, row[t_lines], 0)
        row[t_lines] = np.where(row[t_lines] < shape[0], row[t_lines],
                                shape[0] - 1)
        col[t_lines] = np.where(col[t_lines] >= 0, col[t_lines], 0)
        col[t_lines] = np.where(col[t_lines] < shape[1], col[t_lines],
                                shape[1] - 1)
        return row, col, mask
    @classmethod
    def _map_to_costs(cls, cost_fpath, features_fpath, resolution=128):
        """
        Map supply curve points and transmission features to cost array pixel
        indices

        Parameters
        ----------
        cost_fpath : str
            Path to h5 file with cost rasters and other required layers
        features_fpath : str
            Path to geopackage with transmission features
        resolution : int, optional
            SC point resolution, by default 128

        Returns
        -------
        sc_point : gpd.GeoDataFrame
            Table of supply curve points to connect to tranmission
        features : gpd.GeoDataFrame
            Table of transmission features
        sub_lines_map : pandas.Series
            Series mapping substations to the transmission lines connected
            to each substation
        shape : tuple
            Cost raster shape (rows, cols)
        """
        with ExclusionLayers(cost_fpath) as f:
            crs = CRS.from_string(f.crs)
            transform = rasterio.Affine(*f.profile['transform'])
            shape = f.shape
            regions = f['ISO_regions']
        features, sub_lines_map = cls._load_trans_feats(features_fpath)
        row, col, mask = cls._get_feature_cost_indices(features, crs,
                                                       transform, shape)
        # Drop features outside the cost raster (t-lines were clamped, not
        # masked out, by _get_feature_cost_indices)
        if any(~mask):
            msg = ("The following features are outside of the cost exclusion "
                   "domain and will be dropped:\n{}"
                   .format(features.loc[~mask, 'trans_gid']))
            logger.warning(msg)
            row = row[mask]
            col = col[mask]
            features = features.loc[mask].reset_index(drop=True)
        features['row'] = row
        features['col'] = col
        features['region'] = regions[row, col]
        logger.debug('Converting SC points to GeoDataFrame')
        sc_points = cls._create_sc_points(cost_fpath, resolution=resolution)
        x, y = rasterio.transform.xy(transform, sc_points['row'].values,
                                     sc_points['col'].values)
        geo = [Point(xy) for xy in zip(x, y)]
        sc_points = gpd.GeoDataFrame(sc_points, crs=features.crs,
                                     geometry=geo)
        return sc_points, features, sub_lines_map, shape
    def _clip_to_sc_point(self, sc_point, tie_line_voltage, nn_sinks=2,
                          clipping_buffer=1.05):
        """
        Clip costs raster to AOI around SC point, and get substations,
        load centers, and sinks within the clipped region.

        Parameters
        ----------
        sc_point : gpd.GeoSeries
            SC point to clip raster around
        tie_line_voltage : int
            Minimum max-voltage (kV) a feature must support to be kept
        nn_sinks : int, optional
            Number of nearest neighbor sinks to clip to
        clipping_buffer : float, optional
            Buffer to increase clipping radius by, by default 1.05

        Returns
        -------
        sc_features : pd.DataFrame
            Substatations, load centers, sinks, and nearest points on t-lines
            to SC point
        radius : int | None
            Clipping radius in cost raster pixels; None when there are too
            few sinks to define a radius (no clipping applied)
        """
        logger.debug('Clipping features to sc_point {}'.format(sc_point.name))
        if len(self.sink_coords) > 2:
            # Radius = distance to the nn-th nearest sink, padded by the
            # clipping buffer
            row, col = sc_point[['row', 'col']].values
            _, pos = self.sink_tree.query([row, col], k=nn_sinks)
            radius = np.abs(self.sink_coords[pos] - np.array([row, col])).max()
            radius = int(np.ceil(radius * clipping_buffer))
            logger.debug('Radius to {} nearest sink is: {}'
                         .format(nn_sinks, radius))
            row_min = max(row - radius, 0)
            row_max = min(row + radius, self._shape[0])
            col_min = max(col - radius, 0)
            col_max = min(col + radius, self._shape[1])
            logger.debug('Extracting all transmission features in the row '
                         'slice {}:{} and column slice {}:{}'
                         .format(row_min, row_max, col_min, col_max))
            # Clip transmission features
            mask = self.features['row'] >= row_min
            mask &= self.features['row'] < row_max
            mask &= self.features['col'] >= col_min
            mask &= self.features['col'] < col_max
            sc_features = self.features.loc[mask].copy(deep=True)
            logger.debug('{} transmission features found in clipped area with '
                         'radius {}'
                         .format(len(sc_features), radius))
        else:
            radius = None
            sc_features = self.features.copy(deep=True)
        # NOTE(review): this mask is built on the full features table but
        # applied to the clipped subset; relies on pandas index alignment --
        # confirm behavior on the pinned pandas version
        mask = self.features['max_volts'] >= tie_line_voltage
        sc_features = sc_features.loc[mask].copy(deep=True)
        logger.debug('{} transmission features found in clipped area with '
                     'minimum max voltage of {}'
                     .format(len(sc_features), tie_line_voltage))
        # Find t-lines connected to substations within clip
        logger.debug('Collecting transmission lines connected to substations')
        mask = sc_features['category'] == SUBSTATION_CAT
        if mask.any():
            trans_gids = sc_features.loc[mask, 'trans_gid'].values
            trans_gids = \
                np.concatenate(self.sub_lines_mapping.loc[trans_gids].values)
            trans_gids = np.unique(trans_gids)
            line_mask = self.features['trans_gid'].isin(trans_gids)
            trans_lines = self.features.loc[line_mask].copy(deep=True)
            # Only add lines not already present in the clipped set
            line_mask = trans_lines['trans_gid'].isin(sc_features['trans_gid'])
            trans_lines = trans_lines.loc[~line_mask]
            logger.debug('Adding all {} transmission lines connected to '
                         'substations with minimum max voltage of {}'
                         .format(len(trans_lines), tie_line_voltage))
            # NOTE(review): DataFrame.append is deprecated/removed in newer
            # pandas; pd.concat would be the replacement
            sc_features = sc_features.append(trans_lines)
        return sc_features, radius
    def process_sc_points(self, capacity_class, sc_point_gids=None, nn_sinks=2,
                          clipping_buffer=1.05, barrier_mult=100,
                          max_workers=None):
        """
        Compute Least Cost Tranmission for desired sc_points

        Parameters
        ----------
        capacity_class : str | int
            Capacity class of transmission features to connect supply curve
            points to
        sc_point_gids : list, optional
            List of sc_point_gids to connect to, by default None
        nn_sinks : int, optional
            Number of nearest neighbor sinks to use for clipping radius
            calculation, by default 2
        clipping_buffer : float, optional
            Buffer to expand clipping radius by, by default 1.05
        barrier_mult : int, optional
            Tranmission barrier multiplier, used when computing the least
            cost tie-line path, by default 100
        max_workers : int, optional
            Number of workers to use for processing, if 1 run in serial,
            if None use all available cores, by default None

        Returns
        -------
        least_costs : pandas.DataFrame
            Least cost connections between all supply curve points and the
            transmission features with the given capacity class that are within
            "nn_sink" nearest infinite sinks
        """
        max_workers = os.cpu_count() if max_workers is None else max_workers
        if sc_point_gids is None:
            sc_point_gids = self.sc_points['sc_point_gid'].values
        tie_line_voltage = self._config.capacity_to_kv(capacity_class)
        least_costs = []
        if max_workers > 1:
            logger.info('Computing Least Cost Transmission for SC points in '
                        'parallel on {} workers'.format(max_workers))
            loggers = [__name__, 'reV', 'reVX']
            with SpawnProcessPool(max_workers=max_workers,
                                  loggers=loggers) as exe:
                futures = []
                # Clipping happens in the parent process; only the per-point
                # cost computation is farmed out to workers
                for _, sc_point in self.sc_points.iterrows():
                    gid = sc_point['sc_point_gid']
                    if gid in sc_point_gids:
                        sc_features, radius = self._clip_to_sc_point(
                            sc_point, tie_line_voltage, nn_sinks=nn_sinks,
                            clipping_buffer=clipping_buffer)
                        future = exe.submit(TransCapCosts.run,
                                            self._cost_fpath,
                                            sc_point.copy(deep=True),
                                            sc_features, capacity_class,
                                            radius=radius,
                                            xmission_config=self._config,
                                            barrier_mult=barrier_mult,
                                            min_line_length=self._min_line_len)
                        futures.append(future)
                for i, future in enumerate(as_completed(futures)):
                    # A None result means no connections were found for that
                    # SC point; it is simply skipped
                    sc_costs = future.result()
                    if sc_costs is not None:
                        least_costs.append(sc_costs)
                    logger.debug('SC point {} of {} complete!'
                                 .format(i + 1, len(futures)))
                    log_mem(logger)
        else:
            logger.info('Computing Least Cost Transmission for SC points in '
                        'serial')
            i = 1
            for _, sc_point in self.sc_points.iterrows():
                gid = sc_point['sc_point_gid']
                if gid in sc_point_gids:
                    sc_features, radius = self._clip_to_sc_point(
                        sc_point, tie_line_voltage, nn_sinks=nn_sinks,
                        clipping_buffer=clipping_buffer)
                    sc_costs = TransCapCosts.run(
                        self._cost_fpath, sc_point.copy(deep=True),
                        sc_features, capacity_class,
                        radius=radius,
                        xmission_config=self._config,
                        barrier_mult=barrier_mult,
                        min_line_length=self._min_line_len)
                    if sc_costs is not None:
                        least_costs.append(sc_costs)
                    logger.debug('SC point {} of {} complete!'
                                 .format(i, len(sc_point_gids)))
                    log_mem(logger)
                    i += 1
        least_costs = pd.concat(least_costs).sort_values(['sc_point_gid',
                                                          'trans_gid'])
        capacity_class = self._config._parse_cap_class(capacity_class)
        least_costs['max_cap'] = self._config['power_classes'][capacity_class]
        lcp_frac = (len(least_costs['sc_point_gid'].unique())
                    / len(sc_point_gids) * 100)
        logger.info('{:.4f}% of requested sc point gids were succesfully '
                    'mapped to transmission features'.format(lcp_frac))
        return least_costs.reset_index(drop=True)
    @classmethod
    def run(cls, cost_fpath, features_fpath, capacity_class, resolution=128,
            xmission_config=None, sc_point_gids=None, nn_sinks=2,
            clipping_buffer=1.05, barrier_mult=100, max_workers=None):
        """
        Find Least Cost Tranmission connections between desired sc_points to
        given tranmission features for desired capacity class

        Parameters
        ----------
        cost_fpath : str
            Path to h5 file with cost rasters and other required layers
        features_fpath : str
            Path to geopackage with transmission features
        capacity_class : str | int
            Capacity class of transmission features to connect supply curve
            points to
        resolution : int, optional
            SC point resolution, by default 128
        xmission_config : str | dict | XmissionConfig, optional
            Path to Xmission config .json, dictionary of Xmission config
            .jsons, or preloaded XmissionConfig objects, by default None
        sc_point_gids : list, optional
            List of sc_point_gids to connect to, by default None
        nn_sinks : int, optional
            Number of nearest neighbor sinks to use for clipping radius
            calculation, by default 2
        clipping_buffer : float, optional
            Buffer to expand clipping radius by, by default 1.05
        barrier_mult : int, optional
            Tranmission barrier multiplier, used when computing the least
            cost tie-line path, by default 100
        max_workers : int, optional
            Number of workers to use for processing, if 1 run in serial,
            if None use all available cores, by default None

        Returns
        -------
        least_costs : pandas.DataFrame
            Least cost connections between all supply curve points and the
            transmission features with the given capacity class that are within
            "nn_sink" nearest infinite sinks
        """
        # Convenience wrapper: construct the class and run process_sc_points,
        # logging total wall-clock time
        ts = time.time()
        lcx = cls(cost_fpath, features_fpath, resolution=resolution,
                  xmission_config=xmission_config)
        least_costs = lcx.process_sc_points(capacity_class,
                                            sc_point_gids=sc_point_gids,
                                            nn_sinks=nn_sinks,
                                            clipping_buffer=clipping_buffer,
                                            barrier_mult=barrier_mult,
                                            max_workers=max_workers)
        logger.info('{} connections were made to {} SC points in {:.4f} '
                    'minutes'
                    .format(len(least_costs),
                            len(least_costs['sc_point_gid'].unique()),
                            (time.time() - ts) / 60))
        return least_costs
| sub_lines_mapping |
store.go | package store
import (
"code.cloudfoundry.org/go-loggregator/metrics"
"container/heap"
"regexp"
"sync"
"sync/atomic"
"time"
"code.cloudfoundry.org/go-loggregator/rpc/loggregator_v2"
"code.cloudfoundry.org/log-cache/pkg/rpc/logcache_v1"
"github.com/emirpasic/gods/trees/avltree"
"github.com/emirpasic/gods/utils"
)
// MetricsRegistry creates the counters and gauges the store uses to report
// on its own behavior (ingress/egress, size, truncation timing, etc.).
type MetricsRegistry interface {
	NewCounter(name string, opts ...metrics.MetricOption) metrics.Counter
	NewGauge(name string, opts ...metrics.MetricOption) metrics.Gauge
}
// MemoryConsultant is used to determine if the store should prune.
type MemoryConsultant interface {
	// GetQuantityToPrune returns the number of envelopes to prune given the
	// current envelope count.
	GetQuantityToPrune(int64) int
	// SetMemoryReporter accepts a reporting function for Memory Utilization
	SetMemoryReporter(metrics.Gauge)
}
// MIN_INT64 is, despite its name, the MAXIMUM int64 value (^uint64(0) >> 1
// == math.MaxInt64). It is used as the initial "oldest timestamp" sentinel
// so that any real envelope timestamp compares as older (see insertOrSwap).
// NOTE(review): renaming would be clearer but the constant is exported, so
// it is documented rather than changed.
const MIN_INT64 = int64(^uint64(0) >> 1)
// Store is an in-memory data store for envelopes. It will store envelopes up
// to a per-source threshold and evict oldest data first, as instructed by the
// Pruner. All functions are thread safe.
type Store struct {
	// storageIndex maps sourceId -> *storage (per-source envelope tree)
	storageIndex sync.Map
	// initializationMutex serializes creation of new per-source storages
	initializationMutex sync.Mutex
	// count is incremented/decremented atomically during Put
	count int64
	// oldestTimestamp is the oldest envelope timestamp across all sources;
	// accessed atomically
	oldestTimestamp int64
	// maxPerSource caps how many envelopes a single source may hold
	maxPerSource int
	// maxTimestampFudge bounds how far insertOrSwap will bump a timestamp
	// to avoid key collisions within a source's tree
	maxTimestampFudge int64
	metrics Metrics
	mc MemoryConsultant
	// truncationCompleted receives the status of each truncation pass
	truncationCompleted chan bool
}
// Metrics bundles the instruments the store updates as it ingests, serves,
// expires, and truncates envelopes. See registerMetrics for their names.
type Metrics struct {
	expired metrics.Counter
	cachePeriod metrics.Gauge
	ingress metrics.Counter
	egress metrics.Counter
	storeSize metrics.Gauge
	truncationDuration metrics.Gauge
	memoryUtilization metrics.Gauge
}
// NewStore builds a Store, wires the memory consultant to the
// memory-utilization gauge, and starts the background truncation loop
// (which runs for the lifetime of the process).
func NewStore(maxPerSource int, mc MemoryConsultant, m MetricsRegistry) *Store {
	store := &Store{
		maxPerSource: maxPerSource,
		// NOTE(review): fudge is applied in the same units as envelope
		// timestamps -- presumably nanoseconds, confirm
		maxTimestampFudge: 4000,
		// Sentinel: actually math.MaxInt64, so the first envelope always
		// becomes the oldest (see MIN_INT64 comment)
		oldestTimestamp: MIN_INT64,
		metrics: registerMetrics(m),
		mc: mc,
		truncationCompleted: make(chan bool),
	}
	store.mc.SetMemoryReporter(store.metrics.memoryUtilization)
	go store.truncationLoop(500 * time.Millisecond)
	return store
}
// registerMetrics creates every counter and gauge the store reports on the
// provided registry and bundles them into a Metrics value.
func registerMetrics(m MetricsRegistry) Metrics {
	return Metrics{
		expired: m.NewCounter(
			"log_cache_expired",
			metrics.WithHelpText("total_expired_envelopes"),
		),
		cachePeriod: m.NewGauge(
			"log_cache_cache_period",
			metrics.WithHelpText("Cache period in milliseconds. Calculated as the difference between the oldest envelope timestamp and now."),
			metrics.WithMetricTags(map[string]string{"unit": "milliseconds"}),
		),
		ingress: m.NewCounter(
			"log_cache_ingress",
			metrics.WithHelpText("Total envelopes ingressed."),
		),
		egress: m.NewCounter(
			"log_cache_egress",
			metrics.WithHelpText("Total envelopes retrieved from the store."),
		),
		storeSize: m.NewGauge(
			"log_cache_store_size",
			metrics.WithHelpText("Current number of envelopes in the store."),
			metrics.WithMetricTags(map[string]string{"unit": "entries"}),
		),
		//TODO convert to histogram
		truncationDuration: m.NewGauge(
			"log_cache_truncation_duration",
			metrics.WithHelpText("Duration of last truncation in milliseconds."),
			metrics.WithMetricTags(map[string]string{"unit": "milliseconds"}),
		),
		memoryUtilization: m.NewGauge(
			"log_cache_memory_utilization",
			metrics.WithHelpText("Percentage of system memory in use by log cache. Calculated as heap memory in use divided by system memory."),
			metrics.WithMetricTags(map[string]string{"unit": "percentage"}),
		),
	}
}
// getOrInitializeStorage returns the per-source storage for sourceId,
// creating and registering a fresh one (under the initialization mutex) when
// the source has not been seen before. The second return value reports
// whether a new storage was created.
func (store *Store) getOrInitializeStorage(sourceId string) (*storage, bool) {
	store.initializationMutex.Lock()
	defer store.initializationMutex.Unlock()
	if existing, found := store.storageIndex.Load(sourceId); found {
		return existing.(*storage), false
	}
	created := &storage{
		sourceId: sourceId,
		Tree:     avltree.NewWith(utils.Int64Comparator),
	}
	store.storageIndex.Store(sourceId, created)
	return created, true
}
// insertOrSwap adds e to this source's tree, evicting the source's oldest
// envelope first when the per-source cap is reached. Duplicate timestamps are
// disambiguated by bumping the key by up to store.maxTimestampFudge. It also
// refreshes the per-source and store-wide oldest-timestamp bookkeeping and
// the cache-period gauge. The storage lock is taken here.
func (storage *storage) insertOrSwap(store *Store, e *loggregator_v2.Envelope) {
	storage.Lock()
	defer storage.Unlock()
	// If we're at our maximum capacity, remove an envelope before inserting
	if storage.Size() >= store.maxPerSource {
		oldestTimestamp := storage.Left().Key.(int64)
		storage.Remove(oldestTimestamp)
		storage.meta.Expired++
		store.metrics.expired.Add(1)
	} else {
		atomic.AddInt64(&store.count, 1)
		store.metrics.storeSize.Set(float64(atomic.LoadInt64(&store.count)))
	}
	// Find the first unused key at or after e.Timestamp so an envelope with
	// a duplicate timestamp does not overwrite an existing entry
	var timestampFudge int64
	for timestampFudge = 0; timestampFudge < store.maxTimestampFudge; timestampFudge++ {
		_, exists := storage.Get(e.Timestamp + timestampFudge)
		if !exists {
			break
		}
	}
	storage.Put(e.Timestamp+timestampFudge, e)
	if e.Timestamp > storage.meta.NewestTimestamp {
		storage.meta.NewestTimestamp = e.Timestamp
	}
	// Refresh this source's oldest timestamp and, if it is now the oldest
	// across the whole store, the store-wide value as well
	oldestTimestamp := storage.Left().Key.(int64)
	storage.meta.OldestTimestamp = oldestTimestamp
	storeOldestTimestamp := atomic.LoadInt64(&store.oldestTimestamp)
	if oldestTimestamp < storeOldestTimestamp {
		atomic.StoreInt64(&store.oldestTimestamp, oldestTimestamp)
		storeOldestTimestamp = oldestTimestamp
	}
	cachePeriod := calculateCachePeriod(storeOldestTimestamp)
	store.metrics.cachePeriod.Set(float64(cachePeriod))
}
// WaitForTruncationToComplete blocks until the next truncation pass reports
// completion: true when envelopes were pruned, false when the pass decided
// nothing needed pruning. Useful for synchronizing with the background loop.
func (store *Store) WaitForTruncationToComplete() bool {
return <-store.truncationCompleted
}
// sendTruncationCompleted publishes the outcome of a truncation pass.
// The non-blocking send means the status is simply dropped when nobody is
// currently waiting in WaitForTruncationToComplete.
func (store *Store) sendTruncationCompleted(status bool) {
select {
case store.truncationCompleted <- status:
// fmt.Println("Truncation ended with status", status)
default:
// Don't block if the channel has no receiver
}
}
// truncationLoop runs forever, executing one truncation pass every
// runInterval and recording each pass's duration in milliseconds. The
// timer is only reset after truncate returns, so passes never overlap.
func (store *Store) truncationLoop(runInterval time.Duration) {
t := time.NewTimer(runInterval)
for {
// Wait for our timer to go off
<-t.C
startTime := time.Now()
store.truncate()
t.Reset(runInterval)
store.metrics.truncationDuration.Set(float64(time.Since(startTime) / time.Millisecond))
}
}
// Put ingests one envelope for sourceId, creating the per-source storage
// on first use, and counts it on the ingress metric.
func (store *Store) Put(envelope *loggregator_v2.Envelope, sourceId string) {
store.metrics.ingress.Add(1)
envelopeStorage, _ := store.getOrInitializeStorage(sourceId)
envelopeStorage.insertOrSwap(store, envelope)
}
// BuildExpirationHeap walks every per-source storage and builds a min-heap
// of (oldest envelope timestamp, source, storage) entries, allowing the
// truncation pass to repeatedly locate the globally oldest envelope cheaply.
func (store *Store) BuildExpirationHeap() *ExpirationHeap {
	expirationHeap := &ExpirationHeap{}
	heap.Init(expirationHeap)

	store.storageIndex.Range(func(key interface{}, value interface{}) bool {
		perSource := value.(*storage)
		perSource.RLock()
		oldest := perSource.Left().Key.(int64)
		heap.Push(expirationHeap, storageExpiration{
			timestamp: oldest,
			sourceId:  key.(string),
			tree:      perSource,
		})
		perSource.RUnlock()
		return true
	})

	return expirationHeap
}
// truncate removes the n oldest envelopes across all trees, where n comes
// from the memory consultant, then refreshes the store-size and cache-period
// metrics. A completion status is published for anyone blocked in
// WaitForTruncationToComplete: false when nothing needed pruning, true
// otherwise.
func (store *Store) truncate() {
	storeCount := atomic.LoadInt64(&store.count)
	numberToPrune := store.mc.GetQuantityToPrune(storeCount)

	if numberToPrune == 0 {
		store.sendTruncationCompleted(false)
		return
	}

	// Just make sure we don't try to prune more entries than we have
	if numberToPrune > int(storeCount) {
		numberToPrune = int(storeCount)
	}

	expirationHeap := store.BuildExpirationHeap()

	// Remove envelopes one at a time, popping state from the expirationHeap
	for i := 0; i < numberToPrune; i++ {
		oldest := heap.Pop(expirationHeap)
		newOldestTimestamp, valid := store.removeOldestEnvelope(oldest.(storageExpiration).tree, oldest.(storageExpiration).sourceId)
		if valid {
			heap.Push(expirationHeap, storageExpiration{timestamp: newOldestTimestamp, sourceId: oldest.(storageExpiration).sourceId, tree: oldest.(storageExpiration).tree})
		}
	}

	// Always update our store size metric and close out the channel when we return
	defer func() {
		store.metrics.storeSize.Set(float64(atomic.LoadInt64(&store.count)))
		store.sendTruncationCompleted(true)
	}()

	// If there's nothing left on the heap, our store is empty, so we can
	// reset everything to default values and bail out
	if expirationHeap.Len() == 0 {
		atomic.StoreInt64(&store.oldestTimestamp, MIN_INT64)
		store.metrics.cachePeriod.Set(0)
		return
	}

	// Otherwise, grab the next oldest timestamp and use it to update the
	// cache period.
	// BUG FIX: this previously called expirationHeap.Pop() directly, which
	// is the heap.Interface method returning the LAST slice element — an
	// arbitrary entry, not the minimum. heap.Pop(...) returns the actual
	// oldest timestamp, matching the comment's intent.
	if oldest := heap.Pop(expirationHeap); oldest.(storageExpiration).tree != nil {
		atomic.StoreInt64(&store.oldestTimestamp, oldest.(storageExpiration).timestamp)
		cachePeriod := calculateCachePeriod(oldest.(storageExpiration).timestamp)
		store.metrics.cachePeriod.Set(float64(cachePeriod))
	}
}
// removeOldestEnvelope deletes the leftmost (oldest) envelope from
// treeToPrune, decrementing the store count and bumping the expired metric.
// If the tree becomes empty the source is dropped from the index entirely
// and (0, false) is returned; otherwise it returns the new oldest timestamp
// and true so the caller can re-queue this source on the expiration heap.
func (store *Store) removeOldestEnvelope(treeToPrune *storage, sourceId string) (int64, bool) {
treeToPrune.Lock()
defer treeToPrune.Unlock()
if treeToPrune.Size() == 0 {
return 0, false
}
atomic.AddInt64(&store.count, -1)
store.metrics.expired.Add(1)
oldestEnvelope := treeToPrune.Left()
treeToPrune.Remove(oldestEnvelope.Key.(int64))
// Source fully drained: remove its storage from the index. Note the
// per-source meta below is not updated on this path (the storage is gone).
if treeToPrune.Size() == 0 {
store.storageIndex.Delete(sourceId)
return 0, false
}
newOldestEnvelope := treeToPrune.Left()
oldestTimestampAfterRemoval := newOldestEnvelope.Key.(int64)
treeToPrune.meta.Expired++
treeToPrune.meta.OldestTimestamp = oldestTimestampAfterRemoval
return oldestTimestampAfterRemoval, true
}
// Get fetches envelopes from the store based on the source ID, start and end
// time. Start is inclusive while end is not: [start..end).
// Returns nil when the source is unknown. Results are filtered by
// envelopeTypes and nameFilter (which may rewrite gauge envelopes down to
// the matching metrics — see filterByName), capped at limit, and counted
// on the egress metric.
func (store *Store) Get(
index string,
start time.Time,
end time.Time,
envelopeTypes []logcache_v1.EnvelopeType,
nameFilter *regexp.Regexp,
limit int,
descending bool,
) []*loggregator_v2.Envelope {
tree, ok := store.storageIndex.Load(index)
if !ok {
return nil
}
tree.(*storage).RLock()
defer tree.(*storage).RUnlock()
traverser := store.treeAscTraverse
if descending {
traverser = store.treeDescTraverse
}
var res []*loggregator_v2.Envelope
traverser(tree.(*storage).Root, start.UnixNano(), end.UnixNano(), func(e *loggregator_v2.Envelope) bool {
e = store.filterByName(e, nameFilter)
if e == nil {
return false
}
if store.validEnvelopeType(e, envelopeTypes) {
res = append(res, e)
}
// Return true to stop traversing
return len(res) >= limit
})
store.metrics.egress.Add(float64(len(res)))
return res
}
// filterByName applies nameFilter to a single envelope. With a nil filter
// the envelope passes through untouched. Counter and timer envelopes pass
// only when their name matches. Gauge envelopes are rewritten to contain
// just the matching metrics (nil when none match). All other payload types
// are dropped.
func (store *Store) filterByName(envelope *loggregator_v2.Envelope, nameFilter *regexp.Regexp) *loggregator_v2.Envelope {
	if nameFilter == nil {
		return envelope
	}

	switch envelope.Message.(type) {
	case *loggregator_v2.Envelope_Counter:
		if nameFilter.MatchString(envelope.GetCounter().GetName()) {
			return envelope
		}
	case *loggregator_v2.Envelope_Timer:
		if nameFilter.MatchString(envelope.GetTimer().GetName()) {
			return envelope
		}
	case *loggregator_v2.Envelope_Gauge:
		kept := make(map[string]*loggregator_v2.GaugeValue)
		for metricName, gaugeValue := range envelope.GetGauge().GetMetrics() {
			if nameFilter.MatchString(metricName) {
				kept[metricName] = gaugeValue
			}
		}
		if len(kept) == 0 {
			break
		}
		// Rebuild the envelope with only the surviving gauge metrics.
		return &loggregator_v2.Envelope{
			Timestamp:      envelope.Timestamp,
			SourceId:       envelope.SourceId,
			InstanceId:     envelope.InstanceId,
			DeprecatedTags: envelope.DeprecatedTags,
			Tags:           envelope.Tags,
			Message: &loggregator_v2.Envelope_Gauge{
				Gauge: &loggregator_v2.Gauge{
					Metrics: kept,
				},
			},
		}
	}

	// Logs, events, and anything that failed the filter are dropped.
	return nil
}
// validEnvelopeType reports whether e matches at least one of the requested
// envelope types. A nil slice means "no filtering" and always matches.
func (s *Store) validEnvelopeType(e *loggregator_v2.Envelope, types []logcache_v1.EnvelopeType) bool {
if types == nil {
return true
}
for _, t := range types {
if s.checkEnvelopeType(e, t) {
return true
}
}
return false
}
// treeAscTraverse walks the subtree rooted at n in ascending key order,
// invoking f on each envelope whose original timestamp t falls in
// [start, end). The walk stops (returns true) once t >= end or f signals
// done — unless the current node belongs to a timestamp "fudge" sequence,
// in which case traversal continues so the rest of the sequence is still
// visited (see isNodeAFudgeSequenceMember). Note t is the envelope's own
// timestamp; tree keys may have been fudged upward at insert time.
func (s *Store) treeAscTraverse(
n *avltree.Node,
start int64,
end int64,
f func(e *loggregator_v2.Envelope) bool,
) bool {
if n == nil {
return false
}
e := n.Value.(*loggregator_v2.Envelope)
t := e.GetTimestamp()
if t >= start {
// Visit the left (older) subtree first; propagate its stop signal.
if s.treeAscTraverse(n.Children[0], start, end, f) {
return true
}
if (t >= end || f(e)) && !isNodeAFudgeSequenceMember(n, 1) {
return true
}
}
return s.treeAscTraverse(n.Children[1], start, end, f)
}
func isNodeAFudgeSequenceMember(node *avltree.Node, nextChildIndex int) bool |
// treeDescTraverse is the mirror of treeAscTraverse: it walks the subtree
// rooted at n in descending key order, invoking f on envelopes whose
// original timestamp t is in [start, end). Stop signals (t < start or f
// returning true) are suppressed while inside a timestamp fudge sequence
// so the remaining members are still visited.
func (s *Store) treeDescTraverse(
n *avltree.Node,
start int64,
end int64,
f func(e *loggregator_v2.Envelope) bool,
) bool {
if n == nil {
return false
}
e := n.Value.(*loggregator_v2.Envelope)
t := e.GetTimestamp()
if t < end {
// Visit the right (newer) subtree first; propagate its stop signal.
if s.treeDescTraverse(n.Children[1], start, end, f) {
return true
}
if (t < start || f(e)) && !isNodeAFudgeSequenceMember(n, 0) {
return true
}
}
return s.treeDescTraverse(n.Children[0], start, end, f)
}
// checkEnvelopeType reports whether envelope e carries the payload kind
// corresponding to the requested type; EnvelopeType_ANY matches everything.
func (s *Store) checkEnvelopeType(e *loggregator_v2.Envelope, t logcache_v1.EnvelopeType) bool {
if t == logcache_v1.EnvelopeType_ANY {
return true
}
switch t {
case logcache_v1.EnvelopeType_LOG:
return e.GetLog() != nil
case logcache_v1.EnvelopeType_COUNTER:
return e.GetCounter() != nil
case logcache_v1.EnvelopeType_GAUGE:
return e.GetGauge() != nil
case logcache_v1.EnvelopeType_TIMER:
return e.GetTimer() != nil
case logcache_v1.EnvelopeType_EVENT:
return e.GetEvent() != nil
default:
// This should never happen. This implies the store is being used
// poorly.
panic("unknown type")
}
}
// Meta returns each source ID tracked in the store.
// It snapshots each source's MetaInfo under that source's read lock, then
// fills in the current envelope Count in a second pass (re-acquiring the
// lock per source; counts may therefore be slightly newer than the rest of
// the meta).
func (store *Store) Meta() map[string]logcache_v1.MetaInfo {
metaReport := make(map[string]logcache_v1.MetaInfo)
store.storageIndex.Range(func(sourceId interface{}, tree interface{}) bool {
tree.(*storage).RLock()
metaReport[sourceId.(string)] = tree.(*storage).meta
tree.(*storage).RUnlock()
return true
})
// Range over our local copy of metaReport
// TODO - shouldn't we just maintain Count on metaReport..?!
for sourceId, meta := range metaReport {
tree, _ := store.storageIndex.Load(sourceId)
tree.(*storage).RLock()
meta.Count = int64(tree.(*storage).Size())
tree.(*storage).RUnlock()
metaReport[sourceId] = meta
}
return metaReport
}
// storage holds all buffered envelopes for a single source, backed by an
// AVL tree keyed on (possibly fudged) envelope timestamps. The embedded
// RWMutex guards both the tree and meta.
type storage struct {
sourceId string
meta logcache_v1.MetaInfo
*avltree.Tree
sync.RWMutex
}
// ExpirationHeap is a min-heap (via container/heap) of per-source
// oldest-envelope entries, ordered by timestamp.
type ExpirationHeap []storageExpiration
// storageExpiration pairs a source's oldest envelope timestamp with the
// storage it came from, for use on the ExpirationHeap.
type storageExpiration struct {
timestamp int64
sourceId string
tree *storage
}
// heap.Interface implementation; container/heap keeps the entry with the
// smallest timestamp at index 0.
func (h ExpirationHeap) Len() int { return len(h) }
func (h ExpirationHeap) Less(i, j int) bool { return h[i].timestamp < h[j].timestamp }
func (h ExpirationHeap) Swap(i, j int) { h[i], h[j] = h[j], h[i] }
// Push appends x; callers must go through heap.Push to preserve heap order.
func (h *ExpirationHeap) Push(x interface{}) {
*h = append(*h, x.(storageExpiration))
}
// Pop removes and returns the LAST element; only heap.Pop(h) yields the
// minimum-timestamp entry. Do not call this method directly.
func (h *ExpirationHeap) Pop() interface{} {
old := *h
n := len(old)
x := old[n-1]
*h = old[0 : n-1]
return x
}
// calculateCachePeriod returns how far back the cache currently reaches, in
// milliseconds: the distance from oldestTimestamp (nanoseconds) to now.
func calculateCachePeriod(oldestTimestamp int64) int64 {
return (time.Now().UnixNano() - oldestTimestamp) / int64(time.Millisecond)
}
| {
e := node.Value.(*loggregator_v2.Envelope)
timestamp := e.GetTimestamp()
// check if node is internal to a fudge sequence
if timestamp != node.Key.(int64) {
return true
}
// node is not internal, but could initiate a fudge sequence, so
// check next child
nextChild := node.Children[nextChildIndex]
if nextChild == nil {
return false
}
// if next child exists, check it for fudge sequence membership.
// if the child's timestamps don't match, then the parent is the first
// member of a fudge sequence.
nextEnvelope := nextChild.Value.(*loggregator_v2.Envelope)
return (nextEnvelope.GetTimestamp() != nextChild.Key.(int64))
} |
wr_data.rs | #[doc = "Register `WR_DATA` writer"]
pub struct W(crate::W<WR_DATA_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<WR_DATA_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<WR_DATA_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<WR_DATA_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `CRC_WR_DATA` writer - Data written to this register will be taken to perform CRC calculation with selected bit order and 1's complement pre-process. Any write size 8, 16 or 32-bit are allowed and accept back-to-back transactions."]
pub struct CRC_WR_DATA_W<'a> {
w: &'a mut W,
}
impl<'a> CRC_WR_DATA_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u32) -> &'a mut W {
self.w.bits = value as u32;
self.w
}
}
impl W {
#[doc = "Bits 0:31 - Data written to this register will be taken to perform CRC calculation with selected bit order and 1's complement pre-process. Any write size 8, 16 or 32-bit are allowed and accept back-to-back transactions."]
#[inline(always)]
pub fn crc_wr_data(&mut self) -> CRC_WR_DATA_W {
CRC_WR_DATA_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "CRC data register\n\nThis register you can [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [wr_data](index.html) module"]
pub struct WR_DATA_SPEC;
impl crate::RegisterSpec for WR_DATA_SPEC {
type Ux = u32;
}
#[doc = "`write(|w| ..)` method takes [wr_data::W](W) writer structure"]
impl crate::Writable for WR_DATA_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets WR_DATA to value 0"]
impl crate::Resettable for WR_DATA_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0 | } | } |
Player.ts | import { Base } from '@dyno.gg/dyno-core';
import * as eris from '@dyno.gg/eris';
import * as dyno from 'Dyno';
import { PlayerManager } from 'eris-lavalink';
import * as moment from 'moment';
import Queue from './Queue';
import Resolver from './Resolver';
require('moment-duration-format');
/**
* Dyno music player
* @class Player
* @extends Base
*/
export default class Player extends Base {
public cooldown : number = null;
public errorCooldown : number = null;
public guild : eris.Guild;
public module : any;
public node : any = null;
public nowPlayingId : string = null;
public player : any = null;
public playing : boolean = false;
public queue : Queue;
public track : any;
public resolver : Resolver;
public startTime : number = null;
public stopping : boolean = false;
public timers : any[] = [];
public textChannel : ErisChannel;
public voiceChannel : ErisChannel;
public volume : number = null;
public votes : Set<string> = new Set();
public isDead : boolean = false; |
const guildConfig = data.guildConfig;
let textChannel;
guildConfig.music = guildConfig.music || {};
if (guildConfig.music.channel) {
textChannel = data.guild.channels.get(guildConfig.music.channel);
}
this.textChannel = textChannel || data.textChannel;
this.voiceChannel = data.voiceChannel;
this.guild = data.guild;
this.module = module;
this.resolver = new Resolver(this.dyno, {
guild : data.guild,
guildConfig : data.guildConfig,
module : this,
version : data.version,
});
}
/**
 * Load (or reload) this guild's queue from storage and cache it on the
 * player instance.
 */
public async getQueue() {
const queue = new Queue(this.dyno, this.guild);
await queue.getQueue();
this.queue = queue;
return this.queue;
}
/**
 * Join the voice channel.
 * Verifies connect/speak/VAD permissions, reuses an existing voice
 * connection for this guild when one exists, and wires up error and
 * disconnect handlers on the new player. On a player error the current
 * track is stopped, shifted off the queue, and playback retried shortly
 * after.
 */
public connect(): Promise<eris.VoiceConnection> {
if (this.voiceChannel == undefined || this.guild == undefined) {
return Promise.reject('Not a guild channel.');
}
const missingPermissions = this.isMissingPermissions('voiceConnect', 'voiceSpeak', 'voiceUseVAD');
if (missingPermissions != undefined) {
return Promise.reject(`I don't have connect, speak, or use voice activity permissions in that channel.`);
}
if (this.client.voiceConnections) {
const player = this.client.voiceConnections.get(this.guild.id);
if (player) {
return Promise.resolve(player);
}
}
const options: any = {};
if (this.guild.region != undefined) {
options.region = this.guild.region;
}
const voiceConnections: PlayerManager = this.client.voiceConnections;
return this.client.joinVoiceChannel(this.voiceChannel.id).then((player: any) => {
this.player = player;
this.node = player.node;
this.player.on('error', async (err: string) => {
this.playing = false;
this.logError(err, 'player.error');
this.stop();
await this.queue.shift();
// Short delay before retrying playback after a player error.
return new Promise((res: any, rej: any) =>
setTimeout(() =>
this.play().then(res).catch(rej), 100));
});
this.player.on('disconnect', (err?: string) => {
this.playing = false;
if (err == undefined) {
return;
}
this.logError(err, 'player.disconnect');
});
});
}
/**
 * Resolve a search/URL and add the result(s) to the queue.
 * Non-premium guilds get length limits enforced (>= 30s, <= maxSongLength).
 * Premium guilds adding a YouTube link that resolves to multiple tracks get
 * the whole playlist bulk-added; otherwise only the first result is queued.
 * When nothing is currently playing, the track is prepended so it plays next.
 * @param guildConfig The guild's configuration.
 * @param options Play options; `options.search` is the query or URL,
 *                `options.channel` (if set) receives a confirmation message.
 */
public async add(guildConfig: dyno.GuildConfig, options: PlayOptions) {
const prepend = !this.playing;
const search = options.search;
let queueItem: QueueItem;
let tracks: QueueItems;
await this.queue.getQueue();
try {
tracks = await this.resolver.resolveTracks(search);
if (tracks == undefined || tracks.length === 0) {
return Promise.reject(`No results for ${search}`);
}
queueItem = tracks[0];
if (queueItem == undefined) {
return Promise.reject(`No results found for ${search}`);
}
} catch (err) {
throw err;
}
if (guildConfig.isPremium !== true && queueItem != undefined) {
if (queueItem.length < 30) {
return Promise.reject(`That song is less then 30 seconds, please try another.`);
}
const maxSongLength = this.config.maxSongLength ? this.config.maxSongLength : 5400;
if (queueItem.length > maxSongLength) {
return Promise.reject('60m');
}
}
const isLink = (search.includes('youtu.be') || search.includes('youtube.com'));
if (guildConfig.isPremium === true && isLink === true && tracks.length > 1) {
this.statsd.increment('music.playlists');
try {
tracks = await this.queue.bulkAdd(tracks);
} catch (err) {
throw err;
}
if (options.channel != undefined) {
this.sendMessage(options.channel, `Added ${tracks.length} songs to the queue.`).catch(() => null);
}
return tracks;
} else {
this.statsd.increment('music.adds');
try {
await this.queue.add(queueItem, prepend);
} catch (err) {
throw err;
}
if (options.channel != undefined) {
this.sendMessage(options.channel, `Added ${queueItem.title} to the queue.`).catch(() => null);
}
return queueItem;
}
}
// tslint:disable-next-line:cyclomatic-complexity
/**
 * Resolve and start playback of the head of the queue.
 * If `options.search` is given it is added first (and the method returns
 * early when something is already playing). Non-premium guilds get
 * song-length limits enforced. Wires the 'end' handler that advances the
 * queue or tears the player down when the queue is exhausted.
 * @param guildConfig The guild's configuration (fetched when omitted).
 * @param options Optional play options (search query, reply channel).
 */
public async play(guildConfig?: dyno.GuildConfig, options?: PlayOptions) {
	if (this.isDead) {
		return false;
	}
	let trackInfo;
	if (guildConfig == undefined) {
		guildConfig = await this.dyno.guilds.getOrFetch(this.guild.id);
	}
	if (!this.queue) {
		await this.getQueue();
	} else {
		await this.queue.getQueue();
	}
	if (options && options.search) {
		try {
			trackInfo = await this.add(guildConfig, options);
		} catch (err) {
			throw err;
		}
		if (this.playing === true) {
			return;
		}
	}
	// Always play from the head of the queue (re-resolving stale entries).
	trackInfo = await this.getTrack();
	if (!trackInfo) {
		if (this.queue.size > 1) {
			await this.queue.shift();
			return this.play(guildConfig);
		} else {
			this.queue.remove();
			throw new Error(`I can't play that song right now, please try another.`);
		}
	}
	this.startTime = Date.now();
	if (guildConfig.isPremium !== true) {
		if (trackInfo != undefined && trackInfo.length > 0) {
			const maxSongLength = this.config.maxSongLength ? this.config.maxSongLength : 5400;
			if (trackInfo.length < 30 || trackInfo.length > maxSongLength) {
				return this.skip();
			}
		}
		const maxPlayingTime = this.config.maxPlayingTime ? this.config.maxPlayingTime : 14400;
		// NOTE(review): startTime was set just above, so this difference is
		// ~0ms while maxPlayingTime looks like seconds — verify the intended
		// units and anchor point of this check.
		if ((Date.now() - this.startTime) >= maxPlayingTime) {
			this.stop(true);
			if (this.textChannel != undefined) {
				return this.sendMessage(this.textChannel,
					'Leaving the channel for performance reasons, use ?play to continue or upgrade to remove this.');
			}
		}
	}
	// BUG FIX: was `if (!trackInfo == undefined)`, which compares a boolean
	// to undefined and can never be true, so this guard was dead code.
	if (trackInfo == undefined) {
		throw new Error('TrackInfoError: No track info.');
	}
	if (trackInfo.track == undefined) {
		throw new Error('TrackInfoError: No track provided.');
	}
	this.track = trackInfo;
	if (!this.volume && guildConfig.music.volume) {
		this.setVolume(guildConfig.music.volume);
	}
	this.player.play(trackInfo.track);
	this.playing = true;
	this.player.once('end', (data: any) => {
		this.votes = new Set();
		if (data.reason != undefined && data.reason === 'REPLACED') {
			return;
		}
		this.playing = false;
		this.queue.shift().then((queue: QueueItems) => {
			if (queue !== undefined && queue.length > 0) {
				return this.play(guildConfig).catch((err: string) => {
					this.logError(err, 'player.on.end');
				});
			}
			if (this.textChannel != undefined) {
				this.sendMessage(this.textChannel, `Queue concluded.`).catch(() => null);
			}
			this.stop(true);
		}).catch(() => null);
	});
	this.statsd.increment('music.plays');
	this.announce(trackInfo).catch(() => null);
}
/**
 * Stop playback, optionally leaving the voice channel and destroying the
 * player. Re-entry is debounced via the `stopping` flag (cleared after 3s).
 * @param leave When true, disconnect from the voice channel and destroy.
 */
public stop(leave?: boolean): void {
if (this.stopping === true) {
return;
}
this.stopping = true;
setTimeout(() => {
this.stopping = false;
}, 3000);
this.volume = null;
this.votes = new Set();
if (this.player != undefined) {
// Remove the 'end' listener first so stopping doesn't trigger the
// queue-advance logic installed in play().
this.player.removeAllListeners('end');
if (leave === true) {
this.client.leaveVoiceChannel(this.voiceChannel.id);
return this.destroy();
}
try {
this.playing = false;
this.player.stop();
} catch (err) {
this.logError(err, 'player.stop');
}
if (this.stopping === true) {
this.stopping = false;
}
} else {
this.playing = false;
try {
this.client.leaveVoiceChannel(this.voiceChannel.id);
this.destroy();
} catch (err) {
this.logError(err, 'player.stop');
}
}
}
/**
 * Skip the current track, with vote-skip when more than two members share
 * the voice channel (a majority of voice members must vote unless forced).
 * Seekable tracks are skipped by seeking near the end so the normal 'end'
 * handler advances the queue; otherwise the player is stopped and the
 * queue shifted manually.
 * @param message The invoking message (omitted for programmatic skips).
 * @param guildConfig The guild's configuration.
 * @param force When true, bypass vote counting.
 */
public async skip(message?: eris.Message, guildConfig?: dyno.GuildConfig, force?: boolean): Promise<any> {
if (message !== undefined && force !== true && this.voiceChannel.voiceMembers.size > 2) {
if (this.votes.has(message.author.id)) {
return this.sendMessage(message.channel, 'You have already voted.');
}
if ((this.votes.size / this.voiceChannel.voiceMembers.size) < 0.5) {
this.votes.add(message.author.id);
return this.sendMessage(message.channel, 'Your vote has been added, more votes are needed.');
} else {
this.votes = new Set();
}
}
if (this.queue.isEmpty()) {
await this.queue.getQueue();
}
this.statsd.increment('music.skips');
if (this.track && this.track.isSeekable) {
// Seek to 2 seconds before the end (track.length is in seconds).
const position = (this.track.length - 2) * 1000;
if (this.playing) {
return this.player.seek(position);
}
return this.play(guildConfig).catch(() => false);
} else {
try {
this.stop();
await this.queue.shift();
return this.play(guildConfig);
} catch (err) {
this.logError(err, 'player.skip');
}
}
}
/**
 * Remember the desired volume and apply it to the active player, if any.
 * @param volume The new playback volume.
 */
public setVolume(volume: number): void {
this.volume = volume;
if (this.player) {
this.player.setVolume(volume);
}
}
/**
 * Seek within the current track.
 * @param message The invoking message (unused beyond the signature).
 * @param position Target position in milliseconds; rejected when beyond
 *                 the track length (track.length is in seconds).
 */
public async seek(message: eris.Message, position: number): Promise<any> {
if (position > (this.track.length * 1000)) {
return Promise.reject(`The song isn't that long.`);
}
this.player.seek(position);
return Promise.resolve();
}
/**
 * Pull the queue entry at `index` to the front and play it.
 * Entries created by an older resolver version are re-resolved first.
 * @param guildConfig The guild's configuration.
 * @param index Queue position (number or numeric string).
 */
public async playQueueItem(guildConfig: dyno.GuildConfig, index: string|number): Promise<any> {
if (this.queue.isEmpty()) {
await this.queue.getQueue();
}
if (typeof index === 'string') {
index = parseInt(index, 10);
}
let track = await this.queue.remove(index);
if (!track.v || track.v !== this.module.version) {
try {
const tracks = await this.resolver.resolveTracks(track.uri || track.url);
track = tracks[0];
} catch (err) {
throw err;
}
}
this.queue.add(track, true);
return this.play(guildConfig);
}
/**
 * Post (or edit) a "Now Playing" embed for the given queue item.
 * Throttled to one announcement per 500ms. When the most recent message in
 * the text channel is the previous announcement, it is edited in place
 * instead of sending a new message.
 * @param queueItem The item to announce (defaults to the current track).
 * @param channel Optional explicit target channel; otherwise the
 *                configured music text channel is used.
 */
public async announce(queueItem: QueueItem, channel?: ErisChannel): Promise<any> {
if (this.textChannel == undefined) {
return;
}
if (this.cooldown !== undefined && (Date.now() - this.cooldown) <= 500) {
return;
}
this.cooldown = Date.now();
if (queueItem == undefined) {
queueItem = await this.getTrack();
}
let length;
if (queueItem.length > 0 && queueItem.length < 37000) {
const duration: any = moment.duration(queueItem.length, 'seconds');
length = duration.format('h[h] m[m] s[s]');
} else if (queueItem.length > 37000) {
// Treat absurdly long reported lengths (e.g. live streams) as infinite.
length = '∞';
}
let thumbnail = '';
const uri = queueItem.uri || queueItem.url;
if (uri.includes('soundcloud.com')) {
thumbnail = ``;
} else {
thumbnail = `http://img.youtube.com/vi/${queueItem.identifier}/default.jpg`;
}
const embed: eris.EmbedBase = {
color: this.utils.getColor('blue'),
title: `:notes: Now Playing: ${queueItem.title}`,
fields: [
{ name: 'Link', value: `[Click Here](${uri})`, inline: true },
{ name: 'Playlist', value: `[Click Here](${this.config.site.host}/playlist/${this.guild.id}#${queueItem.identifier})`, inline: true },
],
thumbnail: { url: thumbnail },
timestamp: (new Date()).toISOString(),
};
if (length !== undefined) {
embed.footer = { text: `Length: ${length}` };
}
if (channel !== undefined) {
return this.sendMessage(channel, { embed });
}
let sortedMessages = null;
let lastMessage;
if (this.textChannel.messages != undefined && this.textChannel.messages.size > 0) {
sortedMessages = [...this.textChannel.messages.values()].sort((a: eris.Message, b: eris.Message) =>
(a.timestamp > b.timestamp) ? 1 : (a.timestamp < b.timestamp) ? -1 : 0);
lastMessage = sortedMessages ? sortedMessages.pop() : null;
}
if (this.nowPlayingId !== undefined && lastMessage !== undefined && lastMessage.id === this.nowPlayingId) {
return lastMessage.edit({ embed });
}
return this.sendMessage(this.textChannel, { embed }).then((msg: eris.Message) => {
this.nowPlayingId = msg.id;
});
}
/**
 * Mark this player dead and unregister it from the owning module.
 * A dead player refuses further play() calls.
 */
public destroy(): void {
this.isDead = true;
this.module.deletePlayer(this.guild);
process.nextTick(() => {
// this.module = null;
});
}
/**
 * Check the bot's permissions in the voice channel.
 * @param perms Permission names to verify.
 * @returns The missing permission names, or null when all are granted.
 */
private isMissingPermissions(...perms: string[]): string[] {
const permissions = this.voiceChannel.permissionsOf(this.client.user.id);
const missingPermissions = [];
for (const perm of perms) {
if (permissions.has(perm) !== true) {
missingPermissions.push(perm);
}
}
return missingPermissions.length > 0 ? missingPermissions : null;
}
/**
 * Return the head of the queue, re-resolving it (and writing the fresh
 * result back into the queue) when it was created by an older resolver
 * version. Returns null for an empty queue or an entry with no URL.
 */
private async getTrack(): Promise<QueueItem> {
const queue = await this.queue.getQueue();
let track = queue.length > 0 ? queue[0] : null;
if (track == null) {
return null;
}
if (track.v !== undefined && track.v === this.module.version) {
return track;
}
const url = track.uri !== undefined ? track.uri : track.url;
if (url == undefined) {
return null;
}
try {
const tracks = await this.resolver.resolveTracks(url);
track = tracks[0];
} catch (err) {
throw err;
}
if (track !== undefined) {
try {
await this.queue.replace(0, track);
} catch (err) {
throw err;
}
}
return track;
}
constructor(dynoInstance: dyno.Dyno, module: any, data: PlayerData) {
super(dynoInstance, data.guild); |
memory.rs | // Copyright Materialize, Inc. All rights reserved.
//
// Use of this software is governed by the Business Source License
// included in the LICENSE file.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0.
use askama::Template;
use hyper::{Body, Request, Response};
use crate::http::util;
use crate::BUILD_INFO;
#[derive(Template)]
#[template(path = "http/templates/memory.html")]
struct | <'a> {
version: &'a str,
}
// HTTP handler for the memory debug page: renders the Askama
// `memory.html` template with the server's build version. The request and
// coordinator session client are intentionally unused.
pub async fn handle_memory(
_: Request<Body>,
_: &mut coord::SessionClient,
) -> Result<Response<Body>, anyhow::Error> {
Ok(util::template_response(MemoryTemplate {
version: BUILD_INFO.version,
}))
}
| MemoryTemplate |
utilities_test.go | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package objects
import (
"strconv"
"time"
"github.com/apache/yunikorn-core/pkg/common/configs"
"github.com/apache/yunikorn-core/pkg/common/resources"
"github.com/apache/yunikorn-core/pkg/common/security"
"github.com/apache/yunikorn-core/pkg/rmproxy"
"github.com/apache/yunikorn-scheduler-interface/lib/go/si"
)
const (
appID1 = "app-1"
appID2 = "app-2"
aKey = "alloc-1"
nodeID1 = "node-1"
)
// Create the root queue, base for all testing
func createRootQueue(maxRes map[string]string) (*Queue, error) {
return createManagedQueueWithProps(nil, "root", true, maxRes, nil)
}
// wrapper around the create call using the one syntax for all queue types
func createManagedQueue(parentSQ *Queue, name string, parent bool, maxRes map[string]string) (*Queue, error) {
return createManagedQueueWithProps(parentSQ, name, parent, maxRes, nil)
}
// create managed queue with props set
// parentSQ may be nil to create a root-level queue; in that case maxRes is
// applied directly on the queue since the root cannot take it from config.
func createManagedQueueWithProps(parentSQ *Queue, name string, parent bool, maxRes, props map[string]string) (*Queue, error) {
queueConfig := configs.QueueConfig{
Name: name,
Parent: parent,
Queues: nil,
Properties: props,
}
if maxRes != nil {
queueConfig.Resources = configs.Resources{
Max: maxRes,
}
}
queue, err := NewConfiguredQueue(queueConfig, parentSQ)
if err != nil {
return nil, err
}
// root queue can not set the max via the config
if parentSQ == nil {
var max *resources.Resource
max, err = resources.NewResourceFromConf(maxRes)
if err != nil {
return nil, err
}
// only set if we have some limit
if len(max.Resources) > 0 && !resources.IsZero(max) {
queue.SetMaxResource(max)
}
}
return queue, nil
}
// wrapper around the create call using the one syntax for all queue types
// NOTE: test code uses a flag for parent=true, dynamic queues use leaf flag
func createDynamicQueue(parentSQ *Queue, name string, parent bool) (*Queue, error) {
return NewDynamicQueue(name, !parent, parentSQ)
}
// Create application with minimal info
func newApplication(appID, partition, queueName string) *Application {
tags := make(map[string]string)
return newApplicationWithTags(appID, partition, queueName, tags)
}
// Create application with tags set
func newApplicationWithTags(appID, partition, queueName string, tags map[string]string) *Application |
// newApplicationWithHandler creates a test application wired to a mocked RM
// proxy so event emission can be asserted (no placeholder timeout).
func newApplicationWithHandler(appID, partition, queueName string) (*Application, *rmproxy.MockedRMProxy) {
return newApplicationWithPlaceholderTimeout(appID, partition, queueName, 0)
}
// newApplicationWithPlaceholderTimeout creates a test application with a
// mocked RM proxy and the given placeholder execution timeout (milliseconds).
func newApplicationWithPlaceholderTimeout(appID, partition, queueName string, phTimeout int64) (*Application, *rmproxy.MockedRMProxy) {
user := security.UserGroup{
User: "testuser",
Groups: []string{},
}
siApp := &si.AddApplicationRequest{
ApplicationID: appID,
QueueName: queueName,
PartitionName: partition,
ExecutionTimeoutMilliSeconds: phTimeout,
}
mockEventHandler := rmproxy.NewMockedRMProxy()
return NewApplication(siApp, user, mockEventHandler, ""), mockEventHandler
}
// Create node with minimal info
func newNode(nodeID string, totalMap map[string]resources.Quantity) *Node {
total := resources.NewResourceFromMap(totalMap)
return newNodeInternal(nodeID, total, resources.Zero)
}
// newNodeRes creates a node with zero occupied resources from a Resource.
func newNodeRes(nodeID string, total *resources.Resource) *Node {
return newNodeInternal(nodeID, total, resources.Zero)
}
// newNodeInternal builds a schedulable test Node with all trackers
// initialised; available is computed as total minus occupied.
func newNodeInternal(nodeID string, total, occupied *resources.Resource) *Node {
return &Node{
NodeID: nodeID,
Hostname: "",
Rackname: "",
Partition: "",
attributes: nil,
totalResource: total,
occupiedResource: occupied,
allocatedResource: resources.NewResource(),
availableResource: resources.Sub(total, occupied),
allocations: make(map[string]*Allocation),
schedulable: true,
preempting: resources.NewResource(),
reservations: make(map[string]*reservation),
}
}
// newProto builds an si.NodeInfo protobuf for tests, translating the
// optional total/occupied resources into their si.Resource equivalents.
// Either resource may be nil, in which case that field is left unset.
func newProto(nodeID string, totalResource, occupiedResource *resources.Resource, attributes map[string]string) *si.NodeInfo {
	nodeInfo := si.NodeInfo{
		NodeID:     nodeID,
		Attributes: attributes,
	}

	// Convert an internal resource map into the SI protobuf representation.
	toProto := func(res *resources.Resource) *si.Resource {
		siRes := &si.Resource{
			Resources: map[string]*si.Quantity{},
		}
		for name, value := range res.Resources {
			quantity := si.Quantity{Value: int64(value)}
			siRes.Resources[name] = &quantity
		}
		return siRes
	}

	if totalResource != nil {
		nodeInfo.SchedulableResource = toProto(totalResource)
	}
	if occupiedResource != nil {
		nodeInfo.OccupiedResource = toProto(occupiedResource)
	}
	return &nodeInfo
}
// Create a new Allocation with a random ask key
func newAllocation(appID, uuid, nodeID, queueName string, res *resources.Resource) *Allocation {
askKey := strconv.FormatInt((time.Now()).UnixNano(), 10)
ask := newAllocationAsk(askKey, appID, res)
ask.setQueue(queueName)
return NewAllocation(uuid, nodeID, ask)
}
// Create a new Allocation with a random ask key
func newPlaceholderAlloc(appID, uuid, nodeID, queueName string, res *resources.Resource) *Allocation {
askKey := strconv.FormatInt((time.Now()).UnixNano(), 10)
ask := newAllocationAsk(askKey, appID, res)
ask.setQueue(queueName)
ask.placeholder = true
return NewAllocation(uuid, nodeID, ask)
}
func newAllocationAsk(allocKey, appID string, res *resources.Resource) *AllocationAsk {
return newAllocationAskTG(allocKey, appID, "", res, 1)
}
func newAllocationAskRepeat(allocKey, appID string, res *resources.Resource, repeat int) *AllocationAsk {
return newAllocationAskTG(allocKey, appID, "", res, repeat)
}
// newAllocationAskTG builds a test AllocationAsk on the "default" partition.
// A non-empty taskGroup name marks the ask as a placeholder ask; an empty
// name produces a regular ask.
func newAllocationAskTG(allocKey, appID, taskGroup string, res *resources.Resource, repeat int) *AllocationAsk {
	isPlaceholder := taskGroup != ""
	siAsk := &si.AllocationAsk{
		AllocationKey:  allocKey,
		ApplicationID:  appID,
		PartitionName:  "default",
		ResourceAsk:    res.ToProto(),
		MaxAllocations: int32(repeat),
		TaskGroupName:  taskGroup,
		Placeholder:    isPlaceholder,
	}
	return NewAllocationAsk(siAsk)
}
| {
user := security.UserGroup{
User: "testuser",
Groups: []string{},
}
siApp := &si.AddApplicationRequest{
ApplicationID: appID,
QueueName: queueName,
PartitionName: partition,
Tags: tags,
}
return NewApplication(siApp, user, nil, "")
} |
sv-SE.min.js | !function(e,t){"object"==typeof exports&&"object"==typeof module?module.exports=t():"function"==typeof define&&define.amd?define([],t):"object"==typeof exports?exports.Fakerator=t():e.Fakerator=t()}(this,function(){return function(e){function t(n){if(r[n])return r[n].exports;var i=r[n]={exports:{},id:n,loaded:!1};return e[n].call(i.exports,i,i.exports,t),i.loaded=!0,i.exports}var r={};return t.m=e,t.c=r,t.p="",t(0)}(function(e){for(var t in e)if(Object.prototype.hasOwnProperty.call(e,t))switch(typeof e[t]){case"function":break;case"object":e[t]=function(t){var r=t.slice(1),n=e[t[0]];return function(e,t,i){n.apply(this,[e,t,i].concat(r))}}(e[t]);break;default:e[t]=e[e[t]]}return e}([function(e,t,r){"use strict";function n(e){return e&&e.__esModule?e:{default:e}}var i=r(1),a=n(i),o=r(2),s=n(o),u=r(3),c=n(u),l=r(10),f=n(l),h=r(11),d=n(h),p=r(99),m=n(p);e.exports=function(){var e=r(134),t=r(144);return e=(0,d.default)(e,t,function(e){return(0,s.default)(e)||(0,c.default)(e)?e:(0,a.default)(e)||(0,f.default)(e)?void 0:e}),new m.default(e)}},function(e,t){function r(e){return null==e}e.exports=r},function(e,t){var r=Array.isArray;e.exports=r},function(e,t,r){function n(e){if(!a(e))return!1;var t=i(e);return t==s||t==u||t==o||t==c}var i=r(4),a=r(10),o="[object AsyncFunction]",s="[object Function]",u="[object GeneratorFunction]",c="[object Proxy]";e.exports=n},function(e,t,r){function n(e){return null==e?void 0===e?u:s:c&&c in Object(e)?a(e):o(e)}var i=r(5),a=r(8),o=r(9),s="[object Null]",u="[object Undefined]",c=i?i.toStringTag:void 0;e.exports=n},function(e,t,r){var n=r(6),i=n.Symbol;e.exports=i},function(e,t,r){var n=r(7),i="object"==typeof self&&self&&self.Object===Object&&self,a=n||i||Function("return this")();e.exports=a},function(e,t){(function(t){var r="object"==typeof t&&t&&t.Object===Object&&t;e.exports=r}).call(t,function(){return this}())},function(e,t,r){function n(e){var t=o.call(e,u),r=e[u];try{e[u]=void 0;var n=!0}catch(e){}var 
i=s.call(e);return n&&(t?e[u]=r:delete e[u]),i}var i=r(5),a=Object.prototype,o=a.hasOwnProperty,s=a.toString,u=i?i.toStringTag:void 0;e.exports=n},function(e,t){function r(e){return i.call(e)}var n=Object.prototype,i=n.toString;e.exports=r},function(e,t){function r(e){var t=typeof e;return null!=e&&("object"==t||"function"==t)}e.exports=r},function(e,t,r){var n=r(12),i=r(89),a=i(function(e,t,r,i){n(e,t,r,i)});e.exports=a},function(e,t,r){function n(e,t,r,f,h){e!==t&&o(t,function(o,c){if(h||(h=new i),u(o))s(e,t,c,r,n,f,h);else{var d=f?f(l(e,c),o,c+"",e,t,h):void 0;void 0===d&&(d=o),a(e,c,d)}},c)}var i=r(13),a=r(49),o=r(52),s=r(54),u=r(10),c=r(83),l=r(79);e.exports=n},function(e,t,r){function n(e){var t=this.__data__=new i(e);this.size=t.size}var i=r(14),a=r(22),o=r(23),s=r(24),u=r(25),c=r(26);n.prototype.clear=a,n.prototype.delete=o,n.prototype.get=s,n.prototype.has=u,n.prototype.set=c,e.exports=n},function(e,t,r){function n(e){var t=-1,r=null==e?0:e.length;for(this.clear();++t<r;){var n=e[t];this.set(n[0],n[1])}}var i=r(15),a=r(16),o=r(19),s=r(20),u=r(21);n.prototype.clear=i,n.prototype.delete=a,n.prototype.get=o,n.prototype.has=s,n.prototype.set=u,e.exports=n},function(e,t){function r(){this.__data__=[],this.size=0}e.exports=r},function(e,t,r){function n(e){var t=this.__data__,r=i(t,e);if(r<0)return!1;var n=t.length-1;return r==n?t.pop():o.call(t,r,1),--this.size,!0}var i=r(17),a=Array.prototype,o=a.splice;e.exports=n},function(e,t,r){function n(e,t){for(var r=e.length;r--;)if(i(e[r][0],t))return r;return-1}var i=r(18);e.exports=n},function(e,t){function r(e,t){return e===t||e!==e&&t!==t}e.exports=r},function(e,t,r){function n(e){var t=this.__data__,r=i(t,e);return r<0?void 0:t[r][1]}var i=r(17);e.exports=n},function(e,t,r){function n(e){return i(this.__data__,e)>-1}var i=r(17);e.exports=n},function(e,t,r){function n(e,t){var r=this.__data__,n=i(r,e);return n<0?(++this.size,r.push([e,t])):r[n][1]=t,this}var i=r(17);e.exports=n},function(e,t,r){function 
n(){this.__data__=new i,this.size=0}var i=r(14);e.exports=n},function(e,t){function r(e){var t=this.__data__,r=t.delete(e);return this.size=t.size,r}e.exports=r},function(e,t){function r(e){return this.__data__.get(e)}e.exports=r},function(e,t){function r(e){return this.__data__.has(e)}e.exports=r},function(e,t,r){function n(e,t){var r=this.__data__;if(r instanceof i){var n=r.__data__;if(!a||n.length<s-1)return n.push([e,t]),this.size=++r.size,this;r=this.__data__=new o(n)}return r.set(e,t),this.size=r.size,this}var i=r(14),a=r(27),o=r(34),s=200;e.exports=n},function(e,t,r){var n=r(28),i=r(6),a=n(i,"Map");e.exports=a},function(e,t,r){function n(e,t){var r=a(e,t);return i(r)?r:void 0}var i=r(29),a=r(33);e.exports=n},function(e,t,r){function n(e){if(!o(e)||a(e))return!1;var t=i(e)?p:c;return t.test(s(e))}var i=r(3),a=r(30),o=r(10),s=r(32),u=/[\\^$.*+?()[\]{}|]/g,c=/^\[object .+?Constructor\]$/,l=Function.prototype,f=Object.prototype,h=l.toString,d=f.hasOwnProperty,p=RegExp("^"+h.call(d).replace(u,"\\$&").replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g,"$1.*?")+"$");e.exports=n},function(e,t,r){function n(e){return!!a&&a in e}var i=r(31),a=function(){var e=/[^.]+$/.exec(i&&i.keys&&i.keys.IE_PROTO||"");return e?"Symbol(src)_1."+e:""}();e.exports=n},function(e,t,r){var n=r(6),i=n["__core-js_shared__"];e.exports=i},function(e,t){function r(e){if(null!=e){try{return i.call(e)}catch(e){}try{return e+""}catch(e){}}return""}var n=Function.prototype,i=n.toString;e.exports=r},function(e,t){function r(e,t){return null==e?void 0:e[t]}e.exports=r},function(e,t,r){function n(e){var t=-1,r=null==e?0:e.length;for(this.clear();++t<r;){var n=e[t];this.set(n[0],n[1])}}var i=r(35),a=r(43),o=r(46),s=r(47),u=r(48);n.prototype.clear=i,n.prototype.delete=a,n.prototype.get=o,n.prototype.has=s,n.prototype.set=u,e.exports=n},function(e,t,r){function n(){this.size=0,this.__data__={hash:new i,map:new(o||a),string:new i}}var 
i=r(36),a=r(14),o=r(27);e.exports=n},function(e,t,r){function n(e){var t=-1,r=null==e?0:e.length;for(this.clear();++t<r;){var n=e[t];this.set(n[0],n[1])}}var i=r(37),a=r(39),o=r(40),s=r(41),u=r(42);n.prototype.clear=i,n.prototype.delete=a,n.prototype.get=o,n.prototype.has=s,n.prototype.set=u,e.exports=n},function(e,t,r){function n(){this.__data__=i?i(null):{},this.size=0}var i=r(38);e.exports=n},function(e,t,r){var n=r(28),i=n(Object,"create");e.exports=i},function(e,t){function r(e){var t=this.has(e)&&delete this.__data__[e];return this.size-=t?1:0,t}e.exports=r},function(e,t,r){function n(e){var t=this.__data__;if(i){var r=t[e];return r===a?void 0:r}return s.call(t,e)?t[e]:void 0}var i=r(38),a="__lodash_hash_undefined__",o=Object.prototype,s=o.hasOwnProperty;e.exports=n},function(e,t,r){function n(e){var t=this.__data__;return i?void 0!==t[e]:o.call(t,e)}var i=r(38),a=Object.prototype,o=a.hasOwnProperty;e.exports=n},function(e,t,r){function n(e,t){var r=this.__data__;return this.size+=this.has(e)?0:1,r[e]=i&&void 0===t?a:t,this}var i=r(38),a="__lodash_hash_undefined__";e.exports=n},function(e,t,r){function n(e){var t=i(this,e).delete(e);return this.size-=t?1:0,t}var i=r(44);e.exports=n},function(e,t,r){function n(e,t){var r=e.__data__;return i(t)?r["string"==typeof t?"string":"hash"]:r.map}var i=r(45);e.exports=n},function(e,t){function r(e){var t=typeof e;return"string"==t||"number"==t||"symbol"==t||"boolean"==t?"__proto__"!==e:null===e}e.exports=r},function(e,t,r){function n(e){return i(this,e).get(e)}var i=r(44);e.exports=n},function(e,t,r){function n(e){return i(this,e).has(e)}var i=r(44);e.exports=n},function(e,t,r){function n(e,t){var r=i(this,e),n=r.size;return r.set(e,t),this.size+=r.size==n?0:1,this}var i=r(44);e.exports=n},function(e,t,r){function n(e,t,r){(void 0===r||a(e[t],r))&&(void 0!==r||t in e)||i(e,t,r)}var i=r(50),a=r(18);e.exports=n},function(e,t,r){function 
n(e,t,r){"__proto__"==t&&i?i(e,t,{configurable:!0,enumerable:!0,value:r,writable:!0}):e[t]=r}var i=r(51);e.exports=n},function(e,t,r){var n=r(28),i=function(){try{var e=n(Object,"defineProperty");return e({},"",{}),e}catch(e){}}();e.exports=i},function(e,t,r){var n=r(53),i=n();e.exports=i},function(e,t){function r(e){return function(t,r,n){for(var i=-1,a=Object(t),o=n(t),s=o.length;s--;){var u=o[e?s:++i];if(r(a[u],u,a)===!1)break}return t}}e.exports=r},function(e,t,r){function n(e,t,r,n,b,w,_){var k=g(e,r),S=g(t,r),A=_.get(S);if(A)return void i(e,r,A);var M=w?w(k,S,r+"",e,t,_):void 0,x=void 0===M;if(x){var E=l(S),B=!E&&h(S),j=!E&&!B&&y(S);M=S,E||B||j?l(k)?M=k:f(k)?M=s(k):B?(x=!1,M=a(S,!0)):j?(x=!1,M=o(S,!0)):M=[]:m(S)||c(S)?(M=k,c(k)?M=v(k):p(k)&&!d(k)||(M=u(S))):x=!1}x&&(_.set(S,M),b(M,S,n,w,_),_.delete(S)),i(e,r,M)}var i=r(49),a=r(55),o=r(57),s=r(60),u=r(61),c=r(66),l=r(2),f=r(69),h=r(72),d=r(3),p=r(10),m=r(74),y=r(75),g=r(79),v=r(80);e.exports=n},function(e,t,r){(function(e){function n(e,t){if(t)return e.slice();var r=e.length,n=c?c(r):new e.constructor(r);return e.copy(n),n}var i=r(6),a="object"==typeof t&&t&&!t.nodeType&&t,o=a&&"object"==typeof e&&e&&!e.nodeType&&e,s=o&&o.exports===a,u=s?i.Buffer:void 0,c=u?u.allocUnsafe:void 0;e.exports=n}).call(t,r(56)(e))},function(e,t){e.exports=function(e){return e.webpackPolyfill||(e.deprecate=function(){},e.paths=[],e.children=[],e.webpackPolyfill=1),e}},function(e,t,r){function n(e,t){var r=t?i(e.buffer):e.buffer;return new e.constructor(r,e.byteOffset,e.length)}var i=r(58);e.exports=n},function(e,t,r){function n(e){var t=new e.constructor(e.byteLength);return new i(t).set(new i(e)),t}var i=r(59);e.exports=n},function(e,t,r){var n=r(6),i=n.Uint8Array;e.exports=i},function(e,t){function r(e,t){var r=-1,n=e.length;for(t||(t=Array(n));++r<n;)t[r]=e[r];return t}e.exports=r},function(e,t,r){function n(e){return"function"!=typeof e.constructor||o(e)?{}:i(a(e))}var i=r(62),a=r(63),o=r(65);e.exports=n},function(e,t,r){var 
n=r(10),i=Object.create,a=function(){function e(){}return function(t){if(!n(t))return{};if(i)return i(t);e.prototype=t;var r=new e;return e.prototype=void 0,r}}();e.exports=a},function(e,t,r){var n=r(64),i=n(Object.getPrototypeOf,Object);e.exports=i},function(e,t){function r(e,t){return function(r){return e(t(r))}}e.exports=r},function(e,t){function r(e){var t=e&&e.constructor,r="function"==typeof t&&t.prototype||n;return e===r}var n=Object.prototype;e.exports=r},function(e,t,r){var n=r(67),i=r(68),a=Object.prototype,o=a.hasOwnProperty,s=a.propertyIsEnumerable,u=n(function(){return arguments}())?n:function(e){return i(e)&&o.call(e,"callee")&&!s.call(e,"callee")};e.exports=u},function(e,t,r){function n(e){return a(e)&&i(e)==o}var i=r(4),a=r(68),o="[object Arguments]";e.exports=n},function(e,t){function r(e){return null!=e&&"object"==typeof e}e.exports=r},function(e,t,r){function n(e){return a(e)&&i(e)}var i=r(70),a=r(68);e.exports=n},function(e,t,r){function n(e){return null!=e&&a(e.length)&&!i(e)}var i=r(3),a=r(71);e.exports=n},function(e,t){function r(e){return"number"==typeof e&&e>-1&&e%1==0&&e<=n}var n=9007199254740991;e.exports=r},function(e,t,r){(function(e){var n=r(6),i=r(73),a="object"==typeof t&&t&&!t.nodeType&&t,o=a&&"object"==typeof e&&e&&!e.nodeType&&e,s=o&&o.exports===a,u=s?n.Buffer:void 0,c=u?u.isBuffer:void 0,l=c||i;e.exports=l}).call(t,r(56)(e))},function(e,t){function r(){return!1}e.exports=r},function(e,t,r){function n(e){if(!o(e)||i(e)!=s)return!1;var t=a(e);if(null===t)return!0;var r=f.call(t,"constructor")&&t.constructor;return"function"==typeof r&&r instanceof r&&l.call(r)==h}var i=r(4),a=r(63),o=r(68),s="[object Object]",u=Function.prototype,c=Object.prototype,l=u.toString,f=c.hasOwnProperty,h=l.call(Object);e.exports=n},function(e,t,r){var n=r(76),i=r(77),a=r(78),o=a&&a.isTypedArray,s=o?i(o):n;e.exports=s},function(e,t,r){function n(e){return o(e)&&a(e.length)&&!!T[i(e)]}var i=r(4),a=r(71),o=r(68),s="[object Arguments]",u="[object 
Array]",c="[object Boolean]",l="[object Date]",f="[object Error]",h="[object Function]",d="[object Map]",p="[object Number]",m="[object Object]",y="[object RegExp]",g="[object Set]",v="[object String]",b="[object WeakMap]",w="[object ArrayBuffer]",_="[object DataView]",k="[object Float32Array]",S="[object Float64Array]",A="[object Int8Array]",M="[object Int16Array]",x="[object Int32Array]",E="[object Uint8Array]",B="[object Uint8ClampedArray]",j="[object Uint16Array]",C="[object Uint32Array]",T={};T[k]=T[S]=T[A]=T[M]=T[x]=T[E]=T[B]=T[j]=T[C]=!0,T[s]=T[u]=T[w]=T[c]=T[_]=T[l]=T[f]=T[h]=T[d]=T[p]=T[m]=T[y]=T[g]=T[v]=T[b]=!1,e.exports=n},function(e,t){function r(e){return function(t){return e(t)}}e.exports=r},function(e,t,r){(function(e){var n=r(7),i="object"==typeof t&&t&&!t.nodeType&&t,a=i&&"object"==typeof e&&e&&!e.nodeType&&e,o=a&&a.exports===i,s=o&&n.process,u=function(){try{var e=a&&a.require&&a.require("util").types;return e?e:s&&s.binding&&s.binding("util")}catch(e){}}();e.exports=u}).call(t,r(56)(e))},function(e,t){function r(e,t){if(("constructor"!==t||"function"!=typeof e[t])&&"__proto__"!=t)return e[t]}e.exports=r},function(e,t,r){function n(e){return i(e,a(e))}var i=r(81),a=r(83);e.exports=n},function(e,t,r){function n(e,t,r,n){var o=!r;r||(r={});for(var s=-1,u=t.length;++s<u;){var c=t[s],l=n?n(r[c],e[c],c,r,e):void 0;void 0===l&&(l=e[c]),o?a(r,c,l):i(r,c,l)}return r}var i=r(82),a=r(50);e.exports=n},function(e,t,r){function n(e,t,r){var n=e[t];s.call(e,t)&&a(n,r)&&(void 0!==r||t in e)||i(e,t,r)}var i=r(50),a=r(18),o=Object.prototype,s=o.hasOwnProperty;e.exports=n},function(e,t,r){function n(e){return o(e)?i(e,!0):a(e)}var i=r(84),a=r(87),o=r(70);e.exports=n},function(e,t,r){function n(e,t){var r=o(e),n=!r&&a(e),l=!r&&!n&&s(e),h=!r&&!n&&!l&&c(e),d=r||n||l||h,p=d?i(e.length,String):[],m=p.length;for(var y in e)!t&&!f.call(e,y)||d&&("length"==y||l&&("offset"==y||"parent"==y)||h&&("buffer"==y||"byteLength"==y||"byteOffset"==y)||u(y,m))||p.push(y);return p}var 
i=r(85),a=r(66),o=r(2),s=r(72),u=r(86),c=r(75),l=Object.prototype,f=l.hasOwnProperty;e.exports=n},function(e,t){function r(e,t){for(var r=-1,n=Array(e);++r<e;)n[r]=t(r);return n}e.exports=r},function(e,t){function r(e,t){var r=typeof e;return t=null==t?n:t,!!t&&("number"==r||"symbol"!=r&&i.test(e))&&e>-1&&e%1==0&&e<t}var n=9007199254740991,i=/^(?:0|[1-9]\d*)$/;e.exports=r},function(e,t,r){function n(e){if(!i(e))return o(e);var t=a(e),r=[];for(var n in e)("constructor"!=n||!t&&u.call(e,n))&&r.push(n);return r}var i=r(10),a=r(65),o=r(88),s=Object.prototype,u=s.hasOwnProperty;e.exports=n},function(e,t){function r(e){var t=[];if(null!=e)for(var r in Object(e))t.push(r);return t}e.exports=r},function(e,t,r){function n(e){return i(function(t,r){var n=-1,i=r.length,o=i>1?r[i-1]:void 0,s=i>2?r[2]:void 0;for(o=e.length>3&&"function"==typeof o?(i--,o):void 0,s&&a(r[0],r[1],s)&&(o=i<3?void 0:o,i=1),t=Object(t);++n<i;){var u=r[n];u&&e(t,u,n,o)}return t})}var i=r(90),a=r(98);e.exports=n},function(e,t,r){function n(e,t){return o(a(e,t,i),e+"")}var i=r(91),a=r(92),o=r(94);e.exports=n},function(e,t){function r(e){return e}e.exports=r},function(e,t,r){function n(e,t,r){return t=a(void 0===t?e.length-1:t,0),function(){for(var n=arguments,o=-1,s=a(n.length-t,0),u=Array(s);++o<s;)u[o]=n[t+o];o=-1;for(var c=Array(t+1);++o<t;)c[o]=n[o];return c[t]=r(u),i(e,this,c)}}var i=r(93),a=Math.max;e.exports=n},function(e,t){function r(e,t,r){switch(r.length){case 0:return e.call(t);case 1:return e.call(t,r[0]);case 2:return e.call(t,r[0],r[1]);case 3:return e.call(t,r[0],r[1],r[2])}return e.apply(t,r)}e.exports=r},function(e,t,r){var n=r(95),i=r(97),a=i(n);e.exports=a},function(e,t,r){var n=r(96),i=r(51),a=r(91),o=i?function(e,t){return i(e,"toString",{configurable:!0,enumerable:!1,value:n(t),writable:!0})}:a;e.exports=o},function(e,t){function r(e){return function(){return e}}e.exports=r},function(e,t){function r(e){var t=0,r=0;return function(){var o=a(),s=i-(o-r);if(r=o,s>0){if(++t>=n)return 
arguments[0]}else t=0;return e.apply(void 0,arguments)}}var n=800,i=16,a=Date.now;e.exports=r},function(e,t,r){function n(e,t,r){if(!s(r))return!1;var n=typeof t;return!!("number"==n?a(r)&&o(t,r.length):"string"==n&&t in r)&&i(r[t],e)}var i=r(18),a=r(70),o=r(86),s=r(10);e.exports=n},function(e,t,r){"use strict";function n(e){return e&&e.__esModule?e:{default:e}}function i(e,t,r){return t in e?Object.defineProperty(e,t,{value:r,enumerable:!0,configurable:!0,writable:!0}):e[t]=r,e}var a=r(100),o=n(a),s=r(112),u=n(s),c=r(122),l=n(c),f=r(1),h=n(f),d=r(2),p=n(d),m=r(131),y=n(m),g=r(3),v=n(g),b=r(132),w=n(b),_=r(10),k=n(_),S=r(133),A="abcdefghijklmnopqrstuvwxyz",M="0123456789"+A;e.exports=function(e){function t(e,n,i){(0,u.default)(Object.keys(n),function(a){if("_meta"!==a){var o=n[a];(0,k.default)(o)&&!(0,p.default)(o)&&!(0,v.default)(o)&&i<10?(e[a]={},t(e[a],o,i+1)):e[a]=function(){for(var e=arguments.length,t=Array(e),n=0;n<e;n++)t[n]=arguments[n];return r.generate.apply(r,[o].concat(t))}}})}var r=this;r.locale=e,r.seed=function(e){(0,p.default)(e)&&e.length>0?S.seed_array(e):S.seed(e)},r.random={number:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:9999,t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:1;if(t>e){var n=[e,t];t=n[0],e=n[1]}return e/=r,t/=r,r*Math.floor(S.rand(e+1,t))},boolean:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:50;return r.random.number(0,100)<=e},digit:function(){return r.random.number(9)},hex:function(){for(var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:1,t=[],n=0;n<e;n++)t.push(r.random.number(15).toString(16));return t.join("")},letter:function(){return r.random.arrayElement(A)},string:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=[];(0,k.default)(e)&&(e=r.random.number(e.min||5,e.max||10));for(var n=0;n<e;n++)t.push(r.random.letter());return 
t.join("")},arrayElement:function(e){if(e&&e.length>0)return e[r.random.number(e.length-1)]},objectElement:function(e){if(e){var t=r.random.arrayElement(Object.keys(e));return i({},t,e[t])}},masked:function(e){if(!(0,h.default)(e)){for(var t=[],n=0;n<=e.length;n++)"9"===e.charAt(n)?t.push(r.random.number(9).toString()):"a"===e.charAt(n)?t.push(r.random.arrayElement(A)):"A"===e.charAt(n)?t.push(r.random.arrayElement(A).toUpperCase()):"*"===e.charAt(n)?t.push(r.random.arrayElement(M)):t.push(e.charAt(n));return t.join("")}}},r.capitalize=l.default,r.slugify=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"";return e.trim().replace(/ /g,"-").replace(/[^\w\.\-]+/g,"")},r.replaceSymbols=function(e){var t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"#",n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:"\\?";if(e)return e.replace(new RegExp(t,"g"),r.random.digit).replace(new RegExp(n,"g"),r.random.letter)},r.shuffle=function(e){if(!(0,h.default)(e)){for(var t,n,i=e.length-1;i;t=r.random.number(i),n=e[--i],e[i]=e[t],e[t]=n);return e}};var n=new RegExp(e._meta.mask||"#{([A-Za-z0-9_.]+)}","g");return r.populate=function(e){for(var t=arguments.length,i=Array(t>1?t-1:0),a=1;a<t;a++)i[a-1]=arguments[a];if(!(0,h.default)(e)){var s=e;return s=e.replace(n,function(e,t){var n=(0,o.default)(r.locale,t);if(n){if((0,v.default)(n)){var a;n=(a=n).call.apply(a,[r].concat(i))}if((0,p.default)(n)){if(0==n.length)return;return r.populate.apply(r,[r.random.arrayElement(n)].concat(i))}if((0,y.default)(n))return r.populate.apply(r,[n].concat(i));if((0,w.default)(n)||(0,k.default)(n))return n}return e}),(0,y.default)(s)&&(s=r.replaceSymbols(s)),s}},r.times=function(e,t){var n=[];(0,k.default)(t)&&(t=this.random.number(t.min||1,t.max||10));for(var i=arguments.length,a=Array(i>2?i-2:0),o=2;o<i;o++)a[o-2]=arguments[o];for(var s=0;s<t;s++)n.push(e.call.apply(e,[r].concat(a)));return n},r.utimes=function(e,t){var 
n=[];(0,k.default)(t)&&(t=this.random.number(t.min||1,t.max||10));for(var i=0,a=arguments.length,o=Array(a>2?a-2:0),s=2;s<a;s++)o[s-2]=arguments[s];for(;n.length<t&&i<5*t;){var u=e.call.apply(e,[r].concat(o));n.indexOf(u)==-1&&n.push(u),i++}return n},r.generate=function(e){for(var t=void 0,i=arguments.length,a=Array(i>1?i-1:0),o=1;o<i;o++)a[o-1]=arguments[o];if((0,v.default)(e))t=e.call.apply(e,[r].concat(a));else if((0,p.default)(e))e.length>=0&&(t=r.random.arrayElement(e));else if((0,y.default)(e)){if(!n.test(e))return t=r.replaceSymbols(e);t=r.populate.apply(r,[e].concat(a))}else if((0,w.default)(e)||(0,k.default)(e))return e;if(t)return r.generate.apply(r,[t].concat(a))},t(r,r.locale,1),r}},function(e,t,r){function n(e,t,r){var n=null==e?void 0:i(e,t);return void 0===n?r:n}var i=r(101);e.exports=n},function(e,t,r){function n(e,t){t=i(t,e);for(var r=0,n=t.length;null!=e&&r<n;)e=e[a(t[r++])];return r&&r==n?e:void 0}var i=r(102),a=r(111);e.exports=n},function(e,t,r){function n(e,t){return i(e)?e:a(e,t)?[e]:o(s(e))}var i=r(2),a=r(103),o=r(105),s=r(108);e.exports=n},function(e,t,r){function n(e,t){if(i(e))return!1;var r=typeof e;return!("number"!=r&&"symbol"!=r&&"boolean"!=r&&null!=e&&!a(e))||(s.test(e)||!o.test(e)||null!=t&&e in Object(t))}var i=r(2),a=r(104),o=/\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,s=/^\w*$/;e.exports=n},function(e,t,r){function n(e){return"symbol"==typeof e||a(e)&&i(e)==o}var i=r(4),a=r(68),o="[object Symbol]";e.exports=n},function(e,t,r){var n=r(106),i=/[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g,a=/\\(\\)?/g,o=n(function(e){var t=[];return 46===e.charCodeAt(0)&&t.push(""),e.replace(i,function(e,r,n,i){t.push(n?i.replace(a,"$1"):r||e)}),t});e.exports=o},function(e,t,r){function n(e){var t=i(e,function(e){return r.size===a&&r.clear(),e}),r=t.cache;return t}var i=r(107),a=500;e.exports=n},function(e,t,r){function n(e,t){if("function"!=typeof e||null!=t&&"function"!=typeof t)throw 
new TypeError(a);var r=function(){var n=arguments,i=t?t.apply(this,n):n[0],a=r.cache;if(a.has(i))return a.get(i);var o=e.apply(this,n);return r.cache=a.set(i,o)||a,o};return r.cache=new(n.Cache||i),r}var i=r(34),a="Expected a function";n.Cache=i,e.exports=n},function(e,t,r){function n(e){return null==e?"":i(e)}var i=r(109);e.exports=n},function(e,t,r){function n(e){if("string"==typeof e)return e;if(o(e))return a(e,n)+"";if(s(e))return l?l.call(e):"";var t=e+"";return"0"==t&&1/e==-u?"-0":t}var i=r(5),a=r(110),o=r(2),s=r(104),u=1/0,c=i?i.prototype:void 0,l=c?c.toString:void 0;e.exports=n},function(e,t){function r(e,t){for(var r=-1,n=null==e?0:e.length,i=Array(n);++r<n;)i[r]=t(e[r],r,e);return i}e.exports=r},function(e,t,r){function n(e){if("string"==typeof e||i(e))return e;var t=e+"";return"0"==t&&1/e==-a?"-0":t}var i=r(104),a=1/0;e.exports=n},function(e,t,r){e.exports=r(113)},function(e,t,r){function n(e,t){var r=s(e)?i:a;return r(e,o(t))}var i=r(114),a=r(115),o=r(121),s=r(2);e.exports=n},function(e,t){function r(e,t){for(var r=-1,n=null==e?0:e.length;++r<n&&t(e[r],r,e)!==!1;);return e}e.exports=r},function(e,t,r){var n=r(116),i=r(120),a=i(n);e.exports=a},function(e,t,r){function n(e,t){return e&&i(e,t,a)}var i=r(52),a=r(117);e.exports=n},function(e,t,r){function n(e){return o(e)?i(e):a(e)}var i=r(84),a=r(118),o=r(70);e.exports=n},function(e,t,r){function n(e){if(!i(e))return a(e);var t=[];for(var r in Object(e))s.call(e,r)&&"constructor"!=r&&t.push(r);return t}var i=r(65),a=r(119),o=Object.prototype,s=o.hasOwnProperty;e.exports=n},function(e,t,r){var n=r(64),i=n(Object.keys,Object);e.exports=i},function(e,t,r){function n(e,t){return function(r,n){if(null==r)return r;if(!i(r))return e(r,n);for(var a=r.length,o=t?a:-1,s=Object(r);(t?o--:++o<a)&&n(s[o],o,s)!==!1;);return r}}var i=r(70);e.exports=n},function(e,t,r){function n(e){return"function"==typeof e?e:i}var i=r(91);e.exports=n},function(e,t,r){function n(e){return a(i(e).toLowerCase())}var 
i=r(108),a=r(123);e.exports=n},function(e,t,r){var n=r(124),i=n("toUpperCase");e.exports=i},function(e,t,r){function n(e){return function(t){t=s(t);var r=a(t)?o(t):void 0,n=r?r[0]:t.charAt(0),u=r?i(r,1).join(""):t.slice(1);return n[e]()+u}}var i=r(125),a=r(127),o=r(128),s=r(108);e.exports=n},function(e,t,r){function n(e,t,r){var n=e.length;return r=void 0===r?n:r,!t&&r>=n?e:i(e,t,r)}var i=r(126);e.exports=n},function(e,t){function r(e,t,r){var n=-1,i=e.length;t<0&&(t=-t>i?0:i+t),r=r>i?i:r,r<0&&(r+=i),i=t>r?0:r-t>>>0,t>>>=0;for(var a=Array(i);++n<i;)a[n]=e[n+t];return a}e.exports=r},function(e,t){function r(e){return l.test(e)}var n="\\ud800-\\udfff",i="\\u0300-\\u036f",a="\\ufe20-\\ufe2f",o="\\u20d0-\\u20ff",s=i+a+o,u="\\ufe0e\\ufe0f",c="\\u200d",l=RegExp("["+c+n+s+u+"]");e.exports=r},function(e,t,r){function n(e){return a(e)?o(e):i(e)}var i=r(129),a=r(127),o=r(130);e.exports=n},function(e,t){function r(e){return e.split("")}e.exports=r},function(e,t){function r(e){return e.match(k)||[]}var n="\\ud800-\\udfff",i="\\u0300-\\u036f",a="\\ufe20-\\ufe2f",o="\\u20d0-\\u20ff",s=i+a+o,u="\\ufe0e\\ufe0f",c="["+n+"]",l="["+s+"]",f="\\ud83c[\\udffb-\\udfff]",h="(?:"+l+"|"+f+")",d="[^"+n+"]",p="(?:\\ud83c[\\udde6-\\uddff]){2}",m="[\\ud800-\\udbff][\\udc00-\\udfff]",y="\\u200d",g=h+"?",v="["+u+"]?",b="(?:"+y+"(?:"+[d,p,m].join("|")+")"+v+g+")*",w=v+g+b,_="(?:"+[d+l+"?",l,p,m,c].join("|")+")",k=RegExp(f+"(?="+f+")|"+_+w,"g");e.exports=r},function(e,t,r){function n(e){return"string"==typeof e||!a(e)&&o(e)&&i(e)==s}var i=r(4),a=r(2),o=r(68),s="[object String]";e.exports=n},function(e,t,r){function n(e){return"number"==typeof e||a(e)&&i(e)==o}var i=r(4),a=r(68),o="[object Number]";e.exports=n},function(e,t){"use strict";function r(){function e(e){return e<0?(e^s)+s:e}function t(t,r){return t<r?e(4294967296-(r-t)&4294967295):t-r}function r(t,r){return e(t+r&4294967295)}function n(t,n){for(var i=0,a=0;a<32;++a)t>>>a&1&&(i=r(i,e(n<<a)));return i}var 
i,a,o,s,u;i=624,a=397,o=2567483615,s=2147483648,u=2147483647;var c=new Array(i),l=i+1;this.init_genrand=function(t){for(c[0]=e(4294967295&t),l=1;l<i;l++)c[l]=r(n(1812433253,e(c[l-1]^c[l-1]>>>30)),l),c[l]=e(4294967295&c[l])},this.init_by_array=function(a,o){var s,u,l,f;for(this.init_genrand(19650218),s=1,u=0,l=i>o?i:o;l;l--)c[s]=r(r(e(c[s]^n(e(c[s-1]^c[s-1]>>>30),1664525)),a[u]),u),c[s]=e(4294967295&c[s]),s++,u++,s>=i&&(c[0]=c[i-1],s=1),u>=o&&(u=0);for(l=i-1;l;l--)c[s]=t(e((f=c[s])^n(e(c[s-1]^c[s-1]>>>30),1566083941)),s),c[s]=e(4294967295&c[s]),s++,s>=i&&(c[0]=c[i-1],s=1);c[0]=2147483648};var f=[0,o];this.genrand_int32=function(){var t;if(l>=i){var r;for(l==i+1&&this.init_genrand(5489),r=0;r<i-a;r++)t=e(c[r]&s|c[r+1]&u),c[r]=e(c[r+a]^t>>>1^f[1&t]);for(;r<i-1;r++)t=e(c[r]&s|c[r+1]&u),c[r]=e(c[r+(a-i)]^t>>>1^f[1&t]);t=e(c[i-1]&s|c[0]&u),c[i-1]=e(c[a-1]^t>>>1^f[1&t]),l=0}return t=c[l++],t=e(t^t>>>11),t=e(t^t<<7&2636928640),t=e(t^t<<15&4022730752),t=e(t^t>>>18)},this.genrand_int31=function(){return this.genrand_int32()>>>1},this.genrand_real1=function(){return this.genrand_int32()*(1/4294967295)},this.genrand_real2=function(){return this.genrand_int32()*(1/4294967296)},this.genrand_real3=function(){return(this.genrand_int32()+.5)*(1/4294967296)},this.genrand_res53=function(){var e=this.genrand_int32()>>>5,t=this.genrand_int32()>>>6;return(67108864*e+t)*(1/9007199254740992)}}var n="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e};e.exports.MersenneTwister19937=r;var i=new r;i.init_genrand((new Date).getTime()%1e9),e.exports.rand=function(e,t){return void 0===e&&(t=0,e=32768),Math.floor(i.genrand_real2()*(e-t)+t)},e.exports.seed=function(e){if("number"!=typeof e)throw new Error("seed(S) must take numeric argument; is "+("undefined"==typeof 
e?"undefined":n(e)));i.init_genrand(e)},e.exports.seed_array=function(e){if("object"!=("undefined"==typeof e?"undefined":n(e)))throw new Error("seed_array(A) must take array of numbers; is "+("undefined"==typeof e?"undefined":n(e)));i.init_by_array(e)}},function(e,t,r){"use strict";e.exports={_meta:{id:"sv-SE",fallback:null,language:"Swedish",country:"Sweden",countryCode:"SE"},names:r(135),phone:r(139),address:r(140),company:r(142),internet:r(143)}},function(e,t,r){"use strict";e.exports={firstNameM:r(136),firstNameF:r(137),lastNameM:r(138),lastNameF:r(138),prefix:["Dr.","Prof.","PhD."],suffix:[],nameM:["#{names.prefix} #{names.firstNameM} #{names.lastNameM}","#{names.firstNameM} #{names.lastNameM}","#{names.firstNameM} #{names.lastNameM}","#{names.firstNameM} #{names.lastNameM}"],nameF:["#{names.prefix} #{names.firstNameM} #{names.lastNameF}","#{names.firstNameF} #{names.lastNameF}","#{names.firstNameF} #{names.lastNameF}","#{names.firstNameF} #{names.lastNameF}"]}},function(e,t){"use strict";e.exports=["Erik","Lars","Karl","Anders","Per","Johan","Nils","Lennart","Emil","Hans"]},function(e,t){"use strict";e.exports=["Maria","Anna","Margareta","Elisabeth","Eva","Birgitta","Kristina","Karin","Elisabet","Marie"]},function(e,t){"use strict";e.exports=["Johansson","Andersson","Karlsson","Nilsson","Eriksson","Larsson","Olsson","Persson","Svensson","Gustafsson"]},function(e,t){"use strict";e.exports={number:["####-#####","####-######"]}},function(e,t,r){"use strict";function n(e){return e&&e.__esModule?e:{default:e}}var i=r(141),a=n(i);e.exports={countryAndCode:function(){var 
e=this.random.objectElement(a.default);return{code:Object.keys(e)[0],name:e[Object.keys(e)[0]]}},state:["Blekinge","Dalarna","Gotland","Gävleborg","Göteborg","Halland","Jämtland","Jönköping","Kalmar","Kronoberg","Norrbotten","Skaraborg","Skåne","Stockholm","Södermanland","Uppsala","Värmland","Västerbotten","Västernorrland","Västmanland","Älvsborg","Örebro","Östergötland"],stateAbbr:[],city:["#{address.cityPrefix}#{address.citySuffix}"],cityPrefix:["Söder","Norr","Väst","Öster","Aling","Ar","Av","Bo","Br","Bå","Ek","En","Esk","Fal","Gäv","Göte","Ha","Helsing","Karl","Krist","Kram","Kung","Kö","Lyck","Ny"],citySuffix:["stad","land","sås","ås","holm","tuna","sta","berg","löv","borg","mora","hamn","fors","köping","by","hult","torp","fred","vik"],street:["#{address.streetName} #{address.buildingNumber}"],streetName:["#{address.streetRoot}#{address.streetSuffix}","#{address.streetNames.prefix} #{address.streetRoot}#{address.streetSuffix}","#{names.firstName}#{address.commonStreetSuffix}","#{names.lastName}#{address.commonStreetSuffix}"],streetPrefix:["Västra","Östra","Norra","Södra","Övre","Undre"],streetRoot:["Björk","Järnvägs","Ring","Skol","Skogs","Ny","Gran","Idrotts","Stor","Kyrk","Industri","Park","Strand","Skol","Trädgård","Ängs","Kyrko","Villa","Ek","Kvarn","Stations","Back","Furu","Gen","Fabriks","Åker","Bäck","Asp"],streetSuffix:["vägen","gatan","gränden","gärdet","allén"],commonStreetSuffix:["s Väg","s Gata"],buildingNumber:["###","##","#"],postCode:["#####"]}},function(e,t){"use strict";e.exports={AF:"Afghanistan",AL:"Albanien",DZ:"Algeriet",VI:"Amerikanska Jungfruöarna",AS:"Amerikanska Samoa",AD:"Andorra",AO:"Angola",AI:"Anguilla",AQ:"Antarktis",AG:"Antigua och Barbuda",AR:"Argentina",AM:"Armenien",AW:"Aruba",AC:"Ascension",AU:"Australien",AZ:"Azerbajdzjan",BS:"Bahamas",BH:"Bahrain",BD:"Bangladesh",BB:"Barbados",BE:"Belgien",BZ:"Belize",BJ:"Benin",BM:"Bermuda",BT:"Bhutan",BO:"Bolivia",BA:"Bosnien och Hercegovina",BW:"Botswana",BR:"Brasilien",VG:"Brittiska 
Jungfruöarna",IO:"Brittiska territoriet i Indiska oceanen",BN:"Brunei",BG:"Bulgarien",BF:"Burkina Faso",BI:"Burundi",KY:"Caymanöarna",CF:"Centralafrikanska republiken",EA:"Ceuta och Melilla",CL:"Chile",CO:"Colombia",CK:"Cooköarna",CR:"Costa Rica",CW:"Curaçao",CY:"Cypern",DK:"Danmark",DG:"Diego Garcia",DJ:"Djibouti",DM:"Dominica",DO:"Dominikanska republiken",EC:"Ecuador",EG:"Egypten",GQ:"Ekvatorialguinea",SV:"El Salvador",CI:"Elfenbenskusten",ER:"Eritrea",EE:"Estland",ET:"Etiopien",FK:"Falklandsöarna",FJ:"Fiji",PH:"Filippinerna",FI:"Finland",FR:"Frankrike",GF:"Franska Guyana",PF:"Franska Polynesien",TF:"Franska sydterritorierna",FO:"Färöarna",AE:"Förenade Arabemiraten",GA:"Gabon",GM:"Gambia",GE:"Georgien",GH:"Ghana",GI:"Gibraltar",GR:"Grekland",GD:"Grenada",GL:"Grönland",GP:"Guadeloupe",GU:"Guam",GT:"Guatemala",GG:"Guernsey",GN:"Guinea",
GW:"Guinea-Bissau",GY:"Guyana",HT:"Haiti",HN:"Honduras",HK:"Hongkong (S.A.R. Kina)",IN:"Indien",ID:"Indonesien",IQ:"Irak",IR:"Iran",IE:"Irland",IS:"Island",IM:"Isle of Man",IL:"Israel",IT:"Italien",JM:"Jamaica",JP:"Japan",YE:"Jemen",JE:"Jersey",JO:"Jordanien",CX:"Julön",KH:"Kambodja",CM:"Kamerun",CA:"Kanada",IC:"Kanarieöarna",CV:"Kap Verde",BQ:"Karibiska Nederländerna",KZ:"Kazakstan",KE:"Kenya",CN:"Kina",KG:"Kirgizistan",KI:"Kiribati",CC:"Kokosöarna",KM:"Komorerna",CG:"Kongo-Brazzaville",CD:"Kongo-Kinshasa",XK:"Kosovo",HR:"Kroatien",CU:"Kuba",KW:"Kuwait",LA:"Laos",LS:"Lesotho",LV:"Lettland",LB:"Libanon",LR:"Liberia",LY:"Libyen",LI:"Liechtenstein",LT:"Litauen",LU:"Luxemburg",MO:"Macao (S.A.R. Kina)",MG:"Madagaskar",MK:"Makedonien",MW:"Malawi",MY:"Malaysia",MV:"Maldiverna",ML:"Mali",MT:"Malta",MA:"Marocko",MH:"Marshallöarna",MQ:"Martinique",MR:"Mauretanien",MU:"Mauritius",YT:"Mayotte",MX:"Mexiko",FM:"Mikronesien",MZ:"Moçambique",MD:"Moldavien",MC:"Monaco",MN:"Mongoliet",ME:"Montenegro",MS:"Montserrat",MM:"Myanmar (Burma)",NA:"Namibia",NR:"Nauru",NL:"Nederländerna",NP:"Nepal",NI:"Nicaragua",NE:"Niger",NG:"Nigeria",NU:"Niue",KP:"Nordkorea",MP:"Nordmarianerna",NF:"Norfolkön",NO:"Norge",NC:"Nya Kaledonien",NZ:"Nya Zeeland",OM:"Oman",PK:"Pakistan",PW:"Palau",PS:"Palestinska territorierna",PA:"Panama",PG:"Papua Nya Guinea",PY:"Paraguay",PE:"Peru",PN:"Pitcairnöarna",PL:"Polen",PT:"Portugal",PR:"Puerto Rico",QA:"Qatar",RE:"Réunion",RO:"Rumänien",RW:"Rwanda",RU:"Ryssland",BL:"S:t Barthélemy",SH:"S:t Helena",KN:"S:t Kitts och Nevis",LC:"S:t Lucia",MF:"S:t Martin",PM:"S:t Pierre och Miquelon",VC:"S:t Vincent och Grenadinerna",SB:"Salomonöarna",WS:"Samoa",SM:"San Marino",ST:"São Tomé och Príncipe",SA:"Saudiarabien",CH:"Schweiz",SN:"Senegal",RS:"Serbien",SC:"Seychellerna",SL:"Sierra Leone",SG:"Singapore",SX:"Sint Maarten",SK:"Slovakien",SI:"Slovenien",SO:"Somalia",ES:"Spanien",LK:"Sri Lanka",GB:"Storbritannien",SD:"Sudan",SR:"Surinam",SJ:"Svalbard och Jan 
Mayen",SE:"Sverige",SZ:"Swaziland",ZA:"Sydafrika",GS:"Sydgeorgien och Sydsandwichöarna",KR:"Sydkorea",SS:"Sydsudan",SY:"Syrien",TJ:"Tadzjikistan",TW:"Taiwan",TZ:"Tanzania",TD:"Tchad",TH:"Thailand",CZ:"Tjeckien",TG:"Togo",TK:"Tokelau",TO:"Tonga",TT:"Trinidad och Tobago",TA:"Tristan da Cunha",TN:"Tunisien",TR:"Turkiet",TM:"Turkmenistan",TC:"Turks- och Caicosöarna",TV:"Tuvalu",DE:"Tyskland",UG:"Uganda",UA:"Ukraina",HU:"Ungern",UY:"Uruguay",US:"USA",UM:"USA:s yttre öar",UZ:"Uzbekistan",VU:"Vanuatu",VA:"Vatikanstaten",VE:"Venezuela",VN:"Vietnam",BY:"Vitryssland",EH:"Västsahara",WF:"Wallis- och Futunaöarna",ZM:"Zambia",ZW:"Zimbabwe",AX:"Åland",AT:"Österrike",TL:"Östtimor"}},function(e,t){"use strict";e.exports={name:["#{names.lastName} #{company.suffix}","#{names.lastName}-#{company.suffix}","#{names.lastName}, #{names.lastName} #{company.suffix}"],suffix:["Gruppen","AB","HB","Group","Investment","Kommanditbolag","Aktiebolag"]}},function(e,t){"use strict";e.exports={tld:["se","nu","info","com","org"]}},function(e,t,r){"use strict";e.exports={_meta:{id:"default",fallback:null,mask:"#{([A-Za-z0-9_.]+)}",language:"English",country:"United Kingdom",countryCode:"UK"},names:r(145),phone:r(149),address:r(150),company:r(156),internet:r(157),lorem:r(220),date:r(223),misc:r(225),entity:r(231)}},function(e,t,r){"use strict";e.exports={firstNameM:r(146),firstNameF:r(147),firstName:["#{names.firstNameM}","#{names.firstNameF}"],lastNameM:r(148),lastNameF:r(148),lastName:["#{names.lastNameM}","#{names.lastNameF}"],prefix:["Mr.","Mrs.","Ms.","Miss","Dr."],suffix:["Jr.","Sr.","I","II","III","IV","V","MD","DDS","PhD","DVM"],nameM:["#{names.prefix} #{names.firstNameM} #{names.lastNameM}","#{names.firstNameM} #{names.lastNameM} #{names.suffix}","#{names.firstNameM} #{names.lastNameM}","#{names.firstNameM} #{names.lastNameM}","#{names.firstNameM} #{names.lastNameM}","#{names.firstNameM} #{names.lastNameM}"],nameF:["#{names.prefix} #{names.firstNameF} #{names.lastNameF}","#{names.firstNameF} 
#{names.lastNameF} #{names.suffix}","#{names.firstNameF} #{names.lastNameF}","#{names.firstNameF} #{names.lastNameF}","#{names.firstNameF} #{names.lastNameF}","#{names.firstNameF} #{names.lastNameF}"],name:["#{names.nameM}","#{names.nameF}"]}},function(e,t,r){(function(e){"use strict";e.exports=["James","John","Robert","Michael","William","David","Richard","Charles","Joseph","Thomas","Christopher","Daniel","Paul","Mark","Donald","George","Kenneth","Steven","Edward","Brian","Ronald","Anthony","Kevin","Jason","Matthew","Gary","Timothy","Jose","Larry","Jeffrey","Frank","Scott","Eric","Stephen","Andrew","Raymond","Gregory","Joshua","Jerry","Dennis","Walter","Patrick","Peter","Harold","Douglas","Henry","Carl","Arthur","Ryan","Roger","Joe","Juan","Jack","Albert","Jonathan","Justin","Terry","Gerald","Keith","Samuel","Willie","Ralph","Lawrence","Nicholas","Roy","Benjamin","Bruce","Brandon","Adam","Harry","Fred","Wayne","Billy","Steve","Louis","Jeremy","Aaron","Randy","Howard","Eugene","Carlos","Russell","Bobby","Victor","Martin","Ernest","Phillip","Todd","Jesse","Craig","Alan","Shawn","Clarence","Sean","Philip","Chris","Johnny","Earl","Jimmy","Antonio","Danny","Bryan","Tony","Luis","Mike","Stanley","Leonard","Nathan","Dale","Manuel","Rodney","Curtis","Norman","Allen","Marvin","Vincent","Glenn","Jeffery","Travis","Jeff","Chad","Jacob","Lee","Melvin","Alfred","Kyle","Francis","Bradley","Jesus","Herbert","Frederick","Ray","Joel","Edwin","Don","Eddie","Ricky","Troy","Randall","Barry","Alexander","Bernard","Mario","Leroy","Francisco","Marcus","Micheal","Theodore","Clifford","Miguel","Oscar","Jay","Jim","Tom","Calvin","Alex","Jon","Ronnie","Bill","Lloyd","Tommy","Leon","Derek","Warren","Darrell","Jerome","Floyd","Leo","Alvin","Tim","Wesley","Gordon","Dean","Greg","Jorge","Dustin","Pedro","Derrick","Dan","Lewis","Zachary","Corey","Herman","Maurice","Vernon","Roberto","Clyde","Glen","Hector","Shane","Ricardo","Sam","Rick","Lester","Brent","Ramon","Charlie","Tyler","Gilbert","Gene",
"Marc","Reginald","Ruben","Brett","Angel","Nathaniel","Rafael","Leslie","Edgar","Milton","Raul","Ben","Chester","Cecil","Duane","Franklin","Andre","Elmer","Brad","Gabriel","Ron","Mitchell","Roland","Arnold","Harvey","Jared","Adrian","Karl","Cory","Claude","Erik","Darryl","Jamie","Neil","Jessie","Christian","Javier","Fernando","Clinton","Ted","Mathew","Tyrone","Darren","Lonnie","Lance","Cody","Julio","Kelly","Kurt","Allan","Nelson","Guy","Clayton","Hugh","Max","Dwayne","Dwight","Armando","Felix","Jimmie","Everett","Jordan","Ian","Wallace","Ken","Bob","Jaime","Casey","Alfredo","Alberto","Dave","Ivan","Johnnie","Sidney","Byron","Julian","Isaac","Morris","Clifton","Willard","Daryl","Ross","Virgil","Andy","Marshall","Salvador","Perry","Kirk","Sergio","Marion","Tracy","Seth","Kent","Terrance","Rene","Eduardo","Terrence","Enrique","Freddie","Wade","Austin","Stuart","Fredrick","Arturo","Alejandro","Jackie","Joey","Nick","Luther","Wendell","Jeremiah","Evan","Julius","Dana","Donnie","Otis","Shannon","Trevor","Oliver","Luke","Homer","Gerard","Doug","Kenny","Hubert","Angelo","Shaun","Lyle","Matt","Lynn","Alfonso","Orlando","Rex","Carlton","Ernesto","Cameron","Neal","Pablo","Lorenzo","Omar","Wilbur","Blake","Grant","Horace","Roderick","Kerry","Abraham","Willis","Rickey","Jean","Ira","Andres","Cesar","Johnathan","Malcolm","Rudolph","Damon","Kelvin","Rudy","Preston","Alton","Archie","Marco","Wm","Pete","Randolph","Garry","Geoffrey","Jonathon","Felipe","Bennie","Gerardo","Ed","Dominic","Robin","Loren","Delbert","Colin","Guillermo","Earnest","Lucas","Benny","Noel","Spencer","Rodolfo","Myron","Edmund","Garrett","Salvatore","Cedric","Lowell","Gregg","Sherman","Wilson","Devin","Sylvester","Kim","Roosevelt","Israel","Jermaine","Forrest","Wilbert","Leland","Simon","Guadalupe","Clark","Irving","Carroll","Bryant","Owen","Rufus","Woodrow","Sammy","Kristopher","Mack","Levi","Marcos","Gustavo","Jake","Lionel","Marty","Taylor","Ellis","Dallas","Gilberto","Clint","Nicolas","Laurence","Ismael","
Orville","Drew","Jody","Ervin","Dewey","Al","Wilfred","Josh","Hugo","Ignacio","Caleb","Tomas","Sheldon","Erick","Frankie","Stewart","Doyle","Darrel","Rogelio","Terence","Santiago","Alonzo","Elias","Bert","Elbert","Ramiro","Conrad","Pat","Noah","Grady","Phil","Cornelius","Lamar","Rolando","Clay","Percy","Dexter","Bradford","Merle","Darin","Amos","Terrell","Moses","Irvin","Saul","Roman","Darnell","Randal","Tommie","Timmy","Darrin","Winston","Brendan","Toby","Van","Abel","Dominick","Boyd","Courtney","Jan","Emilio","Elijah","Cary","Domingo","Santos","Aubrey","Emmett","Marlon","Emanuel","Jerald","Edmond"]}).call(t,r(56)(e))},function(e,t,r){(function(e){"use strict";e.exports=["Mary","Patricia","Linda","Barbara","Elizabeth","Jennifer","Maria","Susan","Margaret","Dorothy","Lisa","Nancy","Karen","Betty","Helen","Sandra","Donna","Carol","Ruth","Sharon","Michelle","Laura","Sarah","Kimberly","Deborah","Jessica","Shirley","Cynthia","Angela","Melissa","Brenda","Amy","Anna","Rebecca","Virginia","Kathleen","Pamela","Martha","Debra","Amanda","Stephanie","Carolyn","Christine","Marie","Janet","Catherine","Frances","Ann","Joyce","Diane","Alice","Julie","Heather","Teresa","Doris","Gloria","Evelyn","Jean","Cheryl","Mildred","Katherine","Joan","Ashley","Judith","Rose","Janice","Kelly","Nicole","Judy","Christina","Kathy","Theresa","Beverly","Denise","Tammy","Irene","Jane","Lori","Rachel","Marilyn","Andrea","Kathryn","Louise","Sara","Anne","Jacqueline","Wanda","Bonnie","Julia","Ruby","Lois","Tina","Phyllis","Norma","Paula","Diana","Annie","Lillian","Emily","Robin","Peggy","Crystal","Gladys","Rita","Dawn","Connie","Florence","Tracy","Edna","Tiffany","Carmen","Rosa","Cindy","Grace","Wendy","Victoria","Edith","Kim","Sherry","Sylvia","Josephine","Thelma","Shannon","Sheila","Ethel","Ellen","Elaine","Marjorie","Carrie","Charlotte","Monica","Esther","Pauline","Emma","Juanita","Anita","Rhonda","Hazel","Amber","Eva","Debbie","April","Leslie","Clara","Lucille","Jamie","Joanne","Eleanor","Valerie","
Danielle","Megan","Alicia","Suzanne","Michele","Gail","Bertha","Darlene","Veronica","Jill","Erin","Geraldine","Lauren","Cathy","Joann","Lorraine","Lynn","Sally","Regina","Erica","Beatrice","Dolores","Bernice","Audrey","Yvonne","Annette","June","Samantha","Marion","Dana","Stacy","Ana","Renee","Ida","Vivian","Roberta","Holly","Brittany","Melanie","Loretta","Yolanda","Jeanette","Laurie","Katie","Kristen","Vanessa","Alma","Sue","Elsie","Beth","Jeanne","Vicki","Carla","Tara","Rosemary","Eileen","Terri","Gertrude","Lucy","Tonya","Ella","Stacey","Wilma","Gina","Kristin","Jessie","Natalie","Agnes","Vera","Willie","Charlene","Bessie","Delores","Melinda","Pearl","Arlene","Maureen","Colleen","Allison","Tamara","Joy","Georgia","Constance","Lillie","Claudia","Jackie","Marcia","Tanya","Nellie","Minnie","Marlene","Heidi","Glenda","Lydia","Viola","Courtney","Marian","Stella","Caroline","Dora","Jo","Vickie","Mattie","Terry","Maxine","Irma","Mabel","Marsha","Myrtle","Lena","Christy","Deanna","Patsy","Hilda","Gwendolyn","Jennie","Nora","Margie","Nina","Cassandra","Leah","Penny","Kay","Priscilla","Naomi","Carole","Brandy","Olga","Billie","Dianne","Tracey","Leona","Jenny","Felicia","Sonia","Miriam","Velma","Becky","Bobbie","Violet","Kristina","Toni","Misty","Mae","Shelly","Daisy","Ramona","Sherri","Erika","Katrina","Claire","Lindsey","Lindsay","Geneva","Guadalupe","Belinda","Margarita","Sheryl","Cora","Faye","Ada","Natasha","Sabrina","Isabel","Marguerite","Hattie","Harriet","Molly","Cecilia","Kristi","Brandi","Blanche","Sandy","Rosie","Joanna","Iris","Eunice","Angie","Inez","Lynda","Madeline","Amelia","Alberta","Genevieve","Monique","Jodi","Janie","Maggie","Kayla","Sonya","Jan","Lee","Kristine","Candace","Fannie","Maryann","Opal","Alison","Yvette","Melody","Luz","Susie","Olivia","Flora","Shelley","Kristy","Mamie","Lula","Lola","Verna","Beulah","Antoinette","Candice","Juana","Jeannette","Pam","Kelli","Hannah","Whitney","Bridget","Karla","Celia","Latoya","Patty","Shelia","Gayle","Della","
Vicky","Lynne","Sheri","Marianne","Kara","Jacquelyn","Erma","Blanca","Myra","Leticia","Pat","Krista","Roxanne","Angelica","Johnnie","Robyn","Francis","Adrienne","Rosalie","Alexandra","Brooke","Bethany","Sadie","Bernadette","Traci","Jody","Kendra","Jasmine","Nichole","Rachael","Chelsea","Mable","Ernestine","Muriel","Marcella","Elena","Krystal","Angelina","Nadine","Kari","Estelle","Dianna","Paulette","Lora","Mona","Doreen","Rosemarie","Angel","Desiree","Antonia","Hope","Ginger","Janis","Betsy","Christie","Freda","Mercedes","Meredith","Lynette","Teri","Cristina","Eula","Leigh","Meghan","Sophia","Eloise","Rochelle","Gretchen","Cecelia","Raquel","Henrietta","Alyssa","Jana","Kelley","Gwen","Kerry","Jenna","Tricia","Laverne","Olive","Alexis","Tasha","Silvia","Elvira","Casey","Delia","Sophie","Kate","Patti","Lorena","Kellie","Sonja","Lila","Lana","Darla","May","Mindy","Essie","Mandy","Lorene","Elsa","Josefina","Jeannie","Miranda","Dixie","Lucia","Marta","Faith","Lela","Johanna","Shari","Camille","Tami","Shawna","Elisa","Ebony","Melba","Ora","Nettie","Tabitha","Ollie","Jaime","Winifred","Kristie"]}).call(t,r(56)(e))},function(e,t){"use 
strict";e.exports=["Abbott","Abernathy","Abshire","Adams","Altenwerth","Anderson","Ankunding","Armstrong","Auer","Aufderhar","Bahringer","Bailey","Balistreri","Barrows","Bartell","Bartoletti","Barton","Bashirian","Batz","Bauch","Baumbach","Bayer","Beahan","Beatty","Bechtelar","Becker","Bednar","Beer","Beier","Berge","Bergnaum","Bergstrom","Bernhard","Bernier","Bins","Blanda","Blick","Block","Bode","Boehm","Bogan","Bogisich","Borer","Bosco","Botsford","Boyer","Boyle","Bradtke","Brakus","Braun","Breitenberg","Brekke","Brown","Bruen","Buckridge","Carroll","Carter","Cartwright","Casper","Cassin","Champlin","Christiansen","Cole","Collier","Collins","Conn","Connelly","Conroy","Considine","Corkery","Cormier","Corwin","Cremin","Crist","Crona","Cronin","Crooks","Cruickshank","Cummerata","Cummings","Dach","D'Amore","Daniel","Dare","Daugherty","Davis","Deckow","Denesik","Dibbert","Dickens","Dicki","Dickinson","Dietrich","Donnelly","Dooley","Douglas","Doyle","DuBuque","Durgan","Ebert","Effertz","Eichmann","Emard","Emmerich","Erdman","Ernser","Fadel","Fahey","Farrell","Fay","Feeney","Feest","Feil","Ferry","Fisher","Flatley","Frami","Franecki","Friesen","Fritsch","Funk","Gaylord","Gerhold","Gerlach","Gibson","Gislason","Gleason","Gleichner","Glover","Goldner","Goodwin","Gorczany","Gottlieb","Goyette","Grady","Graham","Grant","Green","Greenfelder","Greenholt","Grimes","Gulgowski","Gusikowski","Gutkowski","Gutmann","Haag","Hackett","Hagenes","Hahn","Haley","Halvorson","Hamill","Hammes","Hand","Hane","Hansen","Harber","Harris","Hartmann","Harvey","Hauck","Hayes","Heaney","Heathcote","Hegmann","Heidenreich","Heller","Herman","Hermann","Hermiston","Herzog","Hessel","Hettinger","Hickle","Hilll","Hills","Hilpert","Hintz","Hirthe","Hodkiewicz","Hoeger","Homenick","Hoppe","Howe","Howell","Hudson","Huel","Huels","Hyatt","Jacobi","Jacobs","Jacobson","Jakubowski","Jaskolski","Jast","Jenkins","Jerde","Johns","Johnson","Johnston","Jones","Kassulke","Kautzer","Keebler","Keeling","Kemmer","Kerlu
ke","Kertzmann","Kessler","Kiehn","Kihn","Kilback","King","Kirlin","Klein","Kling","Klocko","Koch","Koelpin","Koepp","Kohler","Konopelski","Koss","Kovacek","Kozey","Krajcik","Kreiger","Kris","Kshlerin","Kub","Kuhic","Kuhlman","Kuhn","Kulas","Kunde","Kunze","Kuphal","Kutch","Kuvalis","Labadie","Lakin","Lang","Langosh","Langworth","Larkin","Larson","Leannon","Lebsack","Ledner","Leffler","Legros","Lehner","Lemke","Lesch","Leuschke","Lind","Lindgren","Littel","Little","Lockman","Lowe","Lubowitz","Lueilwitz","Luettgen","Lynch","Macejkovic","MacGyver","Maggio","Mann","Mante","Marks","Marquardt","Marvin","Mayer","Mayert","McClure","McCullough","McDermott","McGlynn","McKenzie","McLaughlin","Medhurst","Mertz","Metz","Miller","Mills","Mitchell","Moen","Mohr","Monahan","Moore","Morar","Morissette","Mosciski","Mraz","Mueller","Muller","Murazik","Murphy","Murray","Nader","Nicolas","Nienow","Nikolaus","Nitzsche","Nolan","Oberbrunner","O'Connell","O'Conner","O'Hara","O'Keefe","O'Kon","Okuneva","Olson","Ondricka","O'Reilly","Orn","Ortiz","Osinski","Pacocha","Padberg","Pagac","Parisian","Parker","Paucek","Pfannerstill","Pfeffer","Pollich","Pouros","Powlowski","Predovic","Price","Prohaska","Prosacco","Purdy","Quigley","Quitzon","Rath","Ratke","Rau","Raynor","Reichel","Reichert","Reilly","Reinger","Rempel","Renner","Reynolds","Rice","Rippin","Ritchie","Robel","Roberts","Rodriguez","Rogahn","Rohan","Rolfson","Romaguera","Roob","Rosenbaum","Rowe","Ruecker","Runolfsdottir","Runolfsson","Runte","Russel","Rutherford","Ryan","Sanford","Satterfield","Sauer","Sawayn","Schaden","Schaefer","Schamberger","Schiller","Schimmel","Schinner","Schmeler","Schmidt","Schmitt","Schneider","Schoen","Schowalter","Schroeder","Schulist","Schultz","Schumm","Schuppe","Schuster","Senger","Shanahan","Shields","Simonis","Sipes","Skiles","Smith","Smitham","Spencer","Spinka","Sporer","Stamm","Stanton","Stark","Stehr","Steuber","Stiedemann","Stokes","Stoltenberg","Stracke","Streich","Stroman","Strosin","Swaniawski","
Swift","Terry","Thiel","Thompson","Tillman","Torp","Torphy","Towne","Toy","Trantow","Tremblay","Treutel","Tromp","Turcotte","Turner","Ullrich","Upton","Vandervort","Veum","Volkman","Von","VonRueden","Waelchi","Walker","Walsh","Walter","Ward","Waters","Watsica","Weber","Wehner","Weimann","Weissnat","Welch","West","White","Wiegand","Wilderman","Wilkinson","Will","Williamson","Willms","Windler","Wintheiser","Wisoky","Wisozk","Witting","Wiza","Wolf","Wolff","Wuckert","Wunsch","Wyman","Yost","Yundt","Zboncak","Zemlak","Ziemann","Zieme","Zulauf"]},function(e,t){"use strict";e.exports={number:["###-###-####","(###) ###-####","1-###-###-####","###.###.####","###-###-####","(###) ###-####","1-###-###-####","###.###.####","###-###-#### x###","(###) ###-#### x###","1-###-###-#### x###","###.###.#### x###","###-###-#### x####","(###) ###-#### x####","1-###-###-#### x####","###.###.#### x####","###-###-#### x#####","(###) ###-#### x#####","1-###-###-#### x#####","###.###.#### x#####"]}},function(e,t,r){"use strict";function n(e){return e&&e.__esModule?e:{default:e}}var i=r(151),a=n(i);e.exports={countryAndCode:function(){var e=this.random.objectElement(a.default);return{code:Object.keys(e)[0],name:e[Object.keys(e)[0]]}},country:function(){return this.address.countryAndCode().name},countryCode:function(){return this.address.countryAndCode().code},state:r(152),stateAbbr:r(153),city:["#{address.cityPrefix} #{names.firstName}#{address.citySuffix}","#{address.cityPrefix} #{names.firstName}","#{names.firstName}#{address.citySuffix}","#{names.lastName}#{address.citySuffix}"],cityPrefix:["North","East","West","South","New","Lake","Port"],citySuffix:["town","ton","land","ville","berg","burgh","borough","bury","view","port","mouth","stad","furt","chester","mouth","fort","haven","side","shire"],street:["#{address.buildingNumber} #{address.streetName}","#{address.buildingNumber} #{address.streetName}","#{address.buildingNumber} #{address.streetName} Apt. 
###","#{address.buildingNumber} #{address.streetName} Suite ###"],streetName:["#{names.firstName} #{address.streetSuffix}","#{names.lastName} #{address.streetSuffix}"],streetSuffix:r(154),buildingNumber:["#####","####","###"],postCode:["#####","#####-####"],geoLocation:function(){return{latitude:this.random.number(18e5)/1e4-90,longitude:this.random.number(36e5)/1e4-180}},altitude:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};return this.random.number(e.min||0,e.max||8848)},geoLocationNearBy:r(155)}},function(e,t){"use strict";e.exports={AF:"Afghanistan",AX:"Åland Islands",AL:"Albania",DZ:"Algeria",AS:"American Samoa",AD:"Andorra",AO:"Angola",AI:"Anguilla",AQ:"Antarctica",AG:"Antigua & Barbuda",AR:"Argentina",AM:"Armenia",AW:"Aruba",AC:"Ascension Island",AU:"Australia",AT:"Austria",AZ:"Azerbaijan",BS:"Bahamas",BH:"Bahrain",BD:"Bangladesh",BB:"Barbados",BY:"Belarus",BE:"Belgium",BZ:"Belize",BJ:"Benin",BM:"Bermuda",BT:"Bhutan",BO:"Bolivia",BA:"Bosnia & Herzegovina",BW:"Botswana",BR:"Brazil",IO:"British Indian Ocean Territory",VG:"British Virgin Islands",BN:"Brunei",BG:"Bulgaria",BF:"Burkina Faso",BI:"Burundi",KH:"Cambodia",CM:"Cameroon",CA:"Canada",IC:"Canary Islands",CV:"Cape Verde",BQ:"Caribbean Netherlands",KY:"Cayman Islands",CF:"Central African Republic",EA:"Ceuta & Melilla",TD:"Chad",CL:"Chile",CN:"China",CX:"Christmas Island",CC:"Cocos (Keeling) Islands",CO:"Colombia",KM:"Comoros",CG:"Congo - Brazzaville",CD:"Congo - Kinshasa",CK:"Cook Islands",CR:"Costa Rica",CI:"Côte d’Ivoire",HR:"Croatia",CU:"Cuba",CW:"Curaçao",CY:"Cyprus",CZ:"Czech Republic",DK:"Denmark",DG:"Diego Garcia",DJ:"Djibouti",DM:"Dominica",DO:"Dominican Republic",EC:"Ecuador",EG:"Egypt",SV:"El Salvador",GQ:"Equatorial Guinea",ER:"Eritrea",EE:"Estonia",ET:"Ethiopia",FK:"Falkland Islands",FO:"Faroe Islands",FJ:"Fiji",FI:"Finland",FR:"France",GF:"French Guiana",PF:"French Polynesia",TF:"French Southern 
Territories",GA:"Gabon",GM:"Gambia",GE:"Georgia",DE:"Germany",GH:"Ghana",GI:"Gibraltar",GR:"Greece",GL:"Greenland",GD:"Grenada",GP:"Guadeloupe",GU:"Guam",GT:"Guatemala",GG:"Guernsey",GN:"Guinea",GW:"Guinea-Bissau",GY:"Guyana",HT:"Haiti",HN:"Honduras",HK:"Hong Kong SAR China",HU:"Hungary",IS:"Iceland",IN:"India",ID:"Indonesia",IR:"Iran",IQ:"Iraq",IE:"Ireland",IM:"Isle of Man",IL:"Israel",IT:"Italy",JM:"Jamaica",JP:"Japan",JE:"Jersey",JO:"Jordan",KZ:"Kazakhstan",KE:"Kenya",KI:"Kiribati",XK:"Kosovo",KW:"Kuwait",KG:"Kyrgyzstan",LA:"Laos",LV:"Latvia",LB:"Lebanon",LS:"Lesotho",LR:"Liberia",LY:"Libya",LI:"Liechtenstein",LT:"Lithuania",LU:"Luxembourg",MO:"Macau SAR China",MK:"Macedonia",MG:"Madagascar",MW:"Malawi",MY:"Malaysia",MV:"Maldives",ML:"Mali",MT:"Malta",MH:"Marshall Islands",MQ:"Martinique",MR:"Mauritania",MU:"Mauritius",YT:"Mayotte",MX:"Mexico",FM:"Micronesia",MD:"Moldova",MC:"Monaco",MN:"Mongolia",ME:"Montenegro",MS:"Montserrat",MA:"Morocco",MZ:"Mozambique",MM:"Myanmar (Burma)",NA:"Namibia",NR:"Nauru",NP:"Nepal",NL:"Netherlands",NC:"New Caledonia",NZ:"New Zealand",NI:"Nicaragua",NE:"Niger",NG:"Nigeria",NU:"Niue",NF:"Norfolk Island",KP:"North Korea",MP:"Northern Mariana Islands",NO:"Norway",OM:"Oman",PK:"Pakistan",PW:"Palau",PS:"Palestinian Territories",PA:"Panama",PG:"Papua New Guinea",PY:"Paraguay",PE:"Peru",PH:"Philippines",PN:"Pitcairn Islands",PL:"Poland",PT:"Portugal",PR:"Puerto Rico",QA:"Qatar",RE:"Réunion",RO:"Romania",RU:"Russia",RW:"Rwanda",WS:"Samoa",SM:"San Marino",ST:"São Tomé & Príncipe",SA:"Saudi Arabia",SN:"Senegal",RS:"Serbia",SC:"Seychelles",SL:"Sierra Leone",SG:"Singapore",SX:"Sint Maarten",SK:"Slovakia",SI:"Slovenia",SB:"Solomon Islands",SO:"Somalia",ZA:"South Africa",GS:"South Georgia & South Sandwich Islands",KR:"South Korea",SS:"South Sudan",ES:"Spain",LK:"Sri Lanka",BL:"St. Barthélemy",SH:"St. Helena",KN:"St. Kitts & Nevis",LC:"St. Lucia",MF:"St. Martin",PM:"St. Pierre & Miquelon",VC:"St. 
Vincent & Grenadines",SD:"Sudan",SR:"Suriname",SJ:"Svalbard & Jan Mayen",SZ:"Swaziland",SE:"Sweden",CH:"Switzerland",SY:"Syria",TW:"Taiwan",TJ:"Tajikistan",TZ:"Tanzania",TH:"Thailand",TL:"Timor-Leste",TG:"Togo",TK:"Tokelau",TO:"Tonga",TT:"Trinidad & Tobago",TA:"Tristan da Cunha",TN:"Tunisia",TR:"Turkey",TM:"Turkmenistan",TC:"Turks & Caicos Islands",TV:"Tuvalu",UM:"U.S. Outlying Islands",VI:"U.S. Virgin Islands",UG:"Uganda",UA:"Ukraine",AE:"United Arab Emirates",GB:"United Kingdom",US:"United States",UY:"Uruguay",UZ:"Uzbekistan",VU:"Vanuatu",VA:"Vatican City",VE:"Venezuela",VN:"Vietnam",WF:"Wallis & Futuna",EH:"Western Sahara",YE:"Yemen",ZM:"Zambia",ZW:"Zimbabwe"}},function(e,t,r){(function(e){"use strict";e.exports=["Alabama","Alaska","Arizona","Arkansas","California","Colorado","Connecticut","Delaware","Florida","Georgia","Hawaii","Idaho","Illinois","Indiana","Iowa","Kansas","Kentucky","Louisiana","Maine","Maryland","Massachusetts","Michigan","Minnesota","Mississippi","Missouri","Montana","Nebraska","Nevada","New Hampshire","New Jersey","New Mexico","New York","North Carolina","North Dakota","Ohio","Oklahoma","Oregon","Pennsylvania","Rhode Island","South Carolina","South Dakota","Tennessee","Texas","Utah","Vermont","Virginia","Washington","West Virginia","Wisconsin","Wyoming"]}).call(t,r(56)(e))},function(e,t,r){(function(e){"use strict";e.exports=["AL","AK","AZ","AR","CA","CO","CT","DE","FL","GA","HI","ID","IL","IN","IA","KS","KY","LA","ME","MD","MA","MI","MN","MS","MO","MT","NE","NV","NH","NJ","NM","NY","NC","ND","OH","OK","OR","PA","RI","SC","SD","TN","TX","UT","VT","VA","WA","WV","WI","WY"]}).call(t,r(56)(e))},function(e,t,r){(function(e){"use 
strict";e.exports=["Alley","Avenue","Branch","Bridge","Brook","Brooks","Burg","Burgs","Bypass","Camp","Canyon","Cape","Causeway","Center","Centers","Circle","Circles","Cliff","Cliffs","Club","Common","Corner","Corners","Course","Court","Courts","Cove","Coves","Creek","Crescent","Crest","Crossing","Crossroad","Curve","Dale","Dam","Divide","Drive","Drive","Drives","Estate","Estates","Expressway","Extension","Extensions","Fall","Falls","Ferry","Field","Fields","Flat","Flats","Ford","Fords","Forest","Forge","Forges","Fork","Forks","Fort","Freeway","Garden","Gardens","Gateway","Glen","Glens","Green","Greens","Grove","Groves","Harbor","Harbors","Haven","Heights","Highway","Hill","Hills","Hollow","Inlet","Inlet","Island","Island","Islands","Islands","Isle","Isle","Junction","Junctions","Key","Keys","Knoll","Knolls","Lake","Lakes","Land","Landing","Lane","Light","Lights","Loaf","Lock","Locks","Locks","Lodge","Lodge","Loop","Mall","Manor","Manors","Meadow","Meadows","Mews","Mill","Mills","Mission","Mission","Motorway","Mount","Mountain","Mountain","Mountains","Mountains","Neck","Orchard","Oval","Overpass","Park","Parks","Parkway","Parkways","Pass","Passage","Path","Pike","Pine","Pines","Place","Plain","Plains","Plains","Plaza","Plaza","Point","Points","Port","Port","Ports","Ports","Prairie","Prairie","Radial","Ramp","Ranch","Rapid","Rapids","Rest","Ridge","Ridges","River","Road","Road","Roads","Roads","Route","Row","Rue","Run","Shoal","Shoals","Shore","Shores","Skyway","Spring","Springs","Springs","Spur","Spurs","Square","Square","Squares","Squares","Station","Station","Stravenue","Stravenue","Stream","Stream","Street","Street","Streets","Summit","Summit","Terrace","Throughway","Trace","Track","Trafficway","Trail","Trail","Tunnel","Tunnel","Turnpike","Turnpike","Underpass","Union","Unions","Valley","Valleys","Via","Viaduct","View","Views","Village","Village","Villages","Ville","Vista","Vista","Walk","Walks","Wall","Way","Ways","Well","Wells"]}).call(t,r(56)(e))},function(e,t
){"use strict";e.exports=function(e){function t(e){return e*(Math.PI/180)}function r(e){return e*(180/Math.PI)}function n(e){return.621371*e}function i(e,i,a,o){var s=6378.137,u=o?a:n(a),c=t(e.latitude),l=t(e.longitude),f=Math.asin(Math.sin(c)*Math.cos(u/s)+Math.cos(c)*Math.sin(u/s)*Math.cos(i)),h=l+Math.atan2(Math.sin(i)*Math.sin(u/s)*Math.cos(c),Math.cos(u/s)-Math.sin(c)*Math.sin(f));return h>t(180)?h-=t(360):h<t(-180)&&(h+=t(360)),[r(f),r(h)]}var a=arguments.length>1&&void 0!==arguments[1]?arguments[1]:10,o=!(arguments.length>2&&void 0!==arguments[2])||arguments[2];if(void 0===e)return this.address.geoLocation();var s=i(e,t(this.random.number(360)),a,o);return{latitude:s[0],longitude:s[1]}}},function(e,t){"use strict";e.exports={name:["#{names.lastName} #{company.suffix}","#{names.lastName}-#{names.lastName} #{company.suffix}","#{names.lastName}, #{names.lastName} and #{names.lastName} #{company.suffix}"],suffix:["Ltd.","Inc.","Corp.","LLC","Group"]}},function(e,t,r){"use strict";function n(e){return e&&e.__esModule?e:{default:e}}var i=r(158),a=n(i),o=r(159),s=n(o),u=r(218),c=n(u);e.exports={tld:r(219),userName:function(e,t){return e=this.slugify(e?e:this.populate("#{names.firstName}")).toLowerCase(),t=this.slugify(t?t:this.populate("#{names.lastName}")).toLowerCase(),this.populate(this.random.arrayElement([e+"."+t,e+"."+t+"##",e+"."+t+"####",e+"_"+t,e+"_"+t+"##",""+e+t+"##",e+"##"]))},password:function(e,t,r,n){return(0,a.default)(e,t,r,n)},domain:function(){return this.slugify(this.populate(this.random.arrayElement(["#{names.firstName}","#{names.firstName}#{names.lastName}","#{names.firstName}-#{names.lastName}"]))).toLowerCase()+"."+this.random.arrayElement(e.exports.tld)},url:function(e,t){null==e&&(e=this.random.boolean()),null==t&&(t=!this.random.boolean());var r=e?"https://":"http://";return t&&(r+="www."),r+this.internet.domain()},emailDomain:["gmail.com","yahoo.com","hotmail.com"],email:function(e,t,r){return 
e=this.slugify(e?e:this.populate("#{names.firstName}")).toLowerCase(),t=this.slugify(t?t:this.populate("#{names.lastName}")).toLowerCase(),r=r?r:this.populate("#{internet.emailDomain}"),[e+"."+t+"@"+r,e+"."+t+"##@"+r,""+e+t+"##@"+r,e+"##@"+r]},imageCategories:["abstract","animals","business","cats","city","food","nightlife","fashion","people","nature","sports","technics","transport"],image:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:640,t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:480,r=arguments[2],n="http://lorempixel.com/"+e+"/"+t;return r&&(n+="/"+r),n},mac:function(){return this.times(this.random.hex,6,2).join(":")},ip:function(){return this.times(this.random.number,4,1,254).join(".")},ipv6:function(){return this.times(this.random.hex,8,4).join(":")},color:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:0,t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,r=arguments.length>2&&void 0!==arguments[2]?arguments[2]:0,n=Math.floor((this.random.number(256)+e)/2),i=Math.floor((this.random.number(256)+t)/2),a=Math.floor((this.random.number(256)+r)/2),o=n.toString(16),s=i.toString(16),u=a.toString(16);return(1===o.length?"0":"")+o+(1===s.length?"0":"")+s+(1===u.length?"0":"")+u},avatar:function(){return"https://s3.amazonaws.com/uifaces/faces/twitter/"+this.random.arrayElement(c.default)+"/128.jpg"},gravatar:function(e){return null==e&&(e=this.internet.email()),"https://www.gravatar.com/avatar/"+s.default.createHash("md5").update(e).digest("hex")}}},function(e,t,r){"use strict";!function(r){var n,i,a,o,s;a=/[a-zA-Z]$/,s=/[aeiouAEIOU]$/,i=/[bcdfghjklmnpqrstvwxyzBCDFGHJKLMNPQRSTVWXYZ]$/,n="generatePassword",o=function(e,t,r,n){var a,u;return null==e&&(e=10),null==t&&(t=!0),null==r&&(r=/\w/),null==n&&(n=""),n.length>=e?n:(t&&(r=n.match(i)?s:i),u=Math.floor(94*Math.random())+33,a=String.fromCharCode(u),t&&(a=a.toLowerCase()),a.match(r)?o(e,t,r,""+n+a):o(e,t,r,n))},t[n]=o,"undefined"!=typeof 
e&&e.exports&&(e.exports=o)}(void 0)},function(e,t,r){(function(n){function i(){var e=[].slice.call(arguments).join(" ");throw new Error([e,"we accept pull requests","http://github.com/dominictarr/crypto-browserify"].join("\n"))}function a(e,t){for(var r in e)t(e[r],r)}var o=r(164);t.createHash=r(166),t.createHmac=r(182),t.randomBytes=function(e,t){if(!t||!t.call)return new n(o(e));try{t.call(this,void 0,new n(o(e)))}catch(e){t(e)}},t.getHashes=function(){return["sha1","sha256","sha512","md5","rmd160"]};var s=r(183)(t);t.pbkdf2=s.pbkdf2,t.pbkdf2Sync=s.pbkdf2Sync,r(185)(t,e.exports),a(["createCredentials","createSign","createVerify","createDiffieHellman"],function(e){t[e]=function(){i("sorry,",e,"is not implemented yet")}})}).call(t,r(160).Buffer)},function(e,t,r){(function(e){/*!
* The buffer module from node.js, for the browser.
*
* @author Feross Aboukhadijeh <http://feross.org>
* @license MIT
*/
"use strict";function n(){try{var e=new Uint8Array(1);return e.__proto__={__proto__:Uint8Array.prototype,foo:function(){return 42}},42===e.foo()&&"function"==typeof e.subarray&&0===e.subarray(1,1).byteLength}catch(e){return!1}}function i(){return o.TYPED_ARRAY_SUPPORT?2147483647:1073741823}function a(e,t){if(i()<t)throw new RangeError("Invalid typed array length");return o.TYPED_ARRAY_SUPPORT?(e=new Uint8Array(t),e.__proto__=o.prototype):(null===e&&(e=new o(t)),e.length=t),e}function o(e,t,r){if(!(o.TYPED_ARRAY_SUPPORT||this instanceof o))return new o(e,t,r);if("number"==typeof e){if("string"==typeof t)throw new Error("If encoding is specified then the first argument must be a string");return l(this,e)}return s(this,e,t,r)}function s(e,t,r,n){if("number"==typeof t)throw new TypeError('"value" argument must not be a number');return"undefined"!=typeof ArrayBuffer&&t instanceof ArrayBuffer?d(e,t,r,n):"string"==typeof t?f(e,t,r):p(e,t)}function u(e){if("number"!=typeof e)throw new TypeError('"size" argument must be a number');if(e<0)throw new RangeError('"size" argument must not be negative')}function c(e,t,r,n){return u(t),t<=0?a(e,t):void 0!==r?"string"==typeof n?a(e,t).fill(r,n):a(e,t).fill(r):a(e,t)}function l(e,t){if(u(t),e=a(e,t<0?0:0|m(t)),!o.TYPED_ARRAY_SUPPORT)for(var r=0;r<t;++r)e[r]=0;return e}function f(e,t,r){if("string"==typeof r&&""!==r||(r="utf8"),!o.isEncoding(r))throw new TypeError('"encoding" must be a valid string encoding');var n=0|g(t,r);e=a(e,n);var i=e.write(t,r);return i!==n&&(e=e.slice(0,i)),e}function h(e,t){var r=t.length<0?0:0|m(t.length);e=a(e,r);for(var n=0;n<r;n+=1)e[n]=255&t[n];return e}function d(e,t,r,n){if(t.byteLength,r<0||t.byteLength<r)throw new RangeError("'offset' is out of bounds");if(t.byteLength<r+(n||0))throw new RangeError("'length' is out of bounds");return t=void 0===r&&void 0===n?new Uint8Array(t):void 0===n?new Uint8Array(t,r):new Uint8Array(t,r,n),o.TYPED_ARRAY_SUPPORT?(e=t,e.__proto__=o.prototype):e=h(e,t),e}function 
p(e,t){if(o.isBuffer(t)){var r=0|m(t.length);return e=a(e,r),0===e.length?e:(t.copy(e,0,0,r),e)}if(t){if("undefined"!=typeof ArrayBuffer&&t.buffer instanceof ArrayBuffer||"length"in t)return"number"!=typeof t.length||Z(t.length)?a(e,0):h(e,t);if("Buffer"===t.type&&Q(t.data))return h(e,t.data)}throw new TypeError("First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.")}function m(e){if(e>=i())throw new RangeError("Attempt to allocate Buffer larger than maximum size: 0x"+i().toString(16)+" bytes");return 0|e}function y(e){return+e!=e&&(e=0),o.alloc(+e)}function g(e,t){if(o.isBuffer(e))return e.length;if("undefined"!=typeof ArrayBuffer&&"function"==typeof ArrayBuffer.isView&&(ArrayBuffer.isView(e)||e instanceof ArrayBuffer))return e.byteLength;"string"!=typeof e&&(e=""+e);var r=e.length;if(0===r)return 0;for(var n=!1;;)switch(t){case"ascii":case"latin1":case"binary":return r;case"utf8":case"utf-8":case void 0:return H(e).length;case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return 2*r;case"hex":return r>>>1;case"base64":return q(e).length;default:if(n)return H(e).length;t=(""+t).toLowerCase(),n=!0}}function v(e,t,r){var n=!1;if((void 0===t||t<0)&&(t=0),t>this.length)return"";if((void 0===r||r>this.length)&&(r=this.length),r<=0)return"";if(r>>>=0,t>>>=0,r<=t)return"";for(e||(e="utf8");;)switch(e){case"hex":return L(this,t,r);case"utf8":case"utf-8":return j(this,t,r);case"ascii":return T(this,t,r);case"latin1":case"binary":return R(this,t,r);case"base64":return B(this,t,r);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return N(this,t,r);default:if(n)throw new TypeError("Unknown encoding: "+e);e=(e+"").toLowerCase(),n=!0}}function b(e,t,r){var n=e[t];e[t]=e[r],e[r]=n}function w(e,t,r,n,i){if(0===e.length)return-1;if("string"==typeof r?(n=r,r=0):r>2147483647?r=2147483647:r<-2147483648&&(r=-2147483648),r=+r,isNaN(r)&&(r=i?0:e.length-1),r<0&&(r=e.length+r),r>=e.length){if(i)return-1;r=e.length-1}else 
if(r<0){if(!i)return-1;r=0}if("string"==typeof t&&(t=o.from(t,n)),o.isBuffer(t))return 0===t.length?-1:_(e,t,r,n,i);if("number"==typeof t)return t&=255,o.TYPED_ARRAY_SUPPORT&&"function"==typeof Uint8Array.prototype.indexOf?i?Uint8Array.prototype.indexOf.call(e,t,r):Uint8Array.prototype.lastIndexOf.call(e,t,r):_(e,[t],r,n,i);throw new TypeError("val must be string, number or Buffer")}function _(e,t,r,n,i){function a(e,t){return 1===o?e[t]:e.readUInt16BE(t*o)}var o=1,s=e.length,u=t.length;if(void 0!==n&&(n=String(n).toLowerCase(),"ucs2"===n||"ucs-2"===n||"utf16le"===n||"utf-16le"===n)){if(e.length<2||t.length<2)return-1;o=2,s/=2,u/=2,r/=2}var c;if(i){var l=-1;for(c=r;c<s;c++)if(a(e,c)===a(t,l===-1?0:c-l)){if(l===-1&&(l=c),c-l+1===u)return l*o}else l!==-1&&(c-=c-l),l=-1}else for(r+u>s&&(r=s-u),c=r;c>=0;c--){for(var f=!0,h=0;h<u;h++)if(a(e,c+h)!==a(t,h)){f=!1;break}if(f)return c}return-1}function k(e,t,r,n){r=Number(r)||0;var i=e.length-r;n?(n=Number(n),n>i&&(n=i)):n=i;var a=t.length;if(a%2!==0)throw new TypeError("Invalid hex string");n>a/2&&(n=a/2);for(var o=0;o<n;++o){var s=parseInt(t.substr(2*o,2),16);if(isNaN(s))return o;e[r+o]=s}return o}function S(e,t,r,n){return Y(H(t,e.length-r),e,r,n)}function A(e,t,r,n){return Y(W(t),e,r,n)}function M(e,t,r,n){return A(e,t,r,n)}function x(e,t,r,n){return Y(q(t),e,r,n)}function E(e,t,r,n){return Y(V(t,e.length-r),e,r,n)}function B(e,t,r){return 0===t&&r===e.length?X.fromByteArray(e):X.fromByteArray(e.slice(t,r))}function j(e,t,r){r=Math.min(e.length,r);for(var n=[],i=t;i<r;){var a=e[i],o=null,s=a>239?4:a>223?3:a>191?2:1;if(i+s<=r){var u,c,l,f;switch(s){case 1:a<128&&(o=a);break;case 2:u=e[i+1],128===(192&u)&&(f=(31&a)<<6|63&u,f>127&&(o=f));break;case 3:u=e[i+1],c=e[i+2],128===(192&u)&&128===(192&c)&&(f=(15&a)<<12|(63&u)<<6|63&c,f>2047&&(f<55296||f>57343)&&(o=f));break;case 
4:u=e[i+1],c=e[i+2],l=e[i+3],128===(192&u)&&128===(192&c)&&128===(192&l)&&(f=(15&a)<<18|(63&u)<<12|(63&c)<<6|63&l,f>65535&&f<1114112&&(o=f))}}null===o?(o=65533,s=1):o>65535&&(o-=65536,n.push(o>>>10&1023|55296),o=56320|1023&o),n.push(o),i+=s}return C(n)}function C(e){var t=e.length;if(t<=ee)return String.fromCharCode.apply(String,e);for(var r="",n=0;n<t;)r+=String.fromCharCode.apply(String,e.slice(n,n+=ee));return r}function T(e,t,r){var n="";r=Math.min(e.length,r);for(var i=t;i<r;++i)n+=String.fromCharCode(127&e[i]);return n}function R(e,t,r){var n="";r=Math.min(e.length,r);for(var i=t;i<r;++i)n+=String.fromCharCode(e[i]);return n}function L(e,t,r){var n=e.length;(!t||t<0)&&(t=0),(!r||r<0||r>n)&&(r=n);for(var i="",a=t;a<r;++a)i+=J(e[a]);return i}function N(e,t,r){for(var n=e.slice(t,r),i="",a=0;a<n.length;a+=2)i+=String.fromCharCode(n[a]+256*n[a+1]);return i}function I(e,t,r){if(e%1!==0||e<0)throw new RangeError("offset is not uint");if(e+t>r)throw new RangeError("Trying to access beyond buffer length")}function P(e,t,r,n,i,a){if(!o.isBuffer(e))throw new TypeError('"buffer" argument must be a Buffer instance');if(t>i||t<a)throw new RangeError('"value" argument is out of bounds');if(r+n>e.length)throw new RangeError("Index out of range")}function O(e,t,r,n){t<0&&(t=65535+t+1);for(var i=0,a=Math.min(e.length-r,2);i<a;++i)e[r+i]=(t&255<<8*(n?i:1-i))>>>8*(n?i:1-i)}function D(e,t,r,n){t<0&&(t=4294967295+t+1);for(var i=0,a=Math.min(e.length-r,4);i<a;++i)e[r+i]=t>>>8*(n?i:3-i)&255}function z(e,t,r,n,i,a){if(r+n>e.length)throw new RangeError("Index out of range");if(r<0)throw new RangeError("Index out of range")}function K(e,t,r,n,i){return i||z(e,t,r,4,3.4028234663852886e38,-3.4028234663852886e38),$.write(e,t,r,n,23,4),r+4}function G(e,t,r,n,i){return i||z(e,t,r,8,1.7976931348623157e308,-1.7976931348623157e308),$.write(e,t,r,n,52,8),r+8}function F(e){if(e=U(e).replace(te,""),e.length<2)return"";for(;e.length%4!==0;)e+="=";return e}function U(e){return 
e.trim?e.trim():e.replace(/^\s+|\s+$/g,"")}function J(e){return e<16?"0"+e.toString(16):e.toString(16)}function H(e,t){t=t||1/0;for(var r,n=e.length,i=null,a=[],o=0;o<n;++o){if(r=e.charCodeAt(o),r>55295&&r<57344){if(!i){if(r>56319){(t-=3)>-1&&a.push(239,191,189);continue}if(o+1===n){(t-=3)>-1&&a.push(239,191,189);continue}i=r;continue}if(r<56320){(t-=3)>-1&&a.push(239,191,189),i=r;continue}r=(i-55296<<10|r-56320)+65536}else i&&(t-=3)>-1&&a.push(239,191,189);if(i=null,r<128){if((t-=1)<0)break;a.push(r)}else if(r<2048){if((t-=2)<0)break;a.push(r>>6|192,63&r|128)}else if(r<65536){if((t-=3)<0)break;a.push(r>>12|224,r>>6&63|128,63&r|128)}else{if(!(r<1114112))throw new Error("Invalid code point");if((t-=4)<0)break;a.push(r>>18|240,r>>12&63|128,r>>6&63|128,63&r|128)}}return a}function W(e){for(var t=[],r=0;r<e.length;++r)t.push(255&e.charCodeAt(r));return t}function V(e,t){for(var r,n,i,a=[],o=0;o<e.length&&!((t-=2)<0);++o)r=e.charCodeAt(o),n=r>>8,i=r%256,a.push(i),a.push(n);return a}function q(e){return X.toByteArray(F(e))}function Y(e,t,r,n){for(var i=0;i<n&&!(i+r>=t.length||i>=e.length);++i)t[i+r]=e[i];return i}function Z(e){return e!==e}var X=r(161),$=r(162),Q=r(163);t.Buffer=o,t.SlowBuffer=y,t.INSPECT_MAX_BYTES=50,o.TYPED_ARRAY_SUPPORT=void 0!==e.TYPED_ARRAY_SUPPORT?e.TYPED_ARRAY_SUPPORT:n(),t.kMaxLength=i(),o.poolSize=8192,o._augment=function(e){return e.__proto__=o.prototype,e},o.from=function(e,t,r){return s(null,e,t,r)},o.TYPED_ARRAY_SUPPORT&&(o.prototype.__proto__=Uint8Array.prototype,o.__proto__=Uint8Array,"undefined"!=typeof Symbol&&Symbol.species&&o[Symbol.species]===o&&Object.defineProperty(o,Symbol.species,{value:null,configurable:!0})),o.alloc=function(e,t,r){return c(null,e,t,r)},o.allocUnsafe=function(e){return l(null,e)},o.allocUnsafeSlow=function(e){return l(null,e)},o.isBuffer=function(e){return!(null==e||!e._isBuffer)},o.compare=function(e,t){if(!o.isBuffer(e)||!o.isBuffer(t))throw new TypeError("Arguments must be Buffers");if(e===t)return 0;for(var 
r=e.length,n=t.length,i=0,a=Math.min(r,n);i<a;++i)if(e[i]!==t[i]){r=e[i],n=t[i];break}return r<n?-1:n<r?1:0},o.isEncoding=function(e){switch(String(e).toLowerCase()){case"hex":case"utf8":case"utf-8":case"ascii":case"latin1":case"binary":case"base64":case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return!0;default:return!1}},o.concat=function(e,t){if(!Q(e))throw new TypeError('"list" argument must be an Array of Buffers');if(0===e.length)return o.alloc(0);var r;if(void 0===t)for(t=0,r=0;r<e.length;++r)t+=e[r].length;var n=o.allocUnsafe(t),i=0;for(r=0;r<e.length;++r){var a=e[r];if(!o.isBuffer(a))throw new TypeError('"list" argument must be an Array of Buffers');a.copy(n,i),i+=a.length}return n},o.byteLength=g,o.prototype._isBuffer=!0,o.prototype.swap16=function(){var e=this.length;if(e%2!==0)throw new RangeError("Buffer size must be a multiple of 16-bits");for(var t=0;t<e;t+=2)b(this,t,t+1);return this},o.prototype.swap32=function(){var e=this.length;if(e%4!==0)throw new RangeError("Buffer size must be a multiple of 32-bits");for(var t=0;t<e;t+=4)b(this,t,t+3),b(this,t+1,t+2);return this},o.prototype.swap64=function(){var e=this.length;if(e%8!==0)throw new RangeError("Buffer size must be a multiple of 64-bits");for(var t=0;t<e;t+=8)b(this,t,t+7),b(this,t+1,t+6),b(this,t+2,t+5),b(this,t+3,t+4);return this},o.prototype.toString=function(){var e=0|this.length;return 0===e?"":0===arguments.length?j(this,0,e):v.apply(this,arguments)},o.prototype.equals=function(e){if(!o.isBuffer(e))throw new TypeError("Argument must be a Buffer");return this===e||0===o.compare(this,e)},o.prototype.inspect=function(){var e="",r=t.INSPECT_MAX_BYTES;return this.length>0&&(e=this.toString("hex",0,r).match(/.{2}/g).join(" "),this.length>r&&(e+=" ... 
")),"<Buffer "+e+">"},o.prototype.compare=function(e,t,r,n,i){if(!o.isBuffer(e))throw new TypeError("Argument must be a Buffer");if(void 0===t&&(t=0),void 0===r&&(r=e?e.length:0),void 0===n&&(n=0),void 0===i&&(i=this.length),t<0||r>e.length||n<0||i>this.length)throw new RangeError("out of range index");if(n>=i&&t>=r)return 0;if(n>=i)return-1;if(t>=r)return 1;if(t>>>=0,r>>>=0,n>>>=0,i>>>=0,this===e)return 0;for(var a=i-n,s=r-t,u=Math.min(a,s),c=this.slice(n,i),l=e.slice(t,r),f=0;f<u;++f)if(c[f]!==l[f]){a=c[f],s=l[f];break}return a<s?-1:s<a?1:0},o.prototype.includes=function(e,t,r){return this.indexOf(e,t,r)!==-1},o.prototype.indexOf=function(e,t,r){return w(this,e,t,r,!0)},o.prototype.lastIndexOf=function(e,t,r){return w(this,e,t,r,!1)},o.prototype.write=function(e,t,r,n){if(void 0===t)n="utf8",r=this.length,t=0;else if(void 0===r&&"string"==typeof t)n=t,r=this.length,t=0;else{if(!isFinite(t))throw new Error("Buffer.write(string, encoding, offset[, length]) is no longer supported");t|=0,isFinite(r)?(r|=0,void 0===n&&(n="utf8")):(n=r,r=void 0)}var i=this.length-t;if((void 0===r||r>i)&&(r=i),e.length>0&&(r<0||t<0)||t>this.length)throw new RangeError("Attempt to write outside buffer bounds");n||(n="utf8");for(var a=!1;;)switch(n){case"hex":return k(this,e,t,r);case"utf8":case"utf-8":return S(this,e,t,r);case"ascii":return A(this,e,t,r);case"latin1":case"binary":return M(this,e,t,r);case"base64":return x(this,e,t,r);case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":return E(this,e,t,r);default:if(a)throw new TypeError("Unknown encoding: "+n);n=(""+n).toLowerCase(),a=!0}},o.prototype.toJSON=function(){return{type:"Buffer",data:Array.prototype.slice.call(this._arr||this,0)}};var ee=4096;o.prototype.slice=function(e,t){var r=this.length;e=~~e,t=void 0===t?r:~~t,e<0?(e+=r,e<0&&(e=0)):e>r&&(e=r),t<0?(t+=r,t<0&&(t=0)):t>r&&(t=r),t<e&&(t=e);var n;if(o.TYPED_ARRAY_SUPPORT)n=this.subarray(e,t),n.__proto__=o.prototype;else{var i=t-e;n=new o(i,void 0);for(var 
a=0;a<i;++a)n[a]=this[a+e]}return n},o.prototype.readUIntLE=function(e,t,r){e|=0,t|=0,r||I(e,t,this.length);for(var n=this[e],i=1,a=0;++a<t&&(i*=256);)n+=this[e+a]*i;return n},o.prototype.readUIntBE=function(e,t,r){e|=0,t|=0,r||I(e,t,this.length);for(var n=this[e+--t],i=1;t>0&&(i*=256);)n+=this[e+--t]*i;return n},o.prototype.readUInt8=function(e,t){return t||I(e,1,this.length),this[e]},o.prototype.readUInt16LE=function(e,t){return t||I(e,2,this.length),this[e]|this[e+1]<<8},o.prototype.readUInt16BE=function(e,t){return t||I(e,2,this.length),this[e]<<8|this[e+1]},o.prototype.readUInt32LE=function(e,t){return t||I(e,4,this.length),(this[e]|this[e+1]<<8|this[e+2]<<16)+16777216*this[e+3]},o.prototype.readUInt32BE=function(e,t){return t||I(e,4,this.length),16777216*this[e]+(this[e+1]<<16|this[e+2]<<8|this[e+3])},o.prototype.readIntLE=function(e,t,r){e|=0,t|=0,r||I(e,t,this.length);for(var n=this[e],i=1,a=0;++a<t&&(i*=256);)n+=this[e+a]*i;return i*=128,n>=i&&(n-=Math.pow(2,8*t)),n},o.prototype.readIntBE=function(e,t,r){e|=0,t|=0,r||I(e,t,this.length);for(var n=t,i=1,a=this[e+--n];n>0&&(i*=256);)a+=this[e+--n]*i;return i*=128,a>=i&&(a-=Math.pow(2,8*t)),a},o.prototype.readInt8=function(e,t){return t||I(e,1,this.length),128&this[e]?(255-this[e]+1)*-1:this[e]},o.prototype.readInt16LE=function(e,t){t||I(e,2,this.length);var r=this[e]|this[e+1]<<8;return 32768&r?4294901760|r:r},o.prototype.readInt16BE=function(e,t){t||I(e,2,this.length);var r=this[e+1]|this[e]<<8;return 32768&r?4294901760|r:r},o.prototype.readInt32LE=function(e,t){return t||I(e,4,this.length),this[e]|this[e+1]<<8|this[e+2]<<16|this[e+3]<<24},o.prototype.readInt32BE=function(e,t){return t||I(e,4,this.length),this[e]<<24|this[e+1]<<16|this[e+2]<<8|this[e+3]},o.prototype.readFloatLE=function(e,t){return t||I(e,4,this.length),$.read(this,e,!0,23,4)},o.prototype.readFloatBE=function(e,t){return t||I(e,4,this.length),$.read(this,e,!1,23,4)},o.prototype.readDoubleLE=function(e,t){return 
t||I(e,8,this.length),$.read(this,e,!0,52,8)},o.prototype.readDoubleBE=function(e,t){return t||I(e,8,this.length),$.read(this,e,!1,52,8)},o.prototype.writeUIntLE=function(e,t,r,n){if(e=+e,t|=0,r|=0,!n){var i=Math.pow(2,8*r)-1;P(this,e,t,r,i,0)}var a=1,o=0;for(this[t]=255&e;++o<r&&(a*=256);)this[t+o]=e/a&255;return t+r},o.prototype.writeUIntBE=function(e,t,r,n){if(e=+e,t|=0,r|=0,!n){var i=Math.pow(2,8*r)-1;P(this,e,t,r,i,0)}var a=r-1,o=1;for(this[t+a]=255&e;--a>=0&&(o*=256);)this[t+a]=e/o&255;return t+r},o.prototype.writeUInt8=function(e,t,r){return e=+e,t|=0,r||P(this,e,t,1,255,0),o.TYPED_ARRAY_SUPPORT||(e=Math.floor(e)),this[t]=255&e,t+1},o.prototype.writeUInt16LE=function(e,t,r){return e=+e,t|=0,r||P(this,e,t,2,65535,0),o.TYPED_ARRAY_SUPPORT?(this[t]=255&e,this[t+1]=e>>>8):O(this,e,t,!0),t+2},o.prototype.writeUInt16BE=function(e,t,r){return e=+e,t|=0,r||P(this,e,t,2,65535,0),o.TYPED_ARRAY_SUPPORT?(this[t]=e>>>8,this[t+1]=255&e):O(this,e,t,!1),t+2},o.prototype.writeUInt32LE=function(e,t,r){return e=+e,t|=0,r||P(this,e,t,4,4294967295,0),o.TYPED_ARRAY_SUPPORT?(this[t+3]=e>>>24,this[t+2]=e>>>16,this[t+1]=e>>>8,this[t]=255&e):D(this,e,t,!0),t+4},o.prototype.writeUInt32BE=function(e,t,r){return e=+e,t|=0,r||P(this,e,t,4,4294967295,0),o.TYPED_ARRAY_SUPPORT?(this[t]=e>>>24,this[t+1]=e>>>16,this[t+2]=e>>>8,this[t+3]=255&e):D(this,e,t,!1),t+4},o.prototype.writeIntLE=function(e,t,r,n){if(e=+e,t|=0,!n){var i=Math.pow(2,8*r-1);P(this,e,t,r,i-1,-i)}var a=0,o=1,s=0;for(this[t]=255&e;++a<r&&(o*=256);)e<0&&0===s&&0!==this[t+a-1]&&(s=1),this[t+a]=(e/o>>0)-s&255;return t+r},o.prototype.writeIntBE=function(e,t,r,n){if(e=+e,t|=0,!n){var i=Math.pow(2,8*r-1);P(this,e,t,r,i-1,-i)}var a=r-1,o=1,s=0;for(this[t+a]=255&e;--a>=0&&(o*=256);)e<0&&0===s&&0!==this[t+a+1]&&(s=1),this[t+a]=(e/o>>0)-s&255;return t+r},o.prototype.writeInt8=function(e,t,r){return 
e=+e,t|=0,r||P(this,e,t,1,127,-128),o.TYPED_ARRAY_SUPPORT||(e=Math.floor(e)),e<0&&(e=255+e+1),this[t]=255&e,t+1},o.prototype.writeInt16LE=function(e,t,r){return e=+e,t|=0,r||P(this,e,t,2,32767,-32768),o.TYPED_ARRAY_SUPPORT?(this[t]=255&e,this[t+1]=e>>>8):O(this,e,t,!0),t+2},o.prototype.writeInt16BE=function(e,t,r){return e=+e,t|=0,r||P(this,e,t,2,32767,-32768),o.TYPED_ARRAY_SUPPORT?(this[t]=e>>>8,this[t+1]=255&e):O(this,e,t,!1),t+2},o.prototype.writeInt32LE=function(e,t,r){return e=+e,t|=0,r||P(this,e,t,4,2147483647,-2147483648),o.TYPED_ARRAY_SUPPORT?(this[t]=255&e,this[t+1]=e>>>8,this[t+2]=e>>>16,this[t+3]=e>>>24):D(this,e,t,!0),t+4},o.prototype.writeInt32BE=function(e,t,r){return e=+e,t|=0,r||P(this,e,t,4,2147483647,-2147483648),e<0&&(e=4294967295+e+1),o.TYPED_ARRAY_SUPPORT?(this[t]=e>>>24,this[t+1]=e>>>16,this[t+2]=e>>>8,this[t+3]=255&e):D(this,e,t,!1),t+4},o.prototype.writeFloatLE=function(e,t,r){return K(this,e,t,!0,r)},o.prototype.writeFloatBE=function(e,t,r){return K(this,e,t,!1,r)},o.prototype.writeDoubleLE=function(e,t,r){return G(this,e,t,!0,r)},o.prototype.writeDoubleBE=function(e,t,r){return G(this,e,t,!1,r)},o.prototype.copy=function(e,t,r,n){if(r||(r=0),n||0===n||(n=this.length),t>=e.length&&(t=e.length),t||(t=0),n>0&&n<r&&(n=r),n===r)return 0;if(0===e.length||0===this.length)return 0;if(t<0)throw new RangeError("targetStart out of bounds");if(r<0||r>=this.length)throw new RangeError("sourceStart out of bounds");if(n<0)throw new RangeError("sourceEnd out of bounds");n>this.length&&(n=this.length),e.length-t<n-r&&(n=e.length-t+r);var i,a=n-r;if(this===e&&r<t&&t<n)for(i=a-1;i>=0;--i)e[i+t]=this[i+r];else if(a<1e3||!o.TYPED_ARRAY_SUPPORT)for(i=0;i<a;++i)e[i+t]=this[i+r];else Uint8Array.prototype.set.call(e,this.subarray(r,r+a),t);return a},o.prototype.fill=function(e,t,r,n){if("string"==typeof e){if("string"==typeof t?(n=t,t=0,r=this.length):"string"==typeof r&&(n=r,r=this.length),1===e.length){var i=e.charCodeAt(0);i<256&&(e=i)}if(void 
0!==n&&"string"!=typeof n)throw new TypeError("encoding must be a string");if("string"==typeof n&&!o.isEncoding(n))throw new TypeError("Unknown encoding: "+n)}else"number"==typeof e&&(e&=255);if(t<0||this.length<t||this.length<r)throw new RangeError("Out of range index");if(r<=t)return this;t>>>=0,r=void 0===r?this.length:r>>>0,e||(e=0);var a;if("number"==typeof e)for(a=t;a<r;++a)this[a]=e;else{var s=o.isBuffer(e)?e:H(new o(e,n).toString()),u=s.length;for(a=0;a<r-t;++a)this[a+t]=s[a%u]}return this};var te=/[^+\/0-9A-Za-z-_]/g}).call(t,function(){return this}())},function(e,t){"use strict";function r(e){var t=e.length;if(t%4>0)throw new Error("Invalid string. Length must be a multiple of 4");var r=e.indexOf("=");r===-1&&(r=t);var n=r===t?0:4-r%4;return[r,n]}function n(e){var t=r(e),n=t[0],i=t[1];return 3*(n+i)/4-i}function i(e,t,r){return 3*(t+r)/4-r}function a(e){var t,n,a=r(e),o=a[0],s=a[1],u=new f(i(e,o,s)),c=0,h=s>0?o-4:o;for(n=0;n<h;n+=4)t=l[e.charCodeAt(n)]<<18|l[e.charCodeAt(n+1)]<<12|l[e.charCodeAt(n+2)]<<6|l[e.charCodeAt(n+3)],u[c++]=t>>16&255,u[c++]=t>>8&255,u[c++]=255&t;return 2===s&&(t=l[e.charCodeAt(n)]<<2|l[e.charCodeAt(n+1)]>>4,u[c++]=255&t),1===s&&(t=l[e.charCodeAt(n)]<<10|l[e.charCodeAt(n+1)]<<4|l[e.charCodeAt(n+2)]>>2,u[c++]=t>>8&255,u[c++]=255&t),u}function o(e){return c[e>>18&63]+c[e>>12&63]+c[e>>6&63]+c[63&e]}function s(e,t,r){for(var n,i=[],a=t;a<r;a+=3)n=(e[a]<<16&16711680)+(e[a+1]<<8&65280)+(255&e[a+2]),i.push(o(n));return i.join("")}function u(e){for(var t,r=e.length,n=r%3,i=[],a=16383,o=0,u=r-n;o<u;o+=a)i.push(s(e,o,o+a>u?u:o+a));return 1===n?(t=e[r-1],i.push(c[t>>2]+c[t<<4&63]+"==")):2===n&&(t=(e[r-2]<<8)+e[r-1],i.push(c[t>>10]+c[t>>4&63]+c[t<<2&63]+"=")),i.join("")}t.byteLength=n,t.toByteArray=a,t.fromByteArray=u;for(var c=[],l=[],f="undefined"!=typeof 
Uint8Array?Uint8Array:Array,h="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/",d=0,p=h.length;d<p;++d)c[d]=h[d],l[h.charCodeAt(d)]=d;l["-".charCodeAt(0)]=62,l["_".charCodeAt(0)]=63},function(e,t){t.read=function(e,t,r,n,i){var a,o,s=8*i-n-1,u=(1<<s)-1,c=u>>1,l=-7,f=r?i-1:0,h=r?-1:1,d=e[t+f];for(f+=h,a=d&(1<<-l)-1,d>>=-l,l+=s;l>0;a=256*a+e[t+f],f+=h,l-=8);for(o=a&(1<<-l)-1,a>>=-l,l+=n;l>0;o=256*o+e[t+f],f+=h,l-=8);if(0===a)a=1-c;else{if(a===u)return o?NaN:(d?-1:1)*(1/0);o+=Math.pow(2,n),a-=c}return(d?-1:1)*o*Math.pow(2,a-n)},t.write=function(e,t,r,n,i,a){var o,s,u,c=8*a-i-1,l=(1<<c)-1,f=l>>1,h=23===i?Math.pow(2,-24)-Math.pow(2,-77):0,d=n?0:a-1,p=n?1:-1,m=t<0||0===t&&1/t<0?1:0;for(t=Math.abs(t),isNaN(t)||t===1/0?(s=isNaN(t)?1:0,o=l):(o=Math.floor(Math.log(t)/Math.LN2),t*(u=Math.pow(2,-o))<1&&(o--,u*=2),t+=o+f>=1?h/u:h*Math.pow(2,1-f),t*u>=2&&(o++,u/=2),o+f>=l?(s=0,o=l):o+f>=1?(s=(t*u-1)*Math.pow(2,i),o+=f):(s=t*Math.pow(2,f-1)*Math.pow(2,i),o=0));i>=8;e[r+d]=255&s,d+=p,s/=256,i-=8);for(o=o<<i|s,c+=i;c>0;e[r+d]=255&o,d+=p,o/=256,c-=8);e[r+d-p]|=128*m}},function(e,t){var r={}.toString;e.exports=Array.isArray||function(e){return"[object Array]"==r.call(e)}},function(e,t,r){(function(t,n){!function(){var i=("undefined"==typeof window?t:window)||{};_crypto=i.crypto||i.msCrypto||r(165),e.exports=function(e){if(_crypto.getRandomValues){var t=new n(e);return _crypto.getRandomValues(t),t}if(_crypto.randomBytes)return _crypto.randomBytes(e);throw new Error("secure random number generation not supported by this browser\nuse chrome, FireFox or Internet Explorer 11")}}()}).call(t,function(){return this}(),r(160).Buffer)},function(e,t){},function(e,t,r){(function(t){function n(e){return function(){var r=[],n={update:function(e,n){return t.isBuffer(e)||(e=new t(e,n)),r.push(e),this},digest:function(n){var i=t.concat(r),a=e(i);return r=null,n?a.toString(n):a}};return n}}var i=r(167),a=n(r(179)),o=n(r(181));e.exports=function(e){return"md5"===e?new 
a:"rmd160"===e?new o:i(e)}}).call(t,r(160).Buffer)},function(e,t,r){var t=e.exports=function(e){var r=t[e];if(!r)throw new Error(e+" is not supported (we accept pull requests)");return new r},n=r(160).Buffer,i=r(168)(n);t.sha1=r(169)(n,i),t.sha256=r(177)(n,i),t.sha512=r(178)(n,i)},function(e,t){e.exports=function(e){function t(t,r){this._block=new e(t),this._finalSize=r,this._blockSize=t,this._len=0,this._s=0}return t.prototype.init=function(){this._s=0,this._len=0},t.prototype.update=function(t,r){"string"==typeof t&&(r=r||"utf8",t=new e(t,r));for(var n=this._len+=t.length,i=this._s=this._s||0,a=0,o=this._block;i<n;){for(var s=Math.min(t.length,a+this._blockSize-i%this._blockSize),u=s-a,c=0;c<u;c++)o[i%this._blockSize+c]=t[c+a];i+=u,a+=u,i%this._blockSize===0&&this._update(o)}return this._s=i,this},t.prototype.digest=function(e){var t=8*this._len;this._block[this._len%this._blockSize]=128,this._block.fill(0,this._len%this._blockSize+1),t%(8*this._blockSize)>=8*this._finalSize&&(this._update(this._block),this._block.fill(0)),this._block.writeInt32BE(t,this._blockSize-4);var r=this._update(this._block)||this._hash();return e?r.toString(e):r},t.prototype._update=function(){throw new Error("_update must be implemented by subclass")},t}},function(e,t,r){var n=r(170).inherits;e.exports=function(e,t){function r(){return p.length?p.pop().init():this instanceof r?(this._w=d,t.call(this,64,56),this._h=null,void this.init()):new r}function i(e,t,r,n){return e<20?t&r|~t&n:e<40?t^r^n:e<60?t&r|t&n|r&n:t^r^n}function a(e){return e<20?1518500249:e<40?1859775393:e<60?-1894007588:-899497514}function o(e,t){return e+t|0}function s(e,t){return e<<t|e>>>32-t}var u=0,c=4,l=8,f=12,h=16,d=new("undefined"==typeof Int32Array?Array:Int32Array)(80),p=[];return n(r,t),r.prototype.init=function(){return this._a=1732584193,this._b=4023233417,this._c=2562383102,this._d=271733878,this._e=3285377520,t.prototype.init.call(this),this},r.prototype._POOL=p,r.prototype._update=function(e){var 
t,r,n,u,c,l,f,h,d,p;t=l=this._a,r=f=this._b,n=h=this._c,u=d=this._d,c=p=this._e;for(var m=this._w,y=0;y<80;y++){var g=m[y]=y<16?e.readInt32BE(4*y):s(m[y-3]^m[y-8]^m[y-14]^m[y-16],1),v=o(o(s(t,5),i(y,r,n,u)),o(o(c,g),a(y)));c=u,u=n,n=s(r,30),r=t,t=v}this._a=o(t,l),this._b=o(r,f),this._c=o(n,h),this._d=o(u,d),this._e=o(c,p)},r.prototype._hash=function(){p.length<100&&p.push(this);var t=new e(20);return t.writeInt32BE(0|this._a,u),t.writeInt32BE(0|this._b,c),t.writeInt32BE(0|this._c,l),t.writeInt32BE(0|this._d,f),t.writeInt32BE(0|this._e,h),t},r}},function(e,t,r){(function(e){function n(e,r){var n={seen:[],stylize:a};return arguments.length>=3&&(n.depth=arguments[2]),arguments.length>=4&&(n.colors=arguments[3]),p(r)?n.showHidden=r:r&&t._extend(n,r),w(n.showHidden)&&(n.showHidden=!1),w(n.depth)&&(n.depth=2),w(n.colors)&&(n.colors=!1),w(n.customInspect)&&(n.customInspect=!0),n.colors&&(n.stylize=i),s(n,e,n.depth)}function i(e,t){var r=n.styles[t];return r?"["+n.colors[r][0]+"m"+e+"["+n.colors[r][1]+"m":e}function a(e,t){return e}function o(e){var t={};return e.forEach(function(e,r){t[e]=!0}),t}function s(e,r,n){if(e.customInspect&&r&&M(r.inspect)&&r.inspect!==t.inspect&&(!r.constructor||r.constructor.prototype!==r)){var i=r.inspect(n,e);return v(i)||(i=s(e,i,n)),i}var a=u(e,r);if(a)return a;var p=Object.keys(r),m=o(p);if(e.showHidden&&(p=Object.getOwnPropertyNames(r)),A(r)&&(p.indexOf("message")>=0||p.indexOf("description")>=0))return c(r);if(0===p.length){if(M(r)){var y=r.name?": "+r.name:"";return e.stylize("[Function"+y+"]","special")}if(_(r))return e.stylize(RegExp.prototype.toString.call(r),"regexp");if(S(r))return e.stylize(Date.prototype.toString.call(r),"date");if(A(r))return c(r)}var g="",b=!1,w=["{","}"];if(d(r)&&(b=!0,w=["[","]"]),M(r)){var k=r.name?": "+r.name:"";g=" [Function"+k+"]"}if(_(r)&&(g=" "+RegExp.prototype.toString.call(r)),S(r)&&(g=" "+Date.prototype.toUTCString.call(r)),A(r)&&(g=" "+c(r)),0===p.length&&(!b||0==r.length))return 
w[0]+g+w[1];if(n<0)return _(r)?e.stylize(RegExp.prototype.toString.call(r),"regexp"):e.stylize("[Object]","special");e.seen.push(r);var x;return x=b?l(e,r,n,m,p):p.map(function(t){return f(e,r,n,m,t,b)}),e.seen.pop(),h(x,g,w)}function u(e,t){if(w(t))return e.stylize("undefined","undefined");if(v(t)){var r="'"+JSON.stringify(t).replace(/^"|"$/g,"").replace(/'/g,"\\'").replace(/\\"/g,'"')+"'";return e.stylize(r,"string")}return g(t)?e.stylize(""+t,"number"):p(t)?e.stylize(""+t,"boolean"):m(t)?e.stylize("null","null"):void 0}function c(e){return"["+Error.prototype.toString.call(e)+"]"}function l(e,t,r,n,i){for(var a=[],o=0,s=t.length;o<s;++o)C(t,String(o))?a.push(f(e,t,r,n,String(o),!0)):a.push("");return i.forEach(function(i){i.match(/^\d+$/)||a.push(f(e,t,r,n,i,!0))}),a}function f(e,t,r,n,i,a){var o,u,c;if(c=Object.getOwnPropertyDescriptor(t,i)||{value:t[i]},c.get?u=c.set?e.stylize("[Getter/Setter]","special"):e.stylize("[Getter]","special"):c.set&&(u=e.stylize("[Setter]","special")),C(n,i)||(o="["+i+"]"),u||(e.seen.indexOf(c.value)<0?(u=m(r)?s(e,c.value,null):s(e,c.value,r-1),u.indexOf("\n")>-1&&(u=a?u.split("\n").map(function(e){return" "+e}).join("\n").substr(2):"\n"+u.split("\n").map(function(e){return" "+e}).join("\n"))):u=e.stylize("[Circular]","special")),w(o)){if(a&&i.match(/^\d+$/))return u;o=JSON.stringify(""+i),o.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)?(o=o.substr(1,o.length-2),o=e.stylize(o,"name")):(o=o.replace(/'/g,"\\'").replace(/\\"/g,'"').replace(/(^"|"$)/g,"'"),o=e.stylize(o,"string"))}return o+": "+u}function h(e,t,r){var n=0,i=e.reduce(function(e,t){return n++,t.indexOf("\n")>=0&&n++,e+t.replace(/\u001b\[\d\d?m/g,"").length+1},0);return i>60?r[0]+(""===t?"":t+"\n ")+" "+e.join(",\n ")+" "+r[1]:r[0]+t+" "+e.join(", ")+" "+r[1]}function d(e){return Array.isArray(e)}function p(e){return"boolean"==typeof e}function m(e){return null===e}function y(e){return null==e}function g(e){return"number"==typeof e}function v(e){return"string"==typeof e}function 
b(e){return"symbol"==typeof e}function w(e){return void 0===e}function _(e){return k(e)&&"[object RegExp]"===E(e)}function k(e){return"object"==typeof e&&null!==e}function S(e){return k(e)&&"[object Date]"===E(e)}function A(e){return k(e)&&("[object Error]"===E(e)||e instanceof Error)}function M(e){return"function"==typeof e}function x(e){return null===e||"boolean"==typeof e||"number"==typeof e||"string"==typeof e||"symbol"==typeof e||"undefined"==typeof e}function E(e){return Object.prototype.toString.call(e)}function B(e){return e<10?"0"+e.toString(10):e.toString(10)}function j(){var e=new Date,t=[B(e.getHours()),B(e.getMinutes()),B(e.getSeconds())].join(":");return[e.getDate(),D[e.getMonth()],t].join(" ")}function C(e,t){return Object.prototype.hasOwnProperty.call(e,t)}function T(e,t){if(!e){var r=new Error("Promise was rejected with a falsy value");r.reason=e,e=r}return t(e)}function R(t){function r(){for(var r=[],n=0;n<arguments.length;n++)r.push(arguments[n]);var i=r.pop();if("function"!=typeof i)throw new TypeError("The last argument must be of type Function");var a=this,o=function(){return i.apply(a,arguments)};t.apply(this,r).then(function(t){e.nextTick(o.bind(null,null,t))},function(t){e.nextTick(T.bind(null,t,o))})}if("function"!=typeof t)throw new TypeError('The "original" argument must be of type Function');return Object.setPrototypeOf(r,Object.getPrototypeOf(t)),Object.defineProperties(r,L(t)),r}var L=Object.getOwnPropertyDescriptors||function(e){for(var t=Object.keys(e),r={},n=0;n<t.length;n++)r[t[n]]=Object.getOwnPropertyDescriptor(e,t[n]);return r},N=/%[sdj%]/g;t.format=function(e){if(!v(e)){for(var t=[],r=0;r<arguments.length;r++)t.push(n(arguments[r]));return t.join(" ")}for(var r=1,i=arguments,a=i.length,o=String(e).replace(N,function(e){if("%%"===e)return"%";if(r>=a)return e;switch(e){case"%s":return String(i[r++]);case"%d":return Number(i[r++]);case"%j":try{return JSON.stringify(i[r++])}catch(e){return"[Circular]"}default:return 
e}}),s=i[r];r<a;s=i[++r])o+=m(s)||!k(s)?" "+s:" "+n(s);return o},t.deprecate=function(r,n){function i(){if(!a){if(e.throwDeprecation)throw new Error(n);e.traceDeprecation?console.trace(n):console.error(n),a=!0}return r.apply(this,arguments)}if("undefined"!=typeof e&&e.noDeprecation===!0)return r;if("undefined"==typeof e)return function(){return t.deprecate(r,n).apply(this,arguments)};var a=!1;return i};var I={},P=/^$/;if({NODE_ENV:"production"}.NODE_DEBUG){var O={NODE_ENV:"production"}.NODE_DEBUG;O=O.replace(/[|\\{}()[\]^$+?.]/g,"\\$&").replace(/\*/g,".*").replace(/,/g,"$|^").toUpperCase(),P=new RegExp("^"+O+"$","i")}t.debuglog=function(r){if(r=r.toUpperCase(),!I[r])if(P.test(r)){var n=e.pid;I[r]=function(){var e=t.format.apply(t,arguments);console.error("%s %d: %s",r,n,e)}}else I[r]=function(){};return I[r]},t.inspect=n,n.colors={bold:[1,22],italic:[3,23],underline:[4,24],inverse:[7,27],white:[37,39],grey:[90,39],
black:[30,39],blue:[34,39],cyan:[36,39],green:[32,39],magenta:[35,39],red:[31,39],yellow:[33,39]},n.styles={special:"cyan",number:"yellow",boolean:"yellow",undefined:"grey",null:"bold",string:"green",date:"magenta",regexp:"red"},t.types=r(172),t.isArray=d,t.isBoolean=p,t.isNull=m,t.isNullOrUndefined=y,t.isNumber=g,t.isString=v,t.isSymbol=b,t.isUndefined=w,t.isRegExp=_,t.types.isRegExp=_,t.isObject=k,t.isDate=S,t.types.isDate=S,t.isError=A,t.types.isNativeError=A,t.isFunction=M,t.isPrimitive=x,t.isBuffer=r(173);var D=["Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"];t.log=function(){console.log("%s - %s",j(),t.format.apply(t,arguments))},t.inherits=r(176),t._extend=function(e,t){if(!t||!k(t))return e;for(var r=Object.keys(t),n=r.length;n--;)e[r[n]]=t[r[n]];return e};var z="undefined"!=typeof Symbol?Symbol("util.promisify.custom"):void 0;t.promisify=function(e){function t(){for(var t,r,n=new Promise(function(e,n){t=e,r=n}),i=[],a=0;a<arguments.length;a++)i.push(arguments[a]);i.push(function(e,n){e?r(e):t(n)});try{e.apply(this,i)}catch(e){r(e)}return n}if("function"!=typeof e)throw new TypeError('The "original" argument must be of type Function');if(z&&e[z]){var t=e[z];if("function"!=typeof t)throw new TypeError('The "util.promisify.custom" argument must be of type Function');return Object.defineProperty(t,z,{value:t,enumerable:!1,writable:!1,configurable:!0}),t}return Object.setPrototypeOf(t,Object.getPrototypeOf(e)),z&&Object.defineProperty(t,z,{value:t,enumerable:!1,writable:!1,configurable:!0}),Object.defineProperties(t,L(e))},t.promisify.custom=z,t.callbackify=R}).call(t,r(171))},function(e,t){function r(){throw new Error("setTimeout has not been defined")}function n(){throw new Error("clearTimeout has not been defined")}function i(e){if(l===setTimeout)return setTimeout(e,0);if((l===r||!l)&&setTimeout)return l=setTimeout,setTimeout(e,0);try{return l(e,0)}catch(t){try{return l.call(null,e,0)}catch(t){return l.call(this,e,0)}}}function 
a(e){if(f===clearTimeout)return clearTimeout(e);if((f===n||!f)&&clearTimeout)return f=clearTimeout,clearTimeout(e);try{return f(e)}catch(t){try{return f.call(null,e)}catch(t){return f.call(this,e)}}}function o(){m&&d&&(m=!1,d.length?p=d.concat(p):y=-1,p.length&&s())}function s(){if(!m){var e=i(o);m=!0;for(var t=p.length;t;){for(d=p,p=[];++y<t;)d&&d[y].run();y=-1,t=p.length}d=null,m=!1,a(e)}}function u(e,t){this.fun=e,this.array=t}function c(){}var l,f,h=e.exports={};!function(){try{l="function"==typeof setTimeout?setTimeout:r}catch(e){l=r}try{f="function"==typeof clearTimeout?clearTimeout:n}catch(e){f=n}}();var d,p=[],m=!1,y=-1;h.nextTick=function(e){var t=new Array(arguments.length-1);if(arguments.length>1)for(var r=1;r<arguments.length;r++)t[r-1]=arguments[r];p.push(new u(e,t)),1!==p.length||m||i(s)},u.prototype.run=function(){this.fun.apply(null,this.array)},h.title="browser",h.browser=!0,h.env={},h.argv=[],h.version="",h.versions={},h.on=c,h.addListener=c,h.once=c,h.off=c,h.removeListener=c,h.removeAllListeners=c,h.emit=c,h.prependListener=c,h.prependOnceListener=c,h.listeners=function(e){return[]},h.binding=function(e){throw new Error("process.binding is not supported")},h.cwd=function(){return"/"},h.chdir=function(e){throw new Error("process.chdir is not supported")},h.umask=function(){return 0}},function(e,t,r){"use strict";function n(e){return e.call.bind(e)}function i(e,t){if("object"!=typeof e)return!1;try{return t(e),!0}catch(e){return!1}}function a(e){return"undefined"!=typeof Promise&&e instanceof Promise||null!==e&&"object"==typeof e&&"function"==typeof e.then&&"function"==typeof e.catch}function o(e){return $&&ArrayBuffer.isView?ArrayBuffer.isView(e):s(e)||C(e)}function s(e){return X&&Z?void 0!==ee(e):u(e)||c(e)||l(e)||f(e)||h(e)||d(e)||p(e)||m(e)||y(e)||g(e)||v(e)}function u(e){return X&&Z?"Uint8Array"===ee(e):"[object Uint8Array]"===te(e)||H(e)&&void 0!==e.buffer}function c(e){return X&&Z?"Uint8ClampedArray"===ee(e):"[object 
Uint8ClampedArray]"===te(e)}function l(e){return X&&Z?"Uint16Array"===ee(e):"[object Uint16Array]"===te(e)}function f(e){return X&&Z?"Uint32Array"===ee(e):"[object Uint32Array]"===te(e)}function h(e){return X&&Z?"Int8Array"===ee(e):"[object Int8Array]"===te(e)}function d(e){return X&&Z?"Int16Array"===ee(e):"[object Int16Array]"===te(e)}function p(e){return X&&Z?"Int32Array"===ee(e):"[object Int32Array]"===te(e)}function m(e){return X&&Z?"Float32Array"===ee(e):"[object Float32Array]"===te(e)}function y(e){return X&&Z?"Float64Array"===ee(e):"[object Float64Array]"===te(e)}function g(e){return X&&Z?"BigInt64Array"===ee(e):"[object BigInt64Array]"===te(e)}function v(e){return X&&Z?"BigUint64Array"===ee(e):"[object BigUint64Array]"===te(e)}function b(e){return"[object Map]"===te(e)}function w(e){return"undefined"!=typeof Map&&(b.working?b(e):e instanceof Map)}function _(e){return"[object Set]"===te(e)}function k(e){return"undefined"!=typeof Set&&(_.working?_(e):e instanceof Set)}function S(e){return"[object WeakMap]"===te(e)}function A(e){return"undefined"!=typeof WeakMap&&(S.working?S(e):e instanceof WeakMap)}function M(e){return"[object WeakSet]"===te(e)}function x(e){return M(e)}function E(e){return"[object ArrayBuffer]"===te(e)}function B(e){return"undefined"!=typeof ArrayBuffer&&(E.working?E(e):e instanceof ArrayBuffer)}function j(e){return"[object DataView]"===te(e)}function C(e){return"undefined"!=typeof DataView&&(j.working?j(e):e instanceof DataView)}function T(e){return"[object SharedArrayBuffer]"===te(e)}function R(e){return"undefined"!=typeof SharedArrayBuffer&&(T.working?T(e):e instanceof SharedArrayBuffer)}function L(e){return"[object AsyncFunction]"===te(e)}function N(e){return"[object Map Iterator]"===te(e)}function I(e){return"[object Set Iterator]"===te(e)}function P(e){return"[object Generator]"===te(e)}function O(e){return"[object WebAssembly.Module]"===te(e)}function D(e){return i(e,re)}function z(e){return i(e,ne)}function K(e){return 
i(e,ie)}function G(e){return q&&i(e,ae)}function F(e){return Y&&i(e,oe)}function U(e){return D(e)||z(e)||K(e)||G(e)||F(e)}function J(e){return X&&(B(e)||R(e))}var H=r(173),W=r(174),V=r(175),q="undefined"!=typeof BigInt,Y="undefined"!=typeof Symbol,Z=Y&&"undefined"!=typeof Symbol.toStringTag,X="undefined"!=typeof Uint8Array,$="undefined"!=typeof ArrayBuffer;if(X&&Z)var Q=Object.getPrototypeOf(Uint8Array.prototype),ee=n(Object.getOwnPropertyDescriptor(Q,Symbol.toStringTag).get);var te=n(Object.prototype.toString),re=n(Number.prototype.valueOf),ne=n(String.prototype.valueOf),ie=n(Boolean.prototype.valueOf);if(q)var ae=n(BigInt.prototype.valueOf);if(Y)var oe=n(Symbol.prototype.valueOf);t.isArgumentsObject=W,t.isGeneratorFunction=V,t.isPromise=a,t.isArrayBufferView=o,t.isTypedArray=s,t.isUint8Array=u,t.isUint8ClampedArray=c,t.isUint16Array=l,t.isUint32Array=f,t.isInt8Array=h,t.isInt16Array=d,t.isInt32Array=p,t.isFloat32Array=m,t.isFloat64Array=y,t.isBigInt64Array=g,t.isBigUint64Array=v,b.working="undefined"!=typeof Map&&b(new Map),t.isMap=w,_.working="undefined"!=typeof Set&&_(new Set),t.isSet=k,S.working="undefined"!=typeof WeakMap&&S(new WeakMap),t.isWeakMap=A,M.working="undefined"!=typeof WeakSet&&M(new WeakSet),t.isWeakSet=x,E.working="undefined"!=typeof ArrayBuffer&&E(new ArrayBuffer),t.isArrayBuffer=B,j.working="undefined"!=typeof ArrayBuffer&&"undefined"!=typeof DataView&&j(new DataView(new ArrayBuffer(1),0,1)),t.isDataView=C,T.working="undefined"!=typeof SharedArrayBuffer&&T(new SharedArrayBuffer),t.isSharedArrayBuffer=R,t.isAsyncFunction=L,t.isMapIterator=N,t.isSetIterator=I,t.isGeneratorObject=P,t.isWebAssemblyCompiledModule=O,t.isNumberObject=D,t.isStringObject=z,t.isBooleanObject=K,t.isBigIntObject=G,t.isSymbolObject=F,t.isBoxedPrimitive=U,t.isAnyArrayBuffer=J,["isProxy","isExternal","isModuleNamespaceObject"].forEach(function(e){Object.defineProperty(t,e,{enumerable:!1,value:function(){throw new Error(e+" is not supported in 
userland")}})})},function(e,t){e.exports=function(e){return e&&"object"==typeof e&&"function"==typeof e.copy&&"function"==typeof e.fill&&"function"==typeof e.readUInt8}},function(e,t){"use strict";var r="function"==typeof Symbol&&"symbol"==typeof Symbol.toStringTag,n=Object.prototype.toString,i=function(e){return!(r&&e&&"object"==typeof e&&Symbol.toStringTag in e)&&"[object Arguments]"===n.call(e)},a=function(e){return!!i(e)||null!==e&&"object"==typeof e&&"number"==typeof e.length&&e.length>=0&&"[object Array]"!==n.call(e)&&"[object Function]"===n.call(e.callee)},o=function(){return i(arguments)}();i.isLegacyArguments=a,e.exports=o?i:a},function(e,t){"use strict";var r=Object.prototype.toString,n=Function.prototype.toString,i=/^\s*(?:function)?\*/,a="function"==typeof Symbol&&"symbol"==typeof Symbol.toStringTag,o=Object.getPrototypeOf,s=function(){if(!a)return!1;try{return Function("return function*() {}")()}catch(e){}},u=s(),c=u?o(u):{};e.exports=function(e){if("function"!=typeof e)return!1;if(i.test(n.call(e)))return!0;if(!a){var t=r.call(e);return"[object GeneratorFunction]"===t}return o(e)===c}},function(e,t){"function"==typeof Object.create?e.exports=function(e,t){t&&(e.super_=t,e.prototype=Object.create(t.prototype,{constructor:{value:e,enumerable:!1,writable:!0,configurable:!0}}))}:e.exports=function(e,t){if(t){e.super_=t;var r=function(){};r.prototype=t.prototype,e.prototype=new r,e.prototype.constructor=e}}},function(e,t,r){var n=r(170).inherits;e.exports=function(e,t){function r(){this.init(),this._w=d,t.call(this,64,56)}function i(e,t){return e>>>t|e<<32-t}function a(e,t){return e>>>t}function o(e,t,r){return e&t^~e&r}function s(e,t,r){return e&t^e&r^t&r}function u(e){return i(e,2)^i(e,13)^i(e,22)}function c(e){return i(e,6)^i(e,11)^i(e,25)}function l(e){return i(e,7)^i(e,18)^a(e,3)}function f(e){return i(e,17)^i(e,19)^a(e,10)}var 
h=[1116352408,1899447441,3049323471,3921009573,961987163,1508970993,2453635748,2870763221,3624381080,310598401,607225278,1426881987,1925078388,2162078206,2614888103,3248222580,3835390401,4022224774,264347078,604807628,770255983,1249150122,1555081692,1996064986,2554220882,2821834349,2952996808,3210313671,3336571891,3584528711,113926993,338241895,666307205,773529912,1294757372,1396182291,1695183700,1986661051,2177026350,2456956037,2730485921,2820302411,3259730800,3345764771,3516065817,3600352804,4094571909,275423344,430227734,506948616,659060556,883997877,958139571,1322822218,1537002063,1747873779,1955562222,2024104815,2227730452,2361852424,2428436474,2756734187,3204031479,3329325298],d=new Array(64);return n(r,t),r.prototype.init=function(){return this._a=1779033703,this._b=-1150833019,this._c=1013904242,this._d=-1521486534,this._e=1359893119,this._f=-1694144372,this._g=528734635,this._h=1541459225,this._len=this._s=0,this},r.prototype._update=function(e){var t,r,n,i,a,d,p,m,y,g,v=this._w;t=0|this._a,r=0|this._b,n=0|this._c,i=0|this._d,a=0|this._e,d=0|this._f,p=0|this._g,m=0|this._h;for(var b=0;b<64;b++){var w=v[b]=b<16?e.readInt32BE(4*b):f(v[b-2])+v[b-7]+l(v[b-15])+v[b-16];y=m+c(a)+o(a,d,p)+h[b]+w,g=u(t)+s(t,r,n),m=p,p=d,d=a,a=i+y,i=n,n=r,r=t,t=y+g}this._a=t+this._a|0,this._b=r+this._b|0,this._c=n+this._c|0,this._d=i+this._d|0,this._e=a+this._e|0,this._f=d+this._f|0,this._g=p+this._g|0,this._h=m+this._h|0},r.prototype._hash=function(){var t=new e(32);return t.writeInt32BE(this._a,0),t.writeInt32BE(this._b,4),t.writeInt32BE(this._c,8),t.writeInt32BE(this._d,12),t.writeInt32BE(this._e,16),t.writeInt32BE(this._f,20),t.writeInt32BE(this._g,24),t.writeInt32BE(this._h,28),t},r}},function(e,t,r){var n=r(170).inherits;e.exports=function(e,t){function r(){this.init(),this._w=u,t.call(this,128,112)}function i(e,t,r){return e>>>r|t<<32-r}function a(e,t,r){return e&t^~e&r}function o(e,t,r){return e&t^e&r^t&r}var 
s=[1116352408,3609767458,1899447441,602891725,3049323471,3964484399,3921009573,2173295548,961987163,4081628472,1508970993,3053834265,2453635748,2937671579,2870763221,3664609560,3624381080,2734883394,310598401,1164996542,607225278,1323610764,1426881987,3590304994,1925078388,4068182383,2162078206,991336113,2614888103,633803317,3248222580,3479774868,3835390401,2666613458,4022224774,944711139,264347078,2341262773,604807628,2007800933,770255983,1495990901,1249150122,1856431235,1555081692,3175218132,1996064986,2198950837,2554220882,3999719339,2821834349,766784016,2952996808,2566594879,3210313671,3203337956,3336571891,1034457026,3584528711,2466948901,113926993,3758326383,338241895,168717936,666307205,1188179964,773529912,1546045734,1294757372,1522805485,1396182291,2643833823,1695183700,2343527390,1986661051,1014477480,2177026350,1206759142,2456956037,344077627,2730485921,1290863460,2820302411,3158454273,3259730800,3505952657,3345764771,106217008,3516065817,3606008344,3600352804,1432725776,4094571909,1467031594,275423344,851169720,430227734,3100823752,506948616,1363258195,659060556,3750685593,883997877,3785050280,958139571,3318307427,1322822218,3812723403,1537002063,2003034995,1747873779,3602036899,1955562222,1575990012,2024104815,1125592928,2227730452,2716904306,2361852424,442776044,2428436474,593698344,2756734187,3733110249,3204031479,2999351573,3329325298,3815920427,3391569614,3928383900,3515267271,566280711,3940187606,3454069534,4118630271,4000239992,116418474,1914138554,174292421,2731055270,289380356,3203993006,460393269,320620315,685471733,587496836,852142971,1086792851,1017036298,365543100,1126000580,2618297676,1288033470,3409855158,1501505948,4234509866,1607167915,987167468,1816402316,1246189591],u=new Array(160);return n(r,t),r.prototype.init=function(){return 
this._a=1779033703,this._b=-1150833019,this._c=1013904242,this._d=-1521486534,this._e=1359893119,this._f=-1694144372,this._g=528734635,this._h=1541459225,this._al=-205731576,this._bl=-2067093701,this._cl=-23791573,this._dl=1595750129,this._el=-1377402159,this._fl=725511199,this._gl=-79577749,this._hl=327033209,this._len=this._s=0,this},r.prototype._update=function(e){var t,r,n,u,c,l,f,h,d,p,m,y,g,v,b,w,_=this._w;t=0|this._a,r=0|this._b,n=0|this._c,u=0|this._d,c=0|this._e,l=0|this._f,f=0|this._g,h=0|this._h,d=0|this._al,p=0|this._bl,m=0|this._cl,y=0|this._dl,g=0|this._el,v=0|this._fl,b=0|this._gl,w=0|this._hl;for(var k=0;k<80;k++){var S,A,M=2*k;if(k<16)S=_[M]=e.readInt32BE(4*M),A=_[M+1]=e.readInt32BE(4*M+4);else{var x=_[M-30],E=_[M-30+1],B=i(x,E,1)^i(x,E,8)^x>>>7,j=i(E,x,1)^i(E,x,8)^i(E,x,7);x=_[M-4],E=_[M-4+1];var C=i(x,E,19)^i(E,x,29)^x>>>6,T=i(E,x,19)^i(x,E,29)^i(E,x,6),R=_[M-14],L=_[M-14+1],N=_[M-32],I=_[M-32+1];A=j+L,S=B+R+(A>>>0<j>>>0?1:0),A+=T,S=S+C+(A>>>0<T>>>0?1:0),A+=I,S=S+N+(A>>>0<I>>>0?1:0),_[M]=S,_[M+1]=A}var P=o(t,r,n),O=o(d,p,m),D=i(t,d,28)^i(d,t,2)^i(d,t,7),z=i(d,t,28)^i(t,d,2)^i(t,d,7),K=i(c,g,14)^i(c,g,18)^i(g,c,9),G=i(g,c,14)^i(g,c,18)^i(c,g,9),F=s[M],U=s[M+1],J=a(c,l,f),H=a(g,v,b),W=w+G,V=h+K+(W>>>0<w>>>0?1:0);W+=H,V=V+J+(W>>>0<H>>>0?1:0),W+=U,V=V+F+(W>>>0<U>>>0?1:0),W+=A,V=V+S+(W>>>0<A>>>0?1:0);var 
q=z+O,Y=D+P+(q>>>0<z>>>0?1:0);h=f,w=b,f=l,b=v,l=c,v=g,g=y+W|0,c=u+V+(g>>>0<y>>>0?1:0)|0,u=n,y=m,n=r,m=p,r=t,p=d,d=W+q|0,t=V+Y+(d>>>0<W>>>0?1:0)|0}this._al=this._al+d|0,this._bl=this._bl+p|0,this._cl=this._cl+m|0,this._dl=this._dl+y|0,this._el=this._el+g|0,this._fl=this._fl+v|0,this._gl=this._gl+b|0,this._hl=this._hl+w|0,this._a=this._a+t+(this._al>>>0<d>>>0?1:0)|0,this._b=this._b+r+(this._bl>>>0<p>>>0?1:0)|0,this._c=this._c+n+(this._cl>>>0<m>>>0?1:0)|0,this._d=this._d+u+(this._dl>>>0<y>>>0?1:0)|0,this._e=this._e+c+(this._el>>>0<g>>>0?1:0)|0,this._f=this._f+l+(this._fl>>>0<v>>>0?1:0)|0,this._g=this._g+f+(this._gl>>>0<b>>>0?1:0)|0,this._h=this._h+h+(this._hl>>>0<w>>>0?1:0)|0},r.prototype._hash=function(){function t(e,t,n){r.writeInt32BE(e,n),r.writeInt32BE(t,n+4)}var r=new e(64);return t(this._a,this._al,0),t(this._b,this._bl,8),t(this._c,this._cl,16),t(this._d,this._dl,24),t(this._e,this._el,32),t(this._f,this._fl,40),t(this._g,this._gl,48),t(this._h,this._hl,56),r},r}},function(e,t,r){function n(e,t){e[t>>5]|=128<<t%32,e[(t+64>>>9<<4)+14]=t;for(var r=1732584193,n=-271733879,i=-1732584194,l=271733878,f=0;f<e.length;f+=16){var 
h=r,d=n,p=i,m=l;r=a(r,n,i,l,e[f+0],7,-680876936),l=a(l,r,n,i,e[f+1],12,-389564586),i=a(i,l,r,n,e[f+2],17,606105819),n=a(n,i,l,r,e[f+3],22,-1044525330),r=a(r,n,i,l,e[f+4],7,-176418897),l=a(l,r,n,i,e[f+5],12,1200080426),i=a(i,l,r,n,e[f+6],17,-1473231341),n=a(n,i,l,r,e[f+7],22,-45705983),r=a(r,n,i,l,e[f+8],7,1770035416),l=a(l,r,n,i,e[f+9],12,-1958414417),i=a(i,l,r,n,e[f+10],17,-42063),n=a(n,i,l,r,e[f+11],22,-1990404162),r=a(r,n,i,l,e[f+12],7,1804603682),l=a(l,r,n,i,e[f+13],12,-40341101),i=a(i,l,r,n,e[f+14],17,-1502002290),n=a(n,i,l,r,e[f+15],22,1236535329),r=o(r,n,i,l,e[f+1],5,-165796510),l=o(l,r,n,i,e[f+6],9,-1069501632),i=o(i,l,r,n,e[f+11],14,643717713),n=o(n,i,l,r,e[f+0],20,-373897302),r=o(r,n,i,l,e[f+5],5,-701558691),l=o(l,r,n,i,e[f+10],9,38016083),i=o(i,l,r,n,e[f+15],14,-660478335),n=o(n,i,l,r,e[f+4],20,-405537848),r=o(r,n,i,l,e[f+9],5,568446438),l=o(l,r,n,i,e[f+14],9,-1019803690),i=o(i,l,r,n,e[f+3],14,-187363961),n=o(n,i,l,r,e[f+8],20,1163531501),r=o(r,n,i,l,e[f+13],5,-1444681467),l=o(l,r,n,i,e[f+2],9,-51403784),i=o(i,l,r,n,e[f+7],14,1735328473),n=o(n,i,l,r,e[f+12],20,-1926607734),r=s(r,n,i,l,e[f+5],4,-378558),l=s(l,r,n,i,e[f+8],11,-2022574463),i=s(i,l,r,n,e[f+11],16,1839030562),n=s(n,i,l,r,e[f+14],23,-35309556),r=s(r,n,i,l,e[f+1],4,-1530992060),l=s(l,r,n,i,e[f+4],11,1272893353),i=s(i,l,r,n,e[f+7],16,-155497632),n=s(n,i,l,r,e[f+10],23,-1094730640),r=s(r,n,i,l,e[f+13],4,681279174),l=s(l,r,n,i,e[f+0],11,-358537222),i=s(i,l,r,n,e[f+3],16,-722521979),n=s(n,i,l,r,e[f+6],23,76029189),r=s(r,n,i,l,e[f+9],4,-640364487),l=s(l,r,n,i,e[f+12],11,-421815835),i=s(i,l,r,n,e[f+15],16,530742520),n=s(n,i,l,r,e[f+2],23,-995338651),r=u(r,n,i,l,e[f+0],6,-198630844),l=u(l,r,n,i,e[f+7],10,1126891415),i=u(i,l,r,n,e[f+14],15,-1416354905),n=u(n,i,l,r,e[f+5],21,-57434055),r=u(r,n,i,l,e[f+12],6,1700485571),l=u(l,r,n,i,e[f+3],10,-1894986606),i=u(i,l,r,n,e[f+10],15,-1051523),n=u(n,i,l,r,e[f+1],21,-2054922799),r=u(r,n,i,l,e[f+8],6,1873313359),l=u(l,r,n,i,e[f+15],10,-30611744),i=u(i,l,r,n,e[f+6]
,15,-1560198380),n=u(n,i,l,r,e[f+13],21,1309151649),r=u(r,n,i,l,e[f+4],6,-145523070),l=u(l,r,n,i,e[f+11],10,-1120210379),i=u(i,l,r,n,e[f+2],15,718787259),n=u(n,i,l,r,e[f+9],21,-343485551),r=c(r,h),n=c(n,d),i=c(i,p),l=c(l,m)}return Array(r,n,i,l)}function i(e,t,r,n,i,a){return c(l(c(c(t,e),c(n,a)),i),r)}function a(e,t,r,n,a,o,s){return i(t&r|~t&n,e,t,a,o,s)}function o(e,t,r,n,a,o,s){return i(t&n|r&~n,e,t,a,o,s)}function s(e,t,r,n,a,o,s){return i(t^r^n,e,t,a,o,s)}function u(e,t,r,n,a,o,s){return i(r^(t|~n),e,t,a,o,s)}function c(e,t){var r=(65535&e)+(65535&t),n=(e>>16)+(t>>16)+(r>>16);return n<<16|65535&r}function l(e,t){return e<<t|e>>>32-t}var f=r(180);e.exports=function(e){return f.hash(e,n,16)}},function(e,t,r){(function(t){function r(e,r){if(e.length%a!==0){var n=e.length+(a-e.length%a);e=t.concat([e,o],n)}for(var i=[],s=r?e.readInt32BE:e.readInt32LE,u=0;u<e.length;u+=a)i.push(s.call(e,u));return i}function n(e,r,n){for(var i=new t(r),a=n?i.writeInt32BE:i.writeInt32LE,o=0;o<e.length;o++)a.call(i,e[o],4*o,!0);return i}function i(e,i,a,o){t.isBuffer(e)||(e=new t(e));var u=i(r(e,o),e.length*s);return n(u,a,o)}var a=4,o=new t(a);o.fill(0);var s=8;e.exports={hash:i}}).call(t,r(160).Buffer)},function(e,t,r){(function(t){function r(e,t,r){return e^t^r}function n(e,t,r){return e&t|~e&r}function i(e,t,r){return(e|~t)^r}function a(e,t,r){return e&r|t&~r}function o(e,t,r){return e^(t|~r)}function s(e,t){return e<<t|e>>>32-t}function u(e){var r=[1732584193,4023233417,2562383102,271733878,3285377520];"string"==typeof e&&(e=new t(e,"utf8"));var n=m(e),i=8*e.length,a=8*e.length;n[i>>>5]|=128<<24-i%32,n[(i+64>>>9<<4)+14]=16711935&(a<<8|a>>>24)|4278255360&(a<<24|a>>>8);for(var o=0;o<n.length;o+=16)g(r,n,o);for(var o=0;o<5;o++){var s=r[o];r[o]=16711935&(s<<8|s>>>24)|4278255360&(s<<24|s>>>8)}var u=y(r);return new t(u)}e.exports=u;/** @preserve
(c) 2012 by Cédric Mesnil. All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
| THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
var c=[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,7,4,13,1,10,6,15,3,12,0,9,5,2,14,11,8,3,10,14,4,9,15,8,1,2,7,0,6,13,11,5,12,1,9,11,10,0,8,12,4,13,3,7,15,14,5,6,2,4,0,5,9,7,12,2,10,14,1,3,8,11,6,15,13],l=[5,14,7,0,9,2,11,4,13,6,15,8,1,10,3,12,6,11,3,7,0,13,5,10,14,15,8,12,4,9,1,2,15,5,1,3,7,14,6,9,11,8,12,2,10,0,4,13,8,6,4,1,3,11,15,0,5,12,2,13,9,7,10,14,12,15,10,4,1,5,8,7,6,2,13,14,0,3,9,11],f=[11,14,15,12,5,8,7,9,11,13,14,15,6,7,9,8,7,6,8,13,11,9,7,15,7,12,15,9,11,7,13,12,11,13,6,7,14,9,13,15,14,8,13,6,5,12,7,5,11,12,14,15,14,15,9,8,9,14,5,6,8,6,5,12,9,15,5,11,6,8,13,12,5,12,13,14,11,8,5,6],h=[8,9,9,11,13,15,15,5,7,7,8,11,14,14,12,6,9,13,15,7,12,8,9,11,7,7,12,7,6,15,13,11,9,7,15,11,8,6,6,14,12,13,5,14,13,13,7,5,15,5,8,11,14,14,6,14,6,9,12,9,12,5,15,8,8,5,12,9,12,5,14,6,8,13,6,5,15,13,11,11],d=[0,1518500249,1859775393,2400959708,2840853838],p=[1352829926,1548603684,1836072691,2053994217,0],m=function(e){for(var t=[],r=0,n=0;r<e.length;r++,n+=8)t[n>>>5]|=e[r]<<24-n%32;return t},y=function(e){for(var t=[],r=0;r<32*e.length;r+=8)t.push(e[r>>>5]>>>24-r%32&255);return t},g=function(e,t,u){for(var m=0;m<16;m++){var y=u+m,g=t[y];t[y]=16711935&(g<<8|g>>>24)|4278255360&(g<<24|g>>>8)}var v,b,w,_,k,S,A,M,x,E;S=v=e[0],A=b=e[1],M=w=e[2],x=_=e[3],E=k=e[4];for(var B,m=0;m<80;m+=1)B=v+t[u+c[m]]|0,B+=m<16?r(b,w,_)+d[0]:m<32?n(b,w,_)+d[1]:m<48?i(b,w,_)+d[2]:m<64?a(b,w,_)+d[3]:o(b,w,_)+d[4],B|=0,B=s(B,f[m]),B=B+k|0,v=k,k=_,_=s(w,10),w=b,b=B,B=S+t[u+l[m]]|0,B+=m<16?o(A,M,x)+p[0]:m<32?a(A,M,x)+p[1]:m<48?i(A,M,x)+p[2]:m<64?n(A,M,x)+p[3]:r(A,M,x)+p[4],B|=0,B=s(B,h[m]),B=B+E|0,S=E,E=x,x=s(M,10),M=A,A=B;B=e[1]+w+x|0,e[1]=e[2]+_+E|0,e[2]=e[3]+k+S|0,e[3]=e[4]+v+A|0,e[4]=e[0]+b+M|0,e[0]=B}}).call(t,r(160).Buffer)},function(e,t,r){(function(t){function n(e,r){if(!(this instanceof n))return new n(e,r);this._opad=u,this._alg=e;var o="sha512"===e?128:64;r=this._key=t.isBuffer(r)?r:new t(r),r.length>o?r=i(e).update(r).digest():r.length<o&&(r=t.concat([r,a],o));for(var s=this._ipad=new 
t(o),u=this._opad=new t(o),c=0;c<o;c++)s[c]=54^r[c],u[c]=92^r[c];this._hash=i(e).update(s)}var i=r(166),a=new t(128);a.fill(0),e.exports=n,n.prototype.update=function(e,t){return this._hash.update(e,t),this},n.prototype.digest=function(e){var t=this._hash.digest();return i(this._alg).update(this._opad).update(t).digest(e)}}).call(t,r(160).Buffer)},function(e,t,r){var n=r(184);e.exports=function(e,t){t=t||{};var r=n(e);return t.pbkdf2=r.pbkdf2,t.pbkdf2Sync=r.pbkdf2Sync,t}},function(e,t,r){(function(t){e.exports=function(e){function r(e,t,r,i,a,o){if("function"==typeof a&&(o=a,a=void 0),"function"!=typeof o)throw new Error("No callback provided to pbkdf2");setTimeout(function(){var s;try{s=n(e,t,r,i,a)}catch(e){return o(e)}o(void 0,s)})}function n(r,n,i,a,o){if("number"!=typeof i)throw new TypeError("Iterations not a number");if(i<0)throw new TypeError("Bad iterations");if("number"!=typeof a)throw new TypeError("Key length not a number");if(a<0)throw new TypeError("Bad key length");o=o||"sha1",t.isBuffer(r)||(r=new t(r)),t.isBuffer(n)||(n=new t(n));var s,u,c,l=1,f=new t(a),h=new t(n.length+4);n.copy(h,0,0,n.length);for(var d=1;d<=l;d++){h.writeUInt32BE(d,n.length);var p=e.createHmac(o,r).update(h).digest();if(!s&&(s=p.length,c=new t(s),l=Math.ceil(a/s),u=a-(l-1)*s,a>(Math.pow(2,32)-1)*s))throw new TypeError("keylen exceeds maximum length");p.copy(c,0,0,s);for(var m=1;m<i;m++){p=e.createHmac(o,r).update(p).digest();for(var y=0;y<s;y++)c[y]^=p[y]}var g=(d-1)*s,v=d==l?u:s;c.copy(f,g,0,v)}return f}return{pbkdf2:r,pbkdf2Sync:n}}}).call(t,r(160).Buffer)},function(e,t,r){e.exports=function(e,t){function n(){return Object.keys(o)}t=t||{};var i=r(186)(e);t.createCipher=i.createCipher,t.createCipheriv=i.createCipheriv;var a=r(217)(e);t.createDecipher=a.createDecipher,t.createDecipheriv=a.createDecipheriv;var o=r(208);t.listCiphers=n}},function(e,t,r){(function(t){function n(e,r,s){return this instanceof n?(o.call(this),this._cache=new i,this._cipher=new a.AES(r),this._prev=new 
t(s.length),s.copy(this._prev),void(this._mode=e)):new n(e,r,s)}function i(){return this instanceof i?void(this.cache=new t("")):new i}var a=r(187),o=r(188),s=r(176),u=r(208),c=r(209),l=r(210);s(n,o),n.prototype._transform=function(e,t,r){this._cache.add(e);for(var n,i;n=this._cache.get();)i=this._mode.encrypt(this,n),this.push(i);r()},n.prototype._flush=function(e){var t=this._cache.flush();this.push(this._mode.encrypt(this,t)),this._cipher.scrub(),e()},i.prototype.add=function(e){this.cache=t.concat([this.cache,e])},i.prototype.get=function(){if(this.cache.length>15){var e=this.cache.slice(0,16);return this.cache=this.cache.slice(16),e}return null},i.prototype.flush=function(){for(var e=16-this.cache.length,r=new t(e),n=-1;++n<e;)r.writeUInt8(e,n);var i=t.concat([this.cache,r]);return i};var f={ECB:r(211),CBC:r(212),CFB:r(214),OFB:r(215),CTR:r(216)};e.exports=function(e){function r(e,r,i){var a=u[e];if(!a)throw new TypeError("invalid suite type");if("string"==typeof i&&(i=new t(i)),"string"==typeof r&&(r=new t(r)),r.length!==a.key/8)throw new TypeError("invalid key length "+r.length);if(i.length!==a.iv)throw new TypeError("invalid iv length "+i.length);return"stream"===a.type?new l(f[a.mode],r,i):new n(f[a.mode],r,i)}function i(t,n){var i=u[t];if(!i)throw new TypeError("invalid suite type");var a=c(e,n,i.key,i.iv);return r(t,a.key,a.iv)}return{createCipher:i,createCipheriv:r}}}).call(t,r(160).Buffer)},function(e,t,r){(function(e){function r(e){var t,r;return t=e>s||e<0?(r=Math.abs(e)%s,e<0?s-r:r):e}function n(e){var t,r,n;for(t=r=0,n=e.length;0<=n?r<n:r>n;t=0<=n?++r:--r)e[t]=0;return!1}function i(){var e;this.SBOX=[],this.INV_SBOX=[],this.SUB_MIX=function(){var t,r;for(r=[],e=t=0;t<4;e=++t)r.push([]);return r}(),this.INV_SUB_MIX=function(){var t,r;for(r=[],e=t=0;t<4;e=++t)r.push([]);return r}(),this.init(),this.RCON=[0,1,2,4,8,16,32,64,128,27,54]}function a(e){for(var t=e.length/4,r=new Array(t),n=-1;++n<t;)r[n]=e.readUInt32BE(4*n);return r}function 
o(e){this._key=a(e),this._doReset()}var s=Math.pow(2,32);i.prototype.init=function(){var e,t,r,n,i,a,o,s,u,c;for(e=function(){var e,r;for(r=[],t=e=0;e<256;t=++e)t<128?r.push(t<<1):r.push(t<<1^283);return r}(),i=0,u=0,t=c=0;c<256;t=++c)r=u^u<<1^u<<2^u<<3^u<<4,r=r>>>8^255&r^99,this.SBOX[i]=r,this.INV_SBOX[r]=i,a=e[i],o=e[a],s=e[o],n=257*e[r]^16843008*r,this.SUB_MIX[0][i]=n<<24|n>>>8,this.SUB_MIX[1][i]=n<<16|n>>>16,this.SUB_MIX[2][i]=n<<8|n>>>24,this.SUB_MIX[3][i]=n,n=16843009*s^65537*o^257*a^16843008*i,this.INV_SUB_MIX[0][r]=n<<24|n>>>8,this.INV_SUB_MIX[1][r]=n<<16|n>>>16,this.INV_SUB_MIX[2][r]=n<<8|n>>>24,this.INV_SUB_MIX[3][r]=n,0===i?i=u=1:(i=a^e[e[e[s^a]]],u^=e[e[u]]);return!0};var u=new i;o.blockSize=16,o.prototype.blockSize=o.blockSize,o.keySize=32,o.prototype.keySize=o.keySize,o.ivSize=o.blockSize,o.prototype.ivSize=o.ivSize,o.prototype._doReset=function(){var e,t,r,n,i,a,o,s;for(r=this._key,t=r.length,this._nRounds=t+6,i=4*(this._nRounds+1),this._keySchedule=[],n=o=0;0<=i?o<i:o>i;n=0<=i?++o:--o)this._keySchedule[n]=n<t?r[n]:(a=this._keySchedule[n-1],n%t===0?(a=a<<8|a>>>24,a=u.SBOX[a>>>24]<<24|u.SBOX[a>>>16&255]<<16|u.SBOX[a>>>8&255]<<8|u.SBOX[255&a],a^=u.RCON[n/t|0]<<24):t>6&&n%t===4?a=u.SBOX[a>>>24]<<24|u.SBOX[a>>>16&255]<<16|u.SBOX[a>>>8&255]<<8|u.SBOX[255&a]:void 0,this._keySchedule[n-t]^a);for(this._invKeySchedule=[],e=s=0;0<=i?s<i:s>i;e=0<=i?++s:--s)n=i-e,a=this._keySchedule[n-(e%4?0:4)],this._invKeySchedule[e]=e<4||n<=4?a:u.INV_SUB_MIX[0][u.SBOX[a>>>24]]^u.INV_SUB_MIX[1][u.SBOX[a>>>16&255]]^u.INV_SUB_MIX[2][u.SBOX[a>>>8&255]]^u.INV_SUB_MIX[3][u.SBOX[255&a]];return!0},o.prototype.encryptBlock=function(t){t=a(new e(t));var r=this._doCryptBlock(t,this._keySchedule,u.SUB_MIX,u.SBOX),n=new e(16);return n.writeUInt32BE(r[0],0),n.writeUInt32BE(r[1],4),n.writeUInt32BE(r[2],8),n.writeUInt32BE(r[3],12),n},o.prototype.decryptBlock=function(t){t=a(new e(t));var r=[t[3],t[1]];t[1]=r[0],t[3]=r[1];var 
n=this._doCryptBlock(t,this._invKeySchedule,u.INV_SUB_MIX,u.INV_SBOX),i=new e(16);return i.writeUInt32BE(n[0],0),i.writeUInt32BE(n[3],4),i.writeUInt32BE(n[2],8),i.writeUInt32BE(n[1],12),i},o.prototype.scrub=function(){n(this._keySchedule),n(this._invKeySchedule),n(this._key)},o.prototype._doCryptBlock=function(e,t,n,i){var a,o,s,u,c,l,f,h,d,p,m,y;for(s=e[0]^t[0],u=e[1]^t[1],c=e[2]^t[2],l=e[3]^t[3],a=4,o=m=1,y=this._nRounds;1<=y?m<y:m>y;o=1<=y?++m:--m)f=n[0][s>>>24]^n[1][u>>>16&255]^n[2][c>>>8&255]^n[3][255&l]^t[a++],h=n[0][u>>>24]^n[1][c>>>16&255]^n[2][l>>>8&255]^n[3][255&s]^t[a++],d=n[0][c>>>24]^n[1][l>>>16&255]^n[2][s>>>8&255]^n[3][255&u]^t[a++],p=n[0][l>>>24]^n[1][s>>>16&255]^n[2][u>>>8&255]^n[3][255&c]^t[a++],s=f,u=h,c=d,l=p;return f=(i[s>>>24]<<24|i[u>>>16&255]<<16|i[c>>>8&255]<<8|i[255&l])^t[a++],h=(i[u>>>24]<<24|i[c>>>16&255]<<16|i[l>>>8&255]<<8|i[255&s])^t[a++],d=(i[c>>>24]<<24|i[l>>>16&255]<<16|i[s>>>8&255]<<8|i[255&u])^t[a++],p=(i[l>>>24]<<24|i[s>>>16&255]<<16|i[u>>>8&255]<<8|i[255&c])^t[a++],[r(f),r(h),r(d),r(p)]},t.AES=o}).call(t,r(160).Buffer)},function(e,t,r){(function(t){function n(){i.call(this)}var i=r(189).Transform,a=r(176);e.exports=n,a(n,i),n.prototype.update=function(e,r,n){this.write(e,r);for(var i,a=new t("");i=this.read();)a=t.concat([a,i]);return n&&(a=a.toString(n)),a},n.prototype.final=function(e){this.end();for(var r,n=new t("");r=this.read();)n=t.concat([n,r]);return e&&(n=n.toString(e)),n}}).call(t,r(160).Buffer)},function(e,t,r){function n(){i.call(this)}e.exports=n;var i=r(190).EventEmitter,a=r(176);a(n,i),n.Readable=r(191),n.Writable=r(204),n.Duplex=r(205),n.Transform=r(206),n.PassThrough=r(207),n.Stream=n,n.prototype.pipe=function(e,t){function r(t){e.writable&&!1===e.write(t)&&c.pause&&c.pause()}function n(){c.readable&&c.resume&&c.resume()}function a(){l||(l=!0,e.end())}function o(){l||(l=!0,"function"==typeof e.destroy&&e.destroy())}function s(e){if(u(),0===i.listenerCount(this,"error"))throw e}function 
u(){c.removeListener("data",r),e.removeListener("drain",n),c.removeListener("end",a),c.removeListener("close",o),c.removeListener("error",s),e.removeListener("error",s),c.removeListener("end",u),c.removeListener("close",u),e.removeListener("close",u)}var c=this;c.on("data",r),e.on("drain",n),e._isStdio||t&&t.end===!1||(c.on("end",a),c.on("close",o));var l=!1;return c.on("error",s),e.on("error",s),c.on("end",u),c.on("close",u),e.on("close",u),e.emit("pipe",c),e}},function(e,t){function r(){this._events=this._events||{},this._maxListeners=this._maxListeners||void 0}function n(e){return"function"==typeof e}function i(e){return"number"==typeof e}function a(e){return"object"==typeof e&&null!==e}function o(e){return void 0===e}e.exports=r,r.EventEmitter=r,r.prototype._events=void 0,r.prototype._maxListeners=void 0,r.defaultMaxListeners=10,r.prototype.setMaxListeners=function(e){if(!i(e)||e<0||isNaN(e))throw TypeError("n must be a positive number");return this._maxListeners=e,this},r.prototype.emit=function(e){var t,r,i,s,u,c;if(this._events||(this._events={}),"error"===e&&(!this._events.error||a(this._events.error)&&!this._events.error.length)){if(t=arguments[1],t instanceof Error)throw t;var l=new Error('Uncaught, unspecified "error" event. 
('+t+")");throw l.context=t,l}if(r=this._events[e],o(r))return!1;if(n(r))switch(arguments.length){case 1:r.call(this);break;case 2:r.call(this,arguments[1]);break;case 3:r.call(this,arguments[1],arguments[2]);break;default:s=Array.prototype.slice.call(arguments,1),r.apply(this,s)}else if(a(r))for(s=Array.prototype.slice.call(arguments,1),c=r.slice(),i=c.length,u=0;u<i;u++)c[u].apply(this,s);return!0},r.prototype.addListener=function(e,t){var i;if(!n(t))throw TypeError("listener must be a function");return this._events||(this._events={}),this._events.newListener&&this.emit("newListener",e,n(t.listener)?t.listener:t),this._events[e]?a(this._events[e])?this._events[e].push(t):this._events[e]=[this._events[e],t]:this._events[e]=t,a(this._events[e])&&!this._events[e].warned&&(i=o(this._maxListeners)?r.defaultMaxListeners:this._maxListeners,i&&i>0&&this._events[e].length>i&&(this._events[e].warned=!0,console.error("(node) warning: possible EventEmitter memory leak detected. %d listeners added. 
Use emitter.setMaxListeners() to increase limit.",this._events[e].length),"function"==typeof console.trace&&console.trace())),this},r.prototype.on=r.prototype.addListener,r.prototype.once=function(e,t){function r(){this.removeListener(e,r),i||(i=!0,t.apply(this,arguments))}if(!n(t))throw TypeError("listener must be a function");var i=!1;return r.listener=t,this.on(e,r),this},r.prototype.removeListener=function(e,t){var r,i,o,s;if(!n(t))throw TypeError("listener must be a function");if(!this._events||!this._events[e])return this;if(r=this._events[e],o=r.length,i=-1,r===t||n(r.listener)&&r.listener===t)delete this._events[e],this._events.removeListener&&this.emit("removeListener",e,t);else if(a(r)){for(s=o;s-- >0;)if(r[s]===t||r[s].listener&&r[s].listener===t){i=s;break}if(i<0)return this;1===r.length?(r.length=0,delete this._events[e]):r.splice(i,1),this._events.removeListener&&this.emit("removeListener",e,t)}return this},r.prototype.removeAllListeners=function(e){var t,r;if(!this._events)return this;if(!this._events.removeListener)return 0===arguments.length?this._events={}:this._events[e]&&delete this._events[e],this;if(0===arguments.length){for(t in this._events)"removeListener"!==t&&this.removeAllListeners(t);return this.removeAllListeners("removeListener"),this._events={},this}if(r=this._events[e],n(r))this.removeListener(e,r);else if(r)for(;r.length;)this.removeListener(e,r[r.length-1]);return delete this._events[e],this},r.prototype.listeners=function(e){var t;return t=this._events&&this._events[e]?n(this._events[e])?[this._events[e]]:this._events[e].slice():[]},r.prototype.listenerCount=function(e){if(this._events){var t=this._events[e];if(n(t))return 1;if(t)return t.length}return 0},r.listenerCount=function(e,t){return e.listenerCount(t)}},function(e,t,r){var n=function(){try{return r(189)}catch(e){}}();t=e.exports=r(192),t.Stream=n||t,t.Readable=t,t.Writable=r(197),t.Duplex=r(196),t.Transform=r(202),t.PassThrough=r(203)},function(e,t,r){(function(t){"use 
strict";function n(e,t){N=N||r(196),e=e||{},this.objectMode=!!e.objectMode,t instanceof N&&(this.objectMode=this.objectMode||!!e.readableObjectMode);var n=e.highWaterMark,i=this.objectMode?16:16384;this.highWaterMark=n||0===n?n:i,this.highWaterMark=~~this.highWaterMark,this.buffer=[],this.length=0,this.pipes=null,this.pipesCount=0,this.flowing=null,this.ended=!1,this.endEmitted=!1,this.reading=!1,this.sync=!0,this.needReadable=!1,this.emittedReadable=!1,this.readableListening=!1,this.resumeScheduled=!1,this.defaultEncoding=e.defaultEncoding||"utf8",this.ranOut=!1,this.awaitDrain=0,this.readingMore=!1,this.decoder=null,this.encoding=null,e.encoding&&(L||(L=r(201).StringDecoder),this.decoder=new L(e.encoding),this.encoding=e.encoding)}function i(e){return N=N||r(196),this instanceof i?(this._readableState=new n(e,this),this.readable=!0,e&&"function"==typeof e.read&&(this._read=e.read),void B.call(this)):new i(e)}function a(e,t,r,n,i){var a=c(t,r);if(a)e.emit("error",a);else if(null===r)t.reading=!1,l(e,t);else if(t.objectMode||r&&r.length>0)if(t.ended&&!i){var s=new Error("stream.push() after EOF");e.emit("error",s)}else if(t.endEmitted&&i){var s=new Error("stream.unshift() after end event");e.emit("error",s)}else{var u;!t.decoder||i||n||(r=t.decoder.write(r),u=!t.objectMode&&0===r.length),i||(t.reading=!1),u||(t.flowing&&0===t.length&&!t.sync?(e.emit("data",r),e.read(0)):(t.length+=t.objectMode?1:r.length,i?t.buffer.unshift(r):t.buffer.push(r),t.needReadable&&f(e))),d(e,t)}else i||(t.reading=!1);return o(t)}function o(e){return!e.ended&&(e.needReadable||e.length<e.highWaterMark||0===e.length)}function s(e){return e>=I?e=I:(e--,e|=e>>>1,e|=e>>>2,e|=e>>>4,e|=e>>>8,e|=e>>>16,e++),e}function u(e,t){return 0===t.length&&t.ended?0:t.objectMode?0===e?0:1:null===e||isNaN(e)?t.flowing&&t.buffer.length?t.buffer[0].length:t.length:e<=0?0:(e>t.highWaterMark&&(t.highWaterMark=s(e)),e>t.length?t.ended?t.length:(t.needReadable=!0,0):e)}function c(e,t){var r=null;return 
E.isBuffer(t)||"string"==typeof t||null===t||void 0===t||e.objectMode||(r=new TypeError("Invalid non-string/buffer chunk")),r}function l(e,t){if(!t.ended){if(t.decoder){var r=t.decoder.end();r&&r.length&&(t.buffer.push(r),t.length+=t.objectMode?1:r.length)}t.ended=!0,f(e)}}function f(e){var t=e._readableState;t.needReadable=!1,t.emittedReadable||(R("emitReadable",t.flowing),t.emittedReadable=!0,t.sync?M(h,e):h(e))}function h(e){R("emit readable"),e.emit("readable"),b(e)}function d(e,t){t.readingMore||(t.readingMore=!0,M(p,e,t))}function p(e,t){for(var r=t.length;!t.reading&&!t.flowing&&!t.ended&&t.length<t.highWaterMark&&(R("maybeReadMore read 0"),e.read(0),r!==t.length);)r=t.length;t.readingMore=!1}function m(e){return function(){var t=e._readableState;R("pipeOnDrain",t.awaitDrain),t.awaitDrain&&t.awaitDrain--,0===t.awaitDrain&&j(e,"data")&&(t.flowing=!0,b(e))}}function y(e){R("readable nexttick read 0"),e.read(0)}function g(e,t){t.resumeScheduled||(t.resumeScheduled=!0,M(v,e,t))}function v(e,t){t.reading||(R("resume read 0"),e.read(0)),t.resumeScheduled=!1,e.emit("resume"),b(e),t.flowing&&!t.reading&&e.read(0)}function b(e){var t=e._readableState;if(R("flow",t.flowing),t.flowing)do var r=e.read();while(null!==r&&t.flowing)}function w(e,t){var r,n=t.buffer,i=t.length,a=!!t.decoder,o=!!t.objectMode;if(0===n.length)return null;if(0===i)r=null;else if(o)r=n.shift();else if(!e||e>=i)r=a?n.join(""):1===n.length?n[0]:E.concat(n,i),n.length=0;else if(e<n[0].length){var s=n[0];r=s.slice(0,e),n[0]=s.slice(e)}else if(e===n[0].length)r=n.shift();else{r=a?"":new E(e);for(var u=0,c=0,l=n.length;c<l&&u<e;c++){var s=n[0],f=Math.min(e-u,s.length);a?r+=s.slice(0,f):s.copy(r,u,0,f),f<s.length?n[0]=s.slice(f):n.shift(),u+=f}}return r}function _(e){var t=e._readableState;if(t.length>0)throw new Error("endReadable called on non-empty stream");t.endEmitted||(t.ended=!0,M(k,t,e))}function k(e,t){e.endEmitted||0!==e.length||(e.endEmitted=!0,t.readable=!1,t.emit("end"))}function 
S(e,t){for(var r=0,n=e.length;r<n;r++)t(e[r],r)}function A(e,t){for(var r=0,n=e.length;r<n;r++)if(e[r]===t)return r;return-1}e.exports=i;var M=r(193),x=r(163),E=r(160).Buffer;i.ReadableState=n;var B,j=(r(190),function(e,t){return e.listeners(t).length});!function(){try{B=r(189)}catch(e){}finally{B||(B=r(190).EventEmitter)}}();var E=r(160).Buffer,C=r(194);C.inherits=r(176);var T=r(195),R=void 0;R=T&&T.debuglog?T.debuglog("stream"):function(){};var L;C.inherits(i,B);var N,N;i.prototype.push=function(e,t){var r=this._readableState;return r.objectMode||"string"!=typeof e||(t=t||r.defaultEncoding,t!==r.encoding&&(e=new E(e,t),t="")),a(this,r,e,t,!1)},i.prototype.unshift=function(e){var t=this._readableState;return a(this,t,e,"",!0)},i.prototype.isPaused=function(){return this._readableState.flowing===!1},i.prototype.setEncoding=function(e){return L||(L=r(201).StringDecoder),this._readableState.decoder=new L(e),this._readableState.encoding=e,this};var I=8388608;i.prototype.read=function(e){R("read",e);var t=this._readableState,r=e;if(("number"!=typeof e||e>0)&&(t.emittedReadable=!1),0===e&&t.needReadable&&(t.length>=t.highWaterMark||t.ended))return R("read: emitReadable",t.length,t.ended),0===t.length&&t.ended?_(this):f(this),null;if(e=u(e,t),0===e&&t.ended)return 0===t.length&&_(this),null;var n=t.needReadable;R("need readable",n),(0===t.length||t.length-e<t.highWaterMark)&&(n=!0,R("length less than watermark",n)),(t.ended||t.reading)&&(n=!1,R("reading or ended",n)),n&&(R("do read"),t.reading=!0,t.sync=!0,0===t.length&&(t.needReadable=!0),this._read(t.highWaterMark),t.sync=!1),n&&!t.reading&&(e=u(r,t));var i;return i=e>0?w(e,t):null,null===i&&(t.needReadable=!0,e=0),t.length-=e,0!==t.length||t.ended||(t.needReadable=!0),r!==e&&t.ended&&0===t.length&&_(this),null!==i&&this.emit("data",i),i},i.prototype._read=function(e){this.emit("error",new Error("not implemented"))},i.prototype.pipe=function(e,r){function n(e){R("onunpipe"),e===f&&a()}function 
i(){R("onend"),e.end()}function a(){R("cleanup"),e.removeListener("close",u),e.removeListener("finish",c),e.removeListener("drain",y),e.removeListener("error",s),e.removeListener("unpipe",n),f.removeListener("end",i),f.removeListener("end",a),f.removeListener("data",o),g=!0,!h.awaitDrain||e._writableState&&!e._writableState.needDrain||y()}function o(t){R("ondata");var r=e.write(t);!1===r&&(1!==h.pipesCount||h.pipes[0]!==e||1!==f.listenerCount("data")||g||(R("false write response, pause",f._readableState.awaitDrain),f._readableState.awaitDrain++),f.pause())}function s(t){R("onerror",t),l(),e.removeListener("error",s),0===j(e,"error")&&e.emit("error",t)}function u(){e.removeListener("finish",c),l()}function c(){R("onfinish"),e.removeListener("close",u),l()}function l(){R("unpipe"),f.unpipe(e)}var f=this,h=this._readableState;switch(h.pipesCount){case 0:h.pipes=e;break;case 1:h.pipes=[h.pipes,e];break;default:h.pipes.push(e)}h.pipesCount+=1,R("pipe count=%d opts=%j",h.pipesCount,r);var d=(!r||r.end!==!1)&&e!==t.stdout&&e!==t.stderr,p=d?i:a;h.endEmitted?M(p):f.once("end",p),e.on("unpipe",n);var y=m(f);e.on("drain",y);var g=!1;return f.on("data",o),e._events&&e._events.error?x(e._events.error)?e._events.error.unshift(s):e._events.error=[s,e._events.error]:e.on("error",s),e.once("close",u),e.once("finish",c),e.emit("pipe",f),h.flowing||(R("pipe resume"),f.resume()),e},i.prototype.unpipe=function(e){var t=this._readableState;if(0===t.pipesCount)return this;if(1===t.pipesCount)return e&&e!==t.pipes?this:(e||(e=t.pipes),t.pipes=null,t.pipesCount=0,t.flowing=!1,e&&e.emit("unpipe",this),this);if(!e){var r=t.pipes,n=t.pipesCount;t.pipes=null,t.pipesCount=0,t.flowing=!1;for(var i=0;i<n;i++)r[i].emit("unpipe",this);return this}var a=A(t.pipes,e);return a===-1?this:(t.pipes.splice(a,1),t.pipesCount-=1,1===t.pipesCount&&(t.pipes=t.pipes[0]),e.emit("unpipe",this),this)},i.prototype.on=function(e,t){var 
r=B.prototype.on.call(this,e,t);if("data"===e&&!1!==this._readableState.flowing&&this.resume(),"readable"===e&&!this._readableState.endEmitted){var n=this._readableState;n.readableListening||(n.readableListening=!0,n.emittedReadable=!1,n.needReadable=!0,n.reading?n.length&&f(this,n):M(y,this))}return r},i.prototype.addListener=i.prototype.on,i.prototype.resume=function(){var e=this._readableState;return e.flowing||(R("resume"),e.flowing=!0,g(this,e)),this},i.prototype.pause=function(){return R("call pause flowing=%j",this._readableState.flowing),!1!==this._readableState.flowing&&(R("pause"),this._readableState.flowing=!1,this.emit("pause")),this},i.prototype.wrap=function(e){var t=this._readableState,r=!1,n=this;e.on("end",function(){if(R("wrapped end"),t.decoder&&!t.ended){var e=t.decoder.end();e&&e.length&&n.push(e)}n.push(null)}),e.on("data",function(i){if(R("wrapped data"),t.decoder&&(i=t.decoder.write(i)),(!t.objectMode||null!==i&&void 0!==i)&&(t.objectMode||i&&i.length)){var a=n.push(i);a||(r=!0,e.pause())}});for(var i in e)void 0===this[i]&&"function"==typeof e[i]&&(this[i]=function(t){return function(){return e[t].apply(e,arguments)}}(i));var a=["error","close","destroy","pause","resume"];return S(a,function(t){e.on(t,n.emit.bind(n,t))}),n._read=function(t){R("wrapped _read",t),r&&(r=!1,e.resume())},n},i._fromList=w}).call(t,r(171))},function(e,t,r){(function(t){"use strict";function r(e,r,n,i){if("function"!=typeof e)throw new TypeError('"callback" argument must be a function');var a,o,s=arguments.length;switch(s){case 0:case 1:return t.nextTick(e);case 2:return t.nextTick(function(){e.call(null,r)});case 3:return t.nextTick(function(){e.call(null,r,n)});case 4:return t.nextTick(function(){e.call(null,r,n,i)});default:for(a=new Array(s-1),o=0;o<a.length;)a[o++]=arguments[o];return 
t.nextTick(function(){e.apply(null,a)})}}!t.version||0===t.version.indexOf("v0.")||0===t.version.indexOf("v1.")&&0!==t.version.indexOf("v1.8.")?e.exports=r:e.exports=t.nextTick}).call(t,r(171))},function(e,t,r){(function(e){function r(e){return Array.isArray?Array.isArray(e):"[object Array]"===y(e)}function n(e){return"boolean"==typeof e}function i(e){return null===e}function a(e){return null==e}function o(e){return"number"==typeof e}function s(e){return"string"==typeof e}function u(e){return"symbol"==typeof e}function c(e){return void 0===e}function l(e){return"[object RegExp]"===y(e)}function f(e){return"object"==typeof e&&null!==e}function h(e){return"[object Date]"===y(e)}function d(e){return"[object Error]"===y(e)||e instanceof Error}function p(e){return"function"==typeof e}function m(e){return null===e||"boolean"==typeof e||"number"==typeof e||"string"==typeof e||"symbol"==typeof e||"undefined"==typeof e}function y(e){return Object.prototype.toString.call(e)}t.isArray=r,t.isBoolean=n,t.isNull=i,t.isNullOrUndefined=a,t.isNumber=o,t.isString=s,t.isSymbol=u,t.isUndefined=c,t.isRegExp=l,t.isObject=f,t.isDate=h,t.isError=d,t.isFunction=p,t.isPrimitive=m,t.isBuffer=e.isBuffer}).call(t,r(160).Buffer)},165,function(e,t,r){"use strict";function n(e){return this instanceof n?(c.call(this,e),l.call(this,e),e&&e.readable===!1&&(this.readable=!1),e&&e.writable===!1&&(this.writable=!1),this.allowHalfOpen=!0,e&&e.allowHalfOpen===!1&&(this.allowHalfOpen=!1),void this.once("end",i)):new n(e)}function i(){this.allowHalfOpen||this._writableState.ended||s(a,this)}function a(e){e.end()}var o=Object.keys||function(e){var t=[];for(var r in e)t.push(r);return t};e.exports=n;var s=r(193),u=r(194);u.inherits=r(176);var c=r(192),l=r(197);u.inherits(n,c);for(var f=o(l.prototype),h=0;h<f.length;h++){var d=f[h];n.prototype[d]||(n.prototype[d]=l.prototype[d])}},function(e,t,r){(function(t,n){"use strict";function i(){}function 
a(e,t,r){this.chunk=e,this.encoding=t,this.callback=r,this.next=null}function o(e,t){C=C||r(196),e=e||{},this.objectMode=!!e.objectMode,t instanceof C&&(this.objectMode=this.objectMode||!!e.writableObjectMode);var n=e.highWaterMark,i=this.objectMode?16:16384;this.highWaterMark=n||0===n?n:i,this.highWaterMark=~~this.highWaterMark,this.needDrain=!1,this.ending=!1,this.ended=!1,this.finished=!1;var a=e.decodeStrings===!1;this.decodeStrings=!a,this.defaultEncoding=e.defaultEncoding||"utf8",this.length=0,this.writing=!1,this.corked=0,this.sync=!0,this.bufferProcessing=!1,this.onwrite=function(e){m(t,e)},this.writecb=null,this.writelen=0,this.bufferedRequest=null,this.lastBufferedRequest=null,this.pendingcb=0,this.prefinished=!1,this.errorEmitted=!1,this.bufferedRequestCount=0,this.corkedRequestsFree=new S(this),this.corkedRequestsFree.next=new S(this)}function s(e){return C=C||r(196),this instanceof s||this instanceof C?(this._writableState=new o(e,this),this.writable=!0,e&&("function"==typeof e.write&&(this._write=e.write),"function"==typeof e.writev&&(this._writev=e.writev)),void B.call(this)):new s(e)}function u(e,t){var r=new Error("write after end");e.emit("error",r),A(t,r)}function c(e,t,r,n){var i=!0;if(!x.isBuffer(r)&&"string"!=typeof r&&null!==r&&void 0!==r&&!t.objectMode){var a=new TypeError("Invalid non-string/buffer chunk");e.emit("error",a),A(n,a),i=!1}return i}function l(e,t,r){return e.objectMode||e.decodeStrings===!1||"string"!=typeof t||(t=new x(t,r)),t}function f(e,t,r,n,i){r=l(t,r,n),x.isBuffer(r)&&(n="buffer");var o=t.objectMode?1:r.length;t.length+=o;var s=t.length<t.highWaterMark;if(s||(t.needDrain=!0),t.writing||t.corked){var u=t.lastBufferedRequest;t.lastBufferedRequest=new a(r,n,i),u?u.next=t.lastBufferedRequest:t.bufferedRequest=t.lastBufferedRequest,t.bufferedRequestCount+=1}else h(e,t,!1,o,r,n,i);return s}function 
h(e,t,r,n,i,a,o){t.writelen=n,t.writecb=o,t.writing=!0,t.sync=!0,r?e._writev(i,t.onwrite):e._write(i,a,t.onwrite),t.sync=!1}function d(e,t,r,n,i){--t.pendingcb,r?A(i,n):i(n),e._writableState.errorEmitted=!0,e.emit("error",n)}function p(e){e.writing=!1,e.writecb=null,e.length-=e.writelen,e.writelen=0}function m(e,t){var r=e._writableState,n=r.sync,i=r.writecb;if(p(r),t)d(e,r,n,t,i);else{var a=b(r);a||r.corked||r.bufferProcessing||!r.bufferedRequest||v(e,r),n?M(y,e,r,a,i):y(e,r,a,i)}}function y(e,t,r,n){r||g(e,t),t.pendingcb--,n(),_(e,t)}function g(e,t){0===t.length&&t.needDrain&&(t.needDrain=!1,e.emit("drain"))}function v(e,t){t.bufferProcessing=!0;var r=t.bufferedRequest;if(e._writev&&r&&r.next){var n=t.bufferedRequestCount,i=new Array(n),a=t.corkedRequestsFree;a.entry=r;for(var o=0;r;)i[o]=r,r=r.next,o+=1;h(e,t,!0,t.length,i,"",a.finish),t.pendingcb++,t.lastBufferedRequest=null,t.corkedRequestsFree=a.next,a.next=null}else{for(;r;){var s=r.chunk,u=r.encoding,c=r.callback,l=t.objectMode?1:s.length;if(h(e,t,!1,l,s,u,c),r=r.next,t.writing)break}null===r&&(t.lastBufferedRequest=null)}t.bufferedRequestCount=0,t.bufferedRequest=r,t.bufferProcessing=!1}function b(e){return e.ending&&0===e.length&&null===e.bufferedRequest&&!e.finished&&!e.writing}function w(e,t){t.prefinished||(t.prefinished=!0,e.emit("prefinish"))}function _(e,t){var r=b(t);return r&&(0===t.pendingcb?(w(e,t),t.finished=!0,e.emit("finish")):w(e,t)),r}function k(e,t,r){t.ending=!0,_(e,t),r&&(t.finished?A(r):e.once("finish",r)),t.ended=!0,e.writable=!1}function S(e){var t=this;this.next=null,this.entry=null,this.finish=function(r){var n=t.entry;for(t.entry=null;n;){var i=n.callback;e.pendingcb--,i(r),n=n.next}e.corkedRequestsFree?e.corkedRequestsFree.next=t:e.corkedRequestsFree=t}}e.exports=s;var A=r(193),M=!t.browser&&["v0.10","v0.9."].indexOf(t.version.slice(0,5))>-1?n:A,x=r(160).Buffer;s.WritableState=o;var E=r(194);E.inherits=r(176);var 
B,j={deprecate:r(200)};!function(){try{B=r(189)}catch(e){}finally{B||(B=r(190).EventEmitter)}}();var x=r(160).Buffer;E.inherits(s,B);var C;o.prototype.getBuffer=function(){for(var e=this.bufferedRequest,t=[];e;)t.push(e),e=e.next;return t},function(){try{Object.defineProperty(o.prototype,"buffer",{get:j.deprecate(function(){return this.getBuffer()},"_writableState.buffer is deprecated. Use _writableState.getBuffer instead.")})}catch(e){}}();var C;s.prototype.pipe=function(){this.emit("error",new Error("Cannot pipe. Not readable."))},s.prototype.write=function(e,t,r){var n=this._writableState,a=!1;return"function"==typeof t&&(r=t,t=null),x.isBuffer(e)?t="buffer":t||(t=n.defaultEncoding),"function"!=typeof r&&(r=i),n.ended?u(this,r):c(this,n,e,r)&&(n.pendingcb++,a=f(this,n,e,t,r)),a},s.prototype.cork=function(){var e=this._writableState;e.corked++},s.prototype.uncork=function(){var e=this._writableState;e.corked&&(e.corked--,e.writing||e.corked||e.finished||e.bufferProcessing||!e.bufferedRequest||v(this,e))},s.prototype.setDefaultEncoding=function(e){if("string"==typeof e&&(e=e.toLowerCase()),!(["hex","utf8","utf-8","ascii","binary","base64","ucs2","ucs-2","utf16le","utf-16le","raw"].indexOf((e+"").toLowerCase())>-1))throw new TypeError("Unknown encoding: "+e);this._writableState.defaultEncoding=e},s.prototype._write=function(e,t,r){r(new Error("not implemented"))},s.prototype._writev=null,s.prototype.end=function(e,t,r){var n=this._writableState;"function"==typeof e?(r=e,e=null,t=null):"function"==typeof t&&(r=t,t=null),null!==e&&void 0!==e&&this.write(e,t),n.corked&&(n.corked=1,this.uncork()),n.ending||n.finished||k(this,n,r)}}).call(t,r(171),r(198).setImmediate)},function(e,t,r){(function(e){function n(e,t){this._id=e,this._clearFn=t}var i="undefined"!=typeof e&&e||"undefined"!=typeof self&&self||window,a=Function.prototype.apply;t.setTimeout=function(){return new n(a.call(setTimeout,i,arguments),clearTimeout)},t.setInterval=function(){return new 
n(a.call(setInterval,i,arguments),clearInterval)},t.clearTimeout=t.clearInterval=function(e){e&&e.close()},n.prototype.unref=n.prototype.ref=function(){},n.prototype.close=function(){this._clearFn.call(i,this._id)},t.enroll=function(e,t){clearTimeout(e._idleTimeoutId),e._idleTimeout=t},t.unenroll=function(e){clearTimeout(e._idleTimeoutId),e._idleTimeout=-1},t._unrefActive=t.active=function(e){clearTimeout(e._idleTimeoutId);var t=e._idleTimeout;t>=0&&(e._idleTimeoutId=setTimeout(function(){
e._onTimeout&&e._onTimeout()},t))},r(199),t.setImmediate="undefined"!=typeof self&&self.setImmediate||"undefined"!=typeof e&&e.setImmediate||this&&this.setImmediate,t.clearImmediate="undefined"!=typeof self&&self.clearImmediate||"undefined"!=typeof e&&e.clearImmediate||this&&this.clearImmediate}).call(t,function(){return this}())},function(e,t,r){(function(e,t){!function(e,r){"use strict";function n(e){"function"!=typeof e&&(e=new Function(""+e));for(var t=new Array(arguments.length-1),r=0;r<t.length;r++)t[r]=arguments[r+1];var n={callback:e,args:t};return m[p]=n,d(p),p++}function i(e){delete m[e]}function a(e){var t=e.callback,n=e.args;switch(n.length){case 0:t();break;case 1:t(n[0]);break;case 2:t(n[0],n[1]);break;case 3:t(n[0],n[1],n[2]);break;default:t.apply(r,n)}}function o(e){if(y)setTimeout(o,0,e);else{var t=m[e];if(t){y=!0;try{a(t)}finally{i(e),y=!1}}}}function s(){d=function(e){t.nextTick(function(){o(e)})}}function u(){if(e.postMessage&&!e.importScripts){var t=!0,r=e.onmessage;return e.onmessage=function(){t=!1},e.postMessage("","*"),e.onmessage=r,t}}function c(){var t="setImmediate$"+Math.random()+"$",r=function(r){r.source===e&&"string"==typeof r.data&&0===r.data.indexOf(t)&&o(+r.data.slice(t.length))};e.addEventListener?e.addEventListener("message",r,!1):e.attachEvent("onmessage",r),d=function(r){e.postMessage(t+r,"*")}}function l(){var e=new MessageChannel;e.port1.onmessage=function(e){var t=e.data;o(t)},d=function(t){e.port2.postMessage(t)}}function f(){var e=g.documentElement;d=function(t){var r=g.createElement("script");r.onreadystatechange=function(){o(t),r.onreadystatechange=null,e.removeChild(r),r=null},e.appendChild(r)}}function h(){d=function(e){setTimeout(o,0,e)}}if(!e.setImmediate){var d,p=1,m={},y=!1,g=e.document,v=Object.getPrototypeOf&&Object.getPrototypeOf(e);v=v&&v.setTimeout?v:e,"[object process]"==={}.toString.call(e.process)?s():u()?c():e.MessageChannel?l():g&&"onreadystatechange"in 
g.createElement("script")?f():h(),v.setImmediate=n,v.clearImmediate=i}}("undefined"==typeof self?"undefined"==typeof e?this:e:self)}).call(t,function(){return this}(),r(171))},function(e,t){(function(t){function r(e,t){function r(){if(!i){if(n("throwDeprecation"))throw new Error(t);n("traceDeprecation")?console.trace(t):console.warn(t),i=!0}return e.apply(this,arguments)}if(n("noDeprecation"))return e;var i=!1;return r}function n(e){try{if(!t.localStorage)return!1}catch(e){return!1}var r=t.localStorage[e];return null!=r&&"true"===String(r).toLowerCase()}e.exports=r}).call(t,function(){return this}())},function(e,t,r){function n(e){if(e&&!u(e))throw new Error("Unknown encoding: "+e)}function i(e){return e.toString(this.encoding)}function a(e){this.charReceived=e.length%2,this.charLength=this.charReceived?2:0}function o(e){this.charReceived=e.length%3,this.charLength=this.charReceived?3:0}var s=r(160).Buffer,u=s.isEncoding||function(e){switch(e&&e.toLowerCase()){case"hex":case"utf8":case"utf-8":case"ascii":case"binary":case"base64":case"ucs2":case"ucs-2":case"utf16le":case"utf-16le":case"raw":return!0;default:return!1}},c=t.StringDecoder=function(e){switch(this.encoding=(e||"utf8").toLowerCase().replace(/[-_]/,""),n(e),this.encoding){case"utf8":this.surrogateSize=3;break;case"ucs2":case"utf16le":this.surrogateSize=2,this.detectIncompleteChar=a;break;case"base64":this.surrogateSize=3,this.detectIncompleteChar=o;break;default:return void(this.write=i)}this.charBuffer=new s(6),this.charReceived=0,this.charLength=0};c.prototype.write=function(e){for(var t="";this.charLength;){var r=e.length>=this.charLength-this.charReceived?this.charLength-this.charReceived:e.length;if(e.copy(this.charBuffer,this.charReceived,0,r),this.charReceived+=r,this.charReceived<this.charLength)return"";e=e.slice(r,e.length),t=this.charBuffer.slice(0,this.charLength).toString(this.encoding);var 
n=t.charCodeAt(t.length-1);if(!(n>=55296&&n<=56319)){if(this.charReceived=this.charLength=0,0===e.length)return t;break}this.charLength+=this.surrogateSize,t=""}this.detectIncompleteChar(e);var i=e.length;this.charLength&&(e.copy(this.charBuffer,0,e.length-this.charReceived,i),i-=this.charReceived),t+=e.toString(this.encoding,0,i);var i=t.length-1,n=t.charCodeAt(i);if(n>=55296&&n<=56319){var a=this.surrogateSize;return this.charLength+=a,this.charReceived+=a,this.charBuffer.copy(this.charBuffer,a,0,a),e.copy(this.charBuffer,0,0,a),t.substring(0,i)}return t},c.prototype.detectIncompleteChar=function(e){for(var t=e.length>=3?3:e.length;t>0;t--){var r=e[e.length-t];if(1==t&&r>>5==6){this.charLength=2;break}if(t<=2&&r>>4==14){this.charLength=3;break}if(t<=3&&r>>3==30){this.charLength=4;break}}this.charReceived=t},c.prototype.end=function(e){var t="";if(e&&e.length&&(t=this.write(e)),this.charReceived){var r=this.charReceived,n=this.charBuffer,i=this.encoding;t+=n.slice(0,r).toString(i)}return t}},function(e,t,r){"use strict";function n(e){this.afterTransform=function(t,r){return i(e,t,r)},this.needTransform=!1,this.transforming=!1,this.writecb=null,this.writechunk=null,this.writeencoding=null}function i(e,t,r){var n=e._transformState;n.transforming=!1;var i=n.writecb;if(!i)return e.emit("error",new Error("no writecb in Transform class"));n.writechunk=null,n.writecb=null,null!==r&&void 0!==r&&e.push(r),i(t);var a=e._readableState;a.reading=!1,(a.needReadable||a.length<a.highWaterMark)&&e._read(a.highWaterMark)}function a(e){if(!(this instanceof a))return new a(e);s.call(this,e),this._transformState=new n(this);var t=this;this._readableState.needReadable=!0,this._readableState.sync=!1,e&&("function"==typeof e.transform&&(this._transform=e.transform),"function"==typeof e.flush&&(this._flush=e.flush)),this.once("prefinish",function(){"function"==typeof this._flush?this._flush(function(e){o(t,e)}):o(t)})}function o(e,t){if(t)return e.emit("error",t);var 
r=e._writableState,n=e._transformState;if(r.length)throw new Error("calling transform done when ws.length != 0");if(n.transforming)throw new Error("calling transform done when still transforming");return e.push(null)}e.exports=a;var s=r(196),u=r(194);u.inherits=r(176),u.inherits(a,s),a.prototype.push=function(e,t){return this._transformState.needTransform=!1,s.prototype.push.call(this,e,t)},a.prototype._transform=function(e,t,r){throw new Error("not implemented")},a.prototype._write=function(e,t,r){var n=this._transformState;if(n.writecb=r,n.writechunk=e,n.writeencoding=t,!n.transforming){var i=this._readableState;(n.needTransform||i.needReadable||i.length<i.highWaterMark)&&this._read(i.highWaterMark)}},a.prototype._read=function(e){var t=this._transformState;null!==t.writechunk&&t.writecb&&!t.transforming?(t.transforming=!0,this._transform(t.writechunk,t.writeencoding,t.afterTransform)):t.needTransform=!0}},function(e,t,r){"use strict";function n(e){return this instanceof n?void i.call(this,e):new n(e)}e.exports=n;var 
i=r(202),a=r(194);a.inherits=r(176),a.inherits(n,i),n.prototype._transform=function(e,t,r){r(null,e)}},function(e,t,r){e.exports=r(197)},function(e,t,r){e.exports=r(196)},function(e,t,r){e.exports=r(202)},function(e,t,r){e.exports=r(203)},function(e,t){t["aes-128-ecb"]={cipher:"AES",key:128,iv:0,mode:"ECB",type:"block"},t["aes-192-ecb"]={cipher:"AES",key:192,iv:0,mode:"ECB",type:"block"},t["aes-256-ecb"]={cipher:"AES",key:256,iv:0,mode:"ECB",type:"block"},t["aes-128-cbc"]={cipher:"AES",key:128,iv:16,mode:"CBC",type:"block"},t["aes-192-cbc"]={cipher:"AES",key:192,iv:16,mode:"CBC",type:"block"},t["aes-256-cbc"]={cipher:"AES",key:256,iv:16,mode:"CBC",type:"block"},t.aes128=t["aes-128-cbc"],t.aes192=t["aes-192-cbc"],t.aes256=t["aes-256-cbc"],t["aes-128-cfb"]={cipher:"AES",key:128,iv:16,mode:"CFB",type:"stream"},t["aes-192-cfb"]={cipher:"AES",key:192,iv:16,mode:"CFB",type:"stream"},t["aes-256-cfb"]={cipher:"AES",key:256,iv:16,mode:"CFB",type:"stream"},t["aes-128-ofb"]={cipher:"AES",key:128,iv:16,mode:"OFB",type:"stream"},t["aes-192-ofb"]={cipher:"AES",key:192,iv:16,mode:"OFB",type:"stream"},t["aes-256-ofb"]={cipher:"AES",key:256,iv:16,mode:"OFB",type:"stream"},t["aes-128-ctr"]={cipher:"AES",key:128,iv:16,mode:"CTR",type:"stream"},t["aes-192-ctr"]={cipher:"AES",key:192,iv:16,mode:"CTR",type:"stream"},t["aes-256-ctr"]={cipher:"AES",key:256,iv:16,mode:"CTR",type:"stream"}},function(e,t,r){(function(t){e.exports=function(e,r,n,i){n/=8,i=i||0;for(var a,o,s,u=0,c=0,l=new t(n),f=new t(i),h=0;;){if(a=e.createHash("md5"),h++>0&&a.update(o),a.update(r),o=a.digest(),s=0,n>0)for(;;){if(0===n)break;if(s===o.length)break;l[u++]=o[s],n--,s++}if(i>0&&s!==o.length)for(;;){if(0===i)break;if(s===o.length)break;f[c++]=o[s],i--,s++}if(0===n&&0===i)break}for(s=0;s<o.length;s++)o[s]=0;return{key:l,iv:f}}}).call(t,r(160).Buffer)},function(e,t,r){(function(t){function n(e,r,o,s){return this instanceof n?(a.call(this),this._cipher=new i.AES(r),this._prev=new t(o.length),this._cache=new 
t(""),this._secCache=new t(""),this._decrypt=s,o.copy(this._prev),void(this._mode=e)):new n(e,r,o)}var i=r(187),a=r(188),o=r(176);o(n,a),e.exports=n,n.prototype._transform=function(e,t,r){r(null,this._mode.encrypt(this,e,this._decrypt))},n.prototype._flush=function(e){this._cipher.scrub(),e()}}).call(t,r(160).Buffer)},function(e,t){t.encrypt=function(e,t){return e._cipher.encryptBlock(t)},t.decrypt=function(e,t){return e._cipher.decryptBlock(t)}},function(e,t,r){var n=r(213);t.encrypt=function(e,t){var r=n(t,e._prev);return e._prev=e._cipher.encryptBlock(r),e._prev},t.decrypt=function(e,t){var r=e._prev;e._prev=t;var i=e._cipher.decryptBlock(t);return n(i,r)}},function(e,t,r){(function(t){function r(e,r){for(var n=Math.min(e.length,r.length),i=new t(n),a=-1;++a<n;)i.writeUInt8(e[a]^r[a],a);return i}e.exports=r}).call(t,r(160).Buffer)},function(e,t,r){(function(e){function n(t,r,n){var a=r.length,o=i(r,t._cache);return t._cache=t._cache.slice(a),t._prev=e.concat([t._prev,n?r:o]),o}var i=r(213);t.encrypt=function(t,r,i){for(var a,o=new e("");r.length;){if(0===t._cache.length&&(t._cache=t._cipher.encryptBlock(t._prev),t._prev=new e("")),!(t._cache.length<=r.length)){o=e.concat([o,n(t,r,i)]);break}a=t._cache.length,o=e.concat([o,n(t,r.slice(0,a),i)]),r=r.slice(a)}return o}}).call(t,r(160).Buffer)},function(e,t,r){(function(e){function n(e){return e._prev=e._cipher.encryptBlock(e._prev),e._prev}var i=r(213);t.encrypt=function(t,r){for(;t._cache.length<r.length;)t._cache=e.concat([t._cache,n(t)]);var a=t._cache.slice(0,r.length);return t._cache=t._cache.slice(r.length),i(r,a)}}).call(t,r(160).Buffer)},function(e,t,r){(function(e){function n(e){var t=e._cipher.encryptBlock(e._prev);return i(e._prev),t}function i(e){for(var t,r=e.length;r--;){if(t=e.readUInt8(r),255!==t){t++,e.writeUInt8(t,r);break}e.writeUInt8(0,r)}}var a=r(213);t.encrypt=function(t,r){for(;t._cache.length<r.length;)t._cache=e.concat([t._cache,n(t)]);var i=t._cache.slice(0,r.length);return 
t._cache=t._cache.slice(r.length),a(r,i)}}).call(t,r(160).Buffer)},function(e,t,r){(function(t){function n(e,r,a){return this instanceof n?(s.call(this),this._cache=new i,this._last=void 0,this._cipher=new o.AES(r),this._prev=new t(a.length),a.copy(this._prev),void(this._mode=e)):new n(e,r,a)}function i(){return this instanceof i?void(this.cache=new t("")):new i}function a(e){var t=e[15];if(16!==t)return e.slice(0,16-t)}var o=r(187),s=r(188),u=r(176),c=r(208),l=r(210),f=r(209);u(n,s),n.prototype._transform=function(e,t,r){this._cache.add(e);for(var n,i;n=this._cache.get();)i=this._mode.decrypt(this,n),this.push(i);r()},n.prototype._flush=function(e){var t=this._cache.flush();return t?(this.push(a(this._mode.decrypt(this,t))),void e()):e},i.prototype.add=function(e){this.cache=t.concat([this.cache,e])},i.prototype.get=function(){if(this.cache.length>16){var e=this.cache.slice(0,16);return this.cache=this.cache.slice(16),e}return null},i.prototype.flush=function(){if(this.cache.length)return this.cache};var h={ECB:r(211),CBC:r(212),CFB:r(214),OFB:r(215),CTR:r(216)};e.exports=function(e){function r(e,r,i){var a=c[e];if(!a)throw new TypeError("invalid suite type");if("string"==typeof i&&(i=new t(i)),"string"==typeof r&&(r=new t(r)),r.length!==a.key/8)throw new TypeError("invalid key length "+r.length);if(i.length!==a.iv)throw new TypeError("invalid iv length "+i.length);return"stream"===a.type?new l(h[a.mode],r,i,!0):new n(h[a.mode],r,i)}function i(t,n){var i=c[t];if(!i)throw new TypeError("invalid suite type");var a=f(e,n,i.key,i.iv);return r(t,a.key,a.iv)}return{createDecipher:i,createDecipheriv:r}}}).call(t,r(160).Buffer)},function(e,t,r){(function(e){"use 
strict";e.exports=["jarjan","mahdif","sprayaga","ruzinav","Skyhartman","moscoz","kurafire","91bilal","igorgarybaldi","calebogden","malykhinv","joelhelin","kushsolitary","coreyweb","snowshade","areus","holdenweb","heyimjuani","envex","unterdreht","collegeman","peejfancher","andyisonline","ultragex","fuck_you_two","adellecharles","ateneupopular","ahmetalpbalkan","Stievius","kerem","osvaldas","angelceballos","thierrykoblentz","peterlandt","catarino","wr","weglov","brandclay","flame_kaizar","ahmetsulek","nicolasfolliot","jayrobinson","victorerixon","kolage","michzen","markjenkins","nicolai_larsen","gt","noxdzine","alagoon","idiot","mizko","chadengle","mutlu82","simobenso","vocino","guiiipontes","soyjavi","joshaustin","tomaslau","VinThomas","ManikRathee","langate","cemshid","leemunroe","_shahedk","enda","BillSKenney","divya","joshhemsley","sindresorhus","soffes","9lessons","linux29","Chakintosh","anaami","joreira","shadeed9","scottkclark","jedbridges","salleedesign","marakasina","ariil","BrianPurkiss","michaelmartinho","bublienko","devankoshal","ZacharyZorbas","timmillwood","joshuasortino","damenleeturks","tomas_janousek","herrhaase","RussellBishop","brajeshwar","nachtmeister","cbracco","bermonpainter","abdullindenis","isacosta","suprb","yalozhkin","chandlervdw","iamgarth","_victa","commadelimited","roybarberuk","axel","vladarbatov","ffbel","syropian","ankitind","traneblow","flashmurphy","ChrisFarina78","baliomega","saschamt","jm_denis","anoff","kennyadr","chatyrko","dingyi","mds","terryxlife","aaroni","kinday","prrstn","eduardostuart","dhilipsiva","GavicoInd","baires","rohixx","bigmancho","blakesimkins","leeiio","tjrus","uberschizo","kylefoundry","claudioguglieri","ripplemdk","exentrich","jakemoore","joaoedumedeiros","poormini","tereshenkov","keryilmaz","haydn_woods","rude","llun","sgaurav_baghel","jamiebrittain","badlittleduck","pifagor","agromov","benefritz","erwanhesry","diesellaws","jeremiaha","koridhandy","chaensel","andrewcohen","smaczny","gonzalorobaina","nandini
_m","sydlawrence","cdharrison","tgerken","lewisainslie","charliecwaite","robbschiller","flexrs","mattdetails","raquelwilson","karsh","mrmartineau","opnsrce","hgharrygo","maximseshuk","uxalex","samihah","chanpory","sharvin","josemarques","jefffis","krystalfister","lokesh_coder","thedamianhdez","dpmachado","funwatercat","timothycd","ivanfilipovbg","picard102","marcobarbosa","krasnoukhov","g3d","ademilter","rickdt","operatino","bungiwan","hugomano","logorado","dc_user","horaciobella","SlaapMe","teeragit","iqonicd","ilya_pestov","andrewarrow","ssiskind","stan","HenryHoffman","rdsaunders","adamsxu","curiousoffice","themadray","michigangraham","kohette","nickfratter","runningskull","madysondesigns","brenton_clarke","jennyshen","bradenhamm","kurtinc","amanruzaini","coreyhaggard","Karimmove","aaronalfred","wtrsld","jitachi","therealmarvin","pmeissner","ooomz","chacky14","jesseddy","thinmatt","shanehudson","akmur","IsaryAmairani","arthurholcombe1","andychipster","boxmodel","ehsandiary","LucasPerdidao","shalt0ni","swaplord","kaelifa","plbabin","guillemboti","arindam_","renbyrd","thiagovernetti","jmillspaysbills","mikemai2awesome","jervo","mekal","sta1ex","robergd","felipecsl","andrea211087","garand","dhooyenga","abovefunction","pcridesagain","randomlies","BryanHorsey","heykenneth","dahparra","allthingssmitty","danvernon","beweinreich","increase","falvarad","alxndrustinov","souuf","orkuncaylar","AM_Kn2","gearpixels","bassamology","vimarethomas","kosmar","SULiik","mrjamesnoble","silvanmuhlemann","shaneIxD","nacho","yigitpinarbasi","buzzusborne","aaronkwhite","rmlewisuk","giancarlon","nbirckel","d_nny_m_cher","sdidonato","atariboy","abotap","karalek","psdesignuk","ludwiczakpawel","nemanjaivanovic","baluli","ahmadajmi","vovkasolovev","samgrover","derienzo777","jonathansimmons","nelsonjoyce","S0ufi4n3","xtopherpaul","oaktreemedia","nateschulte","findingjenny","namankreative","antonyzotov","we_social","leehambley","solid_color","abelcabans","mbilderbach","kkusaa","jordyvdboom","car
losgavina","pechkinator","vc27","rdbannon","croakx","suribbles","kerihenare","catadeleon","gcmorley","duivvv","saschadroste","victorDubugras","wintopia","mattbilotti","taylorling","megdraws","meln1ks","mahmoudmetwally","Silveredge9","derekebradley","happypeter1983","travis_arnold","artem_kostenko","adobi","daykiine","alek_djuric","scips","miguelmendes","justinrhee","alsobrooks","fronx","mcflydesign","santi_urso","allfordesign","stayuber","bertboerland","marosholly","adamnac","cynthiasavard","muringa","danro","hiemil","jackiesaik","zacsnider","iduuck","antjanus","aroon_sharma","dshster","thehacker","michaelbrooksjr","ryanmclaughlin","clubb3rry","taybenlor","xripunov","myastro","adityasutomo","digitalmaverick","hjartstrorn","itolmach","vaughanmoffitt","abdots","isnifer","sergeysafonov","maz","scrapdnb","chrismj83","vitorleal","sokaniwaal","zaki3d","illyzoren","mocabyte","osmanince","djsherman","davidhemphill","waghner","necodymiconer","praveen_vijaya","fabbrucci","cliffseal","travishines","kuldarkalvik","Elt_n","phillapier","okseanjay","id835559","kudretkeskin","anjhero","duck4fuck","scott_riley","noufalibrahim","h1brd","borges_marcos","devinhalladay","ciaranr","stefooo","mikebeecham","tonymillion","joshuaraichur","irae","petrangr","dmitriychuta","charliegann","arashmanteghi","adhamdannaway","ainsleywagon","svenlen","faisalabid","beshur","carlyson","dutchnadia","teddyzetterlund","samuelkraft","aoimedia","toddrew","codepoet_ru","artvavs","benoitboucart","jomarmen","kolmarlopez","creartinc","homka","gaborenton","robinclediere","maximsorokin","plasticine","j2deme","peachananr","kapaluccio","de_ascanio","rikas","dawidwu","marcoramires","angelcreative","rpatey","popey","rehatkathuria","the_purplebunny","1markiz","ajaxy_ru","brenmurrell","dudestein","oskarlevinson","victorstuber","nehfy","vicivadeline","leandrovaranda","scottgallant","victor_haydin","sawrb","ryhanhassan","amayvs","a_brixen","karolkrakowiak_","herkulano","geran7","cggaurav","chris_witko","lososina","polarity
","mattlat","brandonburke","constantx","teylorfeliz","craigelimeliah","rachelreveley","reabo101","rahmeen","ky","rickyyean","j04ntoh","spbroma","sebashton","jpenico","francis_vega","oktayelipek","kikillo","fabbianz","larrygerard","BroumiYoussef","0therplanet","mbilalsiddique1","ionuss","grrr_nl","liminha","rawdiggie","ryandownie","sethlouey","pixage","arpitnj","switmer777","josevnclch","kanickairaj","puzik","tbakdesigns","besbujupi","supjoey","lowie","linkibol","balintorosz","imcoding","agustincruiz","gusoto","thomasschrijer","superoutman","kalmerrautam","gabrielizalo","gojeanyn","davidbaldie","_vojto","laurengray","jydesign","mymyboy","nellleo","marciotoledo","ninjad3m0","to_soham","hasslunsford","muridrahhal","levisan","grahamkennery","lepetitogre","antongenkin","nessoila","amandabuzard","safrankov","cocolero","dss49","matt3224","bluesix","quailandquasar","AlbertoCococi","lepinski","sementiy","mhudobivnik","thibaut_re","olgary","shojberg","mtolokonnikov","bereto","naupintos","wegotvices","xadhix","macxim","rodnylobos","madcampos","madebyvadim","bartoszdawydzik","supervova","markretzloff","vonachoo","darylws","stevedesigner","mylesb","herbigt","depaulawagner","geshan","gizmeedevil1991","_scottburgess","lisovsky","davidsasda","artd_sign","YoungCutlass","mgonto","itstotallyamy","victorquinn","osmond","oksanafrewer","zauerkraut","iamkeithmason","nitinhayaran","lmjabreu","mandalareopens","thinkleft","ponchomendivil","juamperro","brunodesign1206","caseycavanagh","luxe","dotgridline","spedwig","madewulf","mattsapii","helderleal","chrisstumph","jayphen","nsamoylov","chrisvanderkooi","justme_timothyg","otozk","prinzadi","gu5taf","cyril_gaillard","d_kobelyatsky","daniloc","nwdsha","romanbulah","skkirilov","dvdwinden","dannol","thekevinjones","jwalter14","timgthomas","buddhasource","uxpiper","thatonetommy","diansigitp","adrienths","klimmka","gkaam","derekcramer","jennyyo","nerrsoft","xalionmalik","edhenderson","keyuri85","roxanejammet","kimcool","edkf","matkins","alessandror
ibe","jacksonlatka","lebronjennan","kostaspt","karlkanall","moynihan","danpliego","saulihirvi","wesleytrankin","fjaguero","bowbrick","mashaaaaal","yassiryahya","dparrelli","fotomagin","aka_james","denisepires","iqbalperkasa","martinansty","jarsen","r_oy","justinrob","gabrielrosser","malgordon","carlfairclough","michaelabehsera","pierrestoffe","enjoythetau","loganjlambert","rpeezy","coreyginnivan","michalhron","msveet","lingeswaran","kolsvein","peter576","reideiredale","joeymurdah","raphaelnikson","mvdheuvel","maxlinderman","jimmuirhead","begreative","frankiefreesbie","robturlinckx","Talbi_ConSept","longlivemyword","vanchesz","maiklam","hermanobrother","rez___a","gregsqueeb","greenbes","_ragzor","anthonysukow","fluidbrush","dactrtr","jehnglynn","bergmartin","hugocornejo","_kkga","dzantievm","sawalazar","sovesove","jonsgotwood","byryan","vytautas_a","mizhgan","cicerobr","nilshelmersson","d33pthought","davecraige","nckjrvs","alexandermayes","jcubic","craigrcoles","bagawarman","rob_thomas10","cofla","maikelk","rtgibbons","russell_baylis","mhesslow","codysanfilippo","webtanya","madebybrenton","dcalonaci","perfectflow","jjsiii","saarabpreet","kumarrajan12123","iamsteffen","themikenagle","ceekaytweet","larrybolt","conspirator","dallasbpeters","n3dmax","terpimost","kirillz","byrnecore","j_drake_","calebjoyce","russoedu","hoangloi","tobysaxon","gofrasdesign","dimaposnyy","tjisousa","okandungel","billyroshan","oskamaya","motionthinks","knilob","ashocka18","marrimo","bartjo","omnizya","ernestsemerda","andreas_pr","edgarchris99","thomasgeisen","gseguin","joannefournier","demersdesigns","adammarsbar","nasirwd","n_tassone","javorszky","themrdave","yecidsm","nicollerich","canapud","nicoleglynn","judzhin_miles","designervzm","kianoshp","evandrix","alterchuca","dhrubo","ma_tiax","ssbb_me","dorphern","mauriolg","bruno_mart","mactopus","the_winslet","joemdesign","Shriiiiimp","jacobbennett","nfedoroff","iamglimy","allagringaus","aiiaiiaii","olaolusoga","buryaknick","wim1k","nicklacke",
"a1chapone","steynviljoen","strikewan","ryankirkman","andrewabogado","doooon","jagan123","ariffsetiawan","elenadissi","mwarkentin","thierrymeier_","r_garcia","dmackerman","borantula","konus","spacewood_","ryuchi311","evanshajed","tristanlegros","shoaib253","aislinnkelly","okcoker","timpetricola","sunshinedgirl","chadami","aleclarsoniv","nomidesigns","petebernardo","scottiedude","millinet","imsoper","imammuht","benjamin_knight","nepdud","joki4","lanceguyatt","bboy1895","amywebbb","rweve","haruintesettden","ricburton","nelshd","batsirai","primozcigler","jffgrdnr","8d3k","geneseleznev","al_li","souperphly","mslarkina","2fockus","cdavis565","xiel","turkutuuli","uxward","lebinoclard","gauravjassal","davidmerrique","mdsisto","andrewofficer","kojourin","dnirmal","kevka","mr_shiznit","aluisio_azevedo","cloudstudio","danvierich","alexivanichkin","fran_mchamy","perretmagali","betraydan","cadikkara","matbeedotcom","jeremyworboys","bpartridge","michaelkoper","silv3rgvn","alevizio","johnsmithagency","lawlbwoy","vitor376","desastrozo","thimo_cz","jasonmarkjones","lhausermann","xravil","guischmitt","vigobronx","panghal0","miguelkooreman","surgeonist","christianoliff","caspergrl","iamkarna","ipavelek","pierre_nel","y2graphic","sterlingrules","elbuscainfo","bennyjien","stushona","estebanuribe","embrcecreations","danillos","elliotlewis","charlesrpratt","vladyn","emmeffess","carlosblanco_eu","leonfedotov","rangafangs","chris_frees","tgormtx","bryan_topham","jpscribbles","mighty55","carbontwelve","isaacfifth","iamjdeleon","snowwrite","barputro","drewbyreese","sachacorazzi","bistrianiosip","magoo04","pehamondello","yayteejay","a_harris88","algunsanabria","zforrester","ovall","carlosjgsousa","geobikas","ah_lice","looneydoodle","nerdgr8","ddggccaa","zackeeler","normanbox","el_fuertisimo","ismail_biltagi","juangomezw","jnmnrd","patrickcoombe","ryanjohnson_me","markolschesky","jeffgolenski","kvasnic","lindseyzilla","gauchomatt","afusinatto","kevinoh","okansurreel","adamawesomeface","emilebo
udeling","arishi_","juanmamartinez","wikiziner","danthms","mkginfo","terrorpixel","curiousonaut","prheemo","michaelcolenso","foczzi","martip07","thaodang17","johncafazza","robinlayfield","franciscoamk","abdulhyeuk","marklamb","edobene","andresenfredrik","mikaeljorhult","chrisslowik","vinciarts","meelford","elliotnolten","yehudab","vijaykarthik","bfrohs","josep_martins","attacks","sur4dye","tumski","instalox","mangosango","paulfarino","kazaky999","kiwiupover","nvkznemo","tom_even","ratbus","woodsman001","joshmedeski","thewillbeard","psaikali","joe_black","aleinadsays","marcusgorillius","hota_v","jghyllebert","shinze","janpalounek","jeremiespoken","her_ruu","dansowter","felipeapiress","magugzbrand2d","posterjob","nathalie_fs","bobbytwoshoes","dreizle","jeremymouton","elisabethkjaer","notbadart","mohanrohith","jlsolerdeltoro","itskawsar","slowspock","zvchkelly","wiljanslofstra","craighenneberry","trubeatto","juaumlol","samscouto","BenouarradeM","gipsy_raf","netonet_il","arkokoley","itsajimithing","smalonso","victordeanda","_dwite_","richardgarretts","gregrwilkinson","anatolinicolae","lu4sh1i","stefanotirloni","ostirbu","darcystonge","naitanamoreno","michaelcomiskey","adhiardana","marcomano_","davidcazalis","falconerie","gregkilian","bcrad","bolzanmarco","low_res","vlajki","petar_prog","jonkspr","akmalfikri","mfacchinello","atanism","harry_sistalam","murrayswift","bobwassermann","gavr1l0","madshensel","mr_subtle","deviljho_","salimianoff","joetruesdell","twittypork","airskylar","dnezkumar","dgajjar","cherif_b","salvafc","louis_currie","deeenright","cybind","eyronn","vickyshits","sweetdelisa","cboller1","andresdjasso","melvindidit","andysolomon","thaisselenator_","lvovenok","giuliusa","belyaev_rs","overcloacked","kamal_chaneman","incubo82","hellofeverrrr","mhaligowski","sunlandictwin","bu7921","andytlaw","jeremery","finchjke","manigm","umurgdk","scottfeltham","ganserene","mutu_krish","jodytaggart","ntfblog","tanveerrao","hfalucas","alxleroydeval","kucingbelang4","bargaor
obalo","colgruv","stalewine","kylefrost","baumannzone","angelcolberg","sachingawas","jjshaw14","ramanathan_pdy","johndezember","nilshoenson","brandonmorreale","nutzumi","brandonflatsoda","sergeyalmone","klefue","kirangopal","baumann_alex","matthewkay_","jay_wilburn","shesgared","apriendeau","johnriordan","wake_gs","aleksitappura","emsgulam","xilantra","imomenui","sircalebgrove","newbrushes","hsinyo23","m4rio","katiemdaly","s4f1","ecommerceil","marlinjayakody","swooshycueb","sangdth","coderdiaz","bluefx_","vivekprvr","sasha_shestakov","eugeneeweb","dgclegg","n1ght_coder","dixchen","blakehawksworth","trueblood_33","hai_ninh_nguyen","marclgonzales","yesmeck","stephcoue","doronmalki","ruehldesign","anasnakawa","kijanmaharjan","wearesavas","stefvdham","tweetubhai","alecarpentier","fiterik","antonyryndya","d00maz","theonlyzeke","missaaamy","carlosm","manekenthe","reetajayendra","jeremyshimko","justinrgraham","stefanozoffoli","overra","mrebay007","shvelo96","pyronite","thedjpetersen","rtyukmaev","_williamguerra","albertaugustin","vikashpathak18","kevinjohndayy","vj_demien","colirpixoil","goddardlewis","laasli","jqiuss","heycamtaylor","nastya_mane","mastermindesign","ccinojasso1","nyancecom","sandywoodruff","bighanddesign","sbtransparent","aviddayentonbay","richwild","kaysix_dizzy","tur8le","seyedhossein1","privetwagner","emmandenn","dev_essentials","jmfsocial","_yardenoon","mateaodviteza","weavermedia","mufaddal_mw","hafeeskhan","ashernatali","sulaqo","eddiechen","josecarlospsh","vm_f","enricocicconi","danmartin70","gmourier","donjain","mrxloka","_pedropinho","eitarafa","oscarowusu","ralph_lam","panchajanyag","woodydotmx","jerrybai1907","marshallchen_","xamorep","aio___","chaabane_wail","txcx","akashsharma39","falling_soul","sainraja","mugukamil","johannesneu","markwienands","karthipanraj","balakayuriy","alan_zhang_","layerssss","kaspernordkvist","mirfanqureshi","hanna_smi","VMilescu","aeon56","m_kalibry","sreejithexp","dicesales","dhoot_amit","smenov","lonesomelemon","vla
dimirdevic","joelcipriano","haligaliharun","buleswapnil","serefka","ifarafonow","vikasvinfotech","urrutimeoli","areandacom"]}).call(t,r(56)(e))},function(e,t,r){(function(e){"use strict";e.exports=["com","net","org","biz","info","eu","co"]}).call(t,r(56)(e))},function(e,t,r){"use strict";e.exports={word:r(221),supplemental:r(222),sentence:function(){var e=this.random.number(3,10),t=[];for(e;e>0;e--)t.push(this.lorem.word());return this.capitalize(t.join(" "))+"."},paragraph:function(){var e=this.random.number(3,6),t=[];for(e;e>0;e--)t.push(this.lorem.sentence());return t.join(" ")}}},function(e,t,r){(function(e){"use strict";e.exports=["alias","consequatur","aut","perferendis","sit","voluptatem","accusantium","doloremque","aperiam","eaque","ipsa","quae","ab","illo","inventore","veritatis","et","quasi","architecto","beatae","vitae","dicta","sunt","explicabo","aspernatur","aut","odit","aut","fugit","sed","quia","consequuntur","magni","dolores","eos","qui","ratione","voluptatem","sequi","nesciunt","neque","dolorem","ipsum","quia","dolor","sit","amet","consectetur","adipisci","velit","sed","quia","non","numquam","eius","modi","tempora","incidunt","ut","labore","et","dolore","magnam","aliquam","quaerat","voluptatem","ut","enim","ad","minima","veniam","quis","nostrum","exercitationem","ullam","corporis","nemo","enim","ipsam","voluptatem","quia","voluptas","sit","suscipit","laboriosam","nisi","ut","aliquid","ex","ea","commodi","consequatur","quis","autem","vel","eum","iure","reprehenderit","qui","in","ea","voluptate","velit","esse","quam","nihil","molestiae","et","iusto","odio","dignissimos","ducimus","qui","blanditiis","praesentium","laudantium","totam","rem","voluptatum","deleniti","atque","corrupti","quos","dolores","et","quas","molestias","excepturi","sint","occaecati","cupiditate","non","provident","sed","ut","perspiciatis","unde","omnis","iste","natus","error","similique","sunt","in","culpa","qui","officia","deserunt","mollitia","animi","id","est","laborum","et","dol
orum","fuga","et","harum","quidem","rerum","facilis","est","et","expedita","distinctio","nam","libero","tempore","cum","soluta","nobis","est","eligendi","optio","cumque","nihil","impedit","quo","porro","quisquam","est","qui","minus","id","quod","maxime","placeat","facere","possimus","omnis","voluptas","assumenda","est","omnis","dolor","repellendus","temporibus","autem","quibusdam","et","aut","consequatur","vel","illum","qui","dolorem","eum","fugiat","quo","voluptas","nulla","pariatur","at","vero","eos","et","accusamus","officiis","debitis","aut","rerum","necessitatibus","saepe","eveniet","ut","et","voluptates","repudiandae","sint","et","molestiae","non","recusandae","itaque","earum","rerum","hic","tenetur","a","sapiente","delectus","ut","aut","reiciendis","voluptatibus","maiores","doloribus","asperiores","repellat"]}).call(t,r(56)(e))},function(e,t,r){(function(e){"use strict";e.exports=["abbas","abduco","abeo","abscido","absconditus","absens","absorbeo","absque","abstergo","absum","abundans","abutor","accedo","accendo","acceptus","accipio","accommodo","accusator","acer","acerbitas","acervus","acidus","acies","acquiro","acsi","adamo","adaugeo","addo","adduco","ademptio","adeo","adeptio","adfectus","adfero","adficio","adflicto","adhaero","adhuc","adicio","adimpleo","adinventitias","adipiscor","adiuvo","administratio","admiratio","admitto","admoneo","admoveo","adnuo","adopto","adsidue","adstringo","adsuesco","adsum","adulatio","adulescens","adultus","aduro","advenio","adversus","advoco","aedificium","aeger","aegre","aegrotatio","aegrus","aeneus","aequitas","aequus","aer","aestas","aestivus","aestus","aetas","aeternus","ager","aggero","aggredior","agnitio","agnosco","ago","ait","aiunt","alienus","alii","alioqui","aliqua","alius","allatus","alo","alter","altus","alveus","amaritudo","ambitus","ambulo","amicitia","amiculum","amissio","amita","amitto","amo","amor","amoveo","amplexus","amplitudo","amplus","ancilla","angelus","angulus","angustus","animadverto","animi","animu
s","annus","anser","ante","antea","antepono","antiquus","aperio","aperte","apostolus","apparatus","appello","appono","appositus","approbo","apto","aptus","apud","aqua","ara","aranea","arbitro","arbor","arbustum","arca","arceo","arcesso","arcus","argentum","argumentum","arguo","arma","armarium","armo","aro","ars","articulus","artificiose","arto","arx","ascisco","ascit","asper","aspicio","asporto","assentator","astrum","atavus","ater","atqui","atrocitas","atrox","attero","attollo","attonbitus","auctor","auctus","audacia","audax","audentia","audeo","audio","auditor","aufero","aureus","auris","aurum","aut","autem","autus","auxilium","avaritia","avarus","aveho","averto","avoco","baiulus","balbus","barba","bardus","basium","beatus","bellicus","bellum","bene","beneficium","benevolentia","benigne","bestia","bibo","bis","blandior","bonus","bos","brevis","cado","caecus","caelestis","caelum","calamitas","calcar","calco","calculus","callide","campana","candidus","canis","canonicus","canto","capillus","capio","capitulus","capto","caput","carbo","carcer","careo","caries","cariosus","caritas","carmen","carpo","carus","casso","caste","casus","catena","caterva","cattus","cauda","causa","caute","caveo","cavus","cedo","celebrer","celer","celo","cena","cenaculum","ceno","censura","centum","cerno","cernuus","certe","certo","certus","cervus","cetera","charisma","chirographum","cibo","cibus","cicuta","cilicium","cimentarius","ciminatio","cinis","circumvenio","cito","civis","civitas","clam","clamo","claro","clarus","claudeo","claustrum","clementia","clibanus","coadunatio","coaegresco","coepi","coerceo","cogito","cognatus","cognomen","cogo","cohaero","cohibeo","cohors","colligo","colloco","collum","colo","color","coma","combibo","comburo","comedo","comes","cometes","comis","comitatus","commemoro","comminor","commodo","communis","comparo","compello","complectus","compono","comprehendo","comptus","conatus","concedo","concido","conculco","condico","conduco","confero","confido","conforto","conf
ugo","congregatio","conicio","coniecto","conitor","coniuratio","conor","conqueror","conscendo","conservo","considero","conspergo","constans","consuasor","contabesco","contego","contigo","contra","conturbo","conventus","convoco","copia","copiose","cornu","corona","corpus","correptius","corrigo","corroboro","corrumpo","coruscus","cotidie","crapula","cras","crastinus","creator","creber","crebro","credo","creo","creptio","crepusculum","cresco","creta","cribro","crinis","cruciamentum","crudelis","cruentus","crur","crustulum","crux","cubicularis","cubitum","cubo","cui","cuius","culpa","culpo","cultellus","cultura","cum","cunabula","cunae","cunctatio","cupiditas","cupio","cuppedia","cupressus","cur","cura","curatio","curia","curiositas","curis","curo","curriculum","currus","cursim","curso","cursus","curto","curtus","curvo","curvus","custodia","damnatio","damno","dapifer","debeo","debilito","decens","decerno","decet","decimus","decipio","decor","decretum","decumbo","dedecor","dedico","deduco","defaeco","defendo","defero","defessus","defetiscor","deficio","defigo","defleo","defluo","defungo","degenero","degero","degusto","deinde","delectatio","delego","deleo","delibero","delicate","delinquo","deludo","demens","demergo","demitto","demo","demonstro","demoror","demulceo","demum","denego","denique","dens","denuncio","denuo","deorsum","depereo","depono","depopulo","deporto","depraedor","deprecator","deprimo","depromo","depulso","deputo","derelinquo","derideo","deripio","desidero","desino","desipio","desolo","desparatus","despecto","despirmatio","infit","inflammatio","paens","patior","patria","patrocinor","patruus","pauci","paulatim","pauper","pax","peccatus","pecco","pecto","pectus","pecunia","pecus","peior","pel","ocer","socius","sodalitas","sol","soleo","solio","solitudo","solium","sollers","sollicito","solum","solus","solutio","solvo","somniculosus","somnus","sonitus","sono","sophismata","sopor","sordeo","sortitus","spargo","speciosus","spectaculum","speculum","sperno","spero"
,"spes","spiculum","spiritus","spoliatio","sponte","stabilis","statim","statua","stella","stillicidium","stipes","stips","sto","strenuus","strues","studio","stultus","suadeo","suasoria","sub","subito","subiungo","sublime","subnecto","subseco","substantia","subvenio","succedo","succurro","sufficio","suffoco","suffragium","suggero","sui","sulum","sum","summa","summisse","summopere","sumo","sumptus","supellex","super","suppellex","supplanto","suppono","supra","surculus","surgo","sursum","suscipio","suspendo","sustineo","suus","synagoga","tabella","tabernus","tabesco","tabgo","tabula","taceo","tactus","taedium","talio","talis","talus","tam","tamdiu","tamen","tametsi","tamisium","tamquam","tandem","tantillus","tantum","tardus","tego","temeritas","temperantia","templum","temptatio","tempus","tenax","tendo","teneo","tener","tenuis","tenus","tepesco","tepidus","ter","terebro","teres","terga","tergeo","tergiversatio","tergo","tergum","termes","terminatio","tero","terra","terreo","territo","terror","tersus","tertius","testimonium","texo","textilis","textor","textus","thalassinus","theatrum","theca","thema","theologus","thermae","thesaurus","thesis","thorax","thymbra","thymum","tibi","timidus","timor","titulus","tolero","tollo","tondeo","tonsor","torqueo","torrens","tot","totidem","toties","totus","tracto","trado","traho","trans","tredecim","tremo","trepide","tres","tribuo","tricesimus","triduana","triginta","tripudio","tristis","triumphus","trucido","truculenter","tubineus","tui","tum","tumultus","tunc","turba","turbo","turpe","turpis","tutamen","tutis","tyrannus","uberrime","ubi","ulciscor","ullus","ulterius","ultio","ultra","umbra","umerus","umquam","una","unde","undique","universe","unus","urbanus","urbs","uredo","usitas","usque","ustilo","ustulo","usus","uter","uterque","utilis","utique","utor","utpote","utrimque","utroque","utrum","uxor","vaco","vacuus","vado","vae","valde","valens","valeo","valetudo","validus","vallum","vapulus","varietas","varius","vehemens","vel","vel
ociter","velum","velut","venia","venio","ventito","ventosus","ventus","venustas","ver","verbera","verbum","vere","verecundia","vereor","vergo","veritas","vero","versus","verto","verumtamen","verus","vesco","vesica","vesper","vespillo","vester","vestigium","vestrum","vetus","via","vicinus","vicissitudo","victoria","victus","videlicet","video","viduata","viduo","vigilo","vigor","vilicus","vilis","vilitas","villa","vinco","vinculum","vindico","vinitor","vinum","vir","virga","virgo","viridis","viriliter","virtus","vis","viscus","vita","vitiosus","vitium","vito","vivo","vix","vobis","vociferor","voco","volaticus","volo","volubilis","voluntarius","volup","volutabrum","volva","vomer","vomica","vomito","vorago","vorax","voro","vos","votum","voveo","vox","vulariter","vulgaris","vulgivagus","vulgo","vulgus","vulnero","vulnus","vulpes","vulticulus","vultuosus","xiphias"];
}).call(t,r(56)(e))},function(e,t,r){"use strict";e.exports={month:["January","February","March","April","May","June","July","August","September","October","November","December"],weekday:["Sunday","Monday","Tuesday","Wednesday","Thursday","Friday","Saturday"],weekdayShort:["Sun","Mon","Tue","Wed","Thu","Fri","Sat"],weekdayMin:["Su","Mo","Tu","We","Th","Fr","Sa"],timezone:r(224),past:function e(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:1,r=arguments[1],n=r?new Date(Date.parse(r)):new Date,i=1e3,a=365*t*24*3600*1e3,e=n.getTime();return e-=this.random.number(i,a),n.setTime(e),n},future:function e(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:1,r=arguments[1],n=r?new Date(Date.parse(r)):new Date,i=1e3,a=365*t*24*3600*1e3,e=n.getTime();return e+=this.random.number(i,a),n.setTime(e),n},between:function(e,t){var r=Date.parse(e),n=this.random.number(Date.parse(t)-r),i=new Date(r+n);return i},recent:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:1,t=new Date,r=1e3,n=24*e*3600*1e3,i=t.getTime();return i-=this.random.number(r,n),t.setTime(i),t},age:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:18,t=arguments.length>1&&void 0!==arguments[1]?arguments[1]:80;return this.random.number(e,t)}}},function(e,t,r){(function(e){"use 
strict";e.exports=["Pacific/Midway","Pacific/Pago_Pago","Pacific/Honolulu","America/Juneau","America/Los_Angeles","America/Tijuana","America/Denver","America/Phoenix","America/Chihuahua","America/Mazatlan","America/Chicago","America/Regina","America/Mexico_City","America/Mexico_City","America/Monterrey","America/Guatemala","America/New_York","America/Indiana/Indianapolis","America/Bogota","America/Lima","America/Lima","America/Halifax","America/Caracas","America/La_Paz","America/Santiago","America/St_Johns","America/Sao_Paulo","America/Argentina/Buenos_Aires","America/Guyana","America/Godthab","Atlantic/South_Georgia","Atlantic/Azores","Atlantic/Cape_Verde","Europe/Dublin","Europe/London","Europe/Lisbon","Europe/London","Africa/Casablanca","Africa/Monrovia","Etc/UTC","Europe/Belgrade","Europe/Bratislava","Europe/Budapest","Europe/Ljubljana","Europe/Prague","Europe/Sarajevo","Europe/Skopje","Europe/Warsaw","Europe/Zagreb","Europe/Brussels","Europe/Copenhagen","Europe/Madrid","Europe/Paris","Europe/Amsterdam","Europe/Berlin","Europe/Berlin","Europe/Rome","Europe/Stockholm","Europe/Vienna","Africa/Algiers","Europe/Bucharest","Africa/Cairo","Europe/Helsinki","Europe/Kiev","Europe/Riga","Europe/Sofia","Europe/Tallinn","Europe/Vilnius","Europe/Athens","Europe/Istanbul","Europe/Minsk","Asia/Jerusalem","Africa/Harare","Africa/Johannesburg","Europe/Moscow","Europe/Moscow","Europe/Moscow","Asia/Kuwait","Asia/Riyadh","Africa/Nairobi","Asia/Baghdad","Asia/Tehran","Asia/Muscat","Asia/Muscat","Asia/Baku","Asia/Tbilisi","Asia/Yerevan","Asia/Kabul","Asia/Yekaterinburg","Asia/Karachi","Asia/Karachi","Asia/Tashkent","Asia/Kolkata","Asia/Kolkata","Asia/Kolkata","Asia/Kolkata","Asia/Kathmandu","Asia/Dhaka","Asia/Dhaka","Asia/Colombo","Asia/Almaty","Asia/Novosibirsk","Asia/Rangoon","Asia/Bangkok","Asia/Bangkok","Asia/Jakarta","Asia/Krasnoyarsk","Asia/Shanghai","Asia/Chongqing","Asia/Hong_Kong","Asia/Urumqi","Asia/Kuala_Lumpur","Asia/Singapore","Asia/Taipei","Australia/Perth","Asia/Irkut
sk","Asia/Ulaanbaatar","Asia/Seoul","Asia/Tokyo","Asia/Tokyo","Asia/Tokyo","Asia/Yakutsk","Australia/Darwin","Australia/Adelaide","Australia/Melbourne","Australia/Melbourne","Australia/Sydney","Australia/Brisbane","Australia/Hobart","Asia/Vladivostok","Pacific/Guam","Pacific/Port_Moresby","Asia/Magadan","Asia/Magadan","Pacific/Noumea","Pacific/Fiji","Asia/Kamchatka","Pacific/Majuro","Pacific/Auckland","Pacific/Auckland","Pacific/Tongatapu","Pacific/Fakaofo","Pacific/Apia"]}).call(t,r(56)(e))},function(e,t,r){"use strict";function n(e){return e&&e.__esModule?e:{default:e}}var i=r(226),a=n(i);e.exports={uuid:a.default.v4}},function(e,t,r){var n=r(227),i=r(230),a=i;a.v1=n,a.v4=i,e.exports=a},function(e,t,r){function n(e,t,r){var n=t&&r||0,i=t||[];e=e||{};var o=void 0!==e.clockseq?e.clockseq:u,f=void 0!==e.msecs?e.msecs:(new Date).getTime(),h=void 0!==e.nsecs?e.nsecs:l+1,d=f-c+(h-l)/1e4;if(d<0&&void 0===e.clockseq&&(o=o+1&16383),(d<0||f>c)&&void 0===e.nsecs&&(h=0),h>=1e4)throw new Error("uuid.v1(): Can't create more than 10M uuids/sec");c=f,l=h,u=o,f+=122192928e5;var p=(1e4*(268435455&f)+h)%4294967296;i[n++]=p>>>24&255,i[n++]=p>>>16&255,i[n++]=p>>>8&255,i[n++]=255&p;var m=f/4294967296*1e4&268435455;i[n++]=m>>>8&255,i[n++]=255&m,i[n++]=m>>>24&15|16,i[n++]=m>>>16&255,i[n++]=o>>>8|128,i[n++]=255&o;for(var y=e.node||s,g=0;g<6;++g)i[n+g]=y[g];return t?t:a(i)}var i=r(228),a=r(229),o=i(),s=[1|o[0],o[1],o[2],o[3],o[4],o[5]],u=16383&(o[6]<<8|o[7]),c=0,l=0;e.exports=n},function(e,t){(function(t){var r,n=t.crypto||t.msCrypto;if(n&&n.getRandomValues){var i=new Uint8Array(16);r=function(){return n.getRandomValues(i),i}}if(!r){var a=new Array(16);r=function(){for(var e,t=0;t<16;t++)0===(3&t)&&(e=4294967296*Math.random()),a[t]=e>>>((3&t)<<3)&255;return a}}e.exports=r}).call(t,function(){return this}())},function(e,t){function r(e,t){var r=t||0,i=n;return 
i[e[r++]]+i[e[r++]]+i[e[r++]]+i[e[r++]]+"-"+i[e[r++]]+i[e[r++]]+"-"+i[e[r++]]+i[e[r++]]+"-"+i[e[r++]]+i[e[r++]]+"-"+i[e[r++]]+i[e[r++]]+i[e[r++]]+i[e[r++]]+i[e[r++]]+i[e[r++]]}for(var n=[],i=0;i<256;++i)n[i]=(i+256).toString(16).substr(1);e.exports=r},function(e,t,r){function n(e,t,r){var n=t&&r||0;"string"==typeof e&&(t="binary"==e?new Array(16):null,e=null),e=e||{};var o=e.random||(e.rng||i)();if(o[6]=15&o[6]|64,o[8]=63&o[8]|128,t)for(var s=0;s<16;++s)t[n+s]=o[s];return t||a(o)}var i=r(228),a=r(229);e.exports=n},function(e,t){"use strict";e.exports={address:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{},t=this.address.countryAndCode();return{country:e.country||t.name,countryCode:e.countryCode||t.code,state:e.state||this.address.state(),city:e.city||this.address.city(),street:e.street||this.address.street(),zip:e.zip||this.address.postCode(),geo:e.geo||this.address.geoLocation()}},user:function(e){var t=this.populate("#{names.firstName"+(e?e.toUpperCase():"")+"}"),r=this.populate("#{names.lastName"+(e?e.toUpperCase():"")+"}");return{firstName:t,lastName:r,gender:e,userName:this.internet.userName(t,r),password:this.internet.password(),email:this.internet.email(t,r),phone:this.phone.number(),dob:this.date.past(80),website:this.internet.url(),ip:this.internet.ip(),avatar:this.internet.avatar(),gravatar:this.internet.gravatar(),address:this.entity.address(),status:this.random.boolean()}},company:function(e){return e=e||this.company.name(),{name:e,email:this.internet.email(e),phone:this.phone.number(),website:this.internet.url(),ip:this.internet.ip(),address:this.entity.address()}},post:function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:3;return{title:this.lorem.sentence(),keywords:this.utimes(this.lorem.word,3),created:this.date.recent(7),content:this.times(this.lorem.paragraph,e).join("\r\n")}}}}]))}); | - Redistributions of source code must retain the above copyright notice, this list of conditions and the following 
disclaimer.
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
|
comparator_mock.go | package test
import ( | "github.com/stretchr/testify/mock"
)
type MockAgentComparator struct {
mock.Mock
}
func (m *MockAgentComparator) Compare(firstAgent i_agent.IAgent, secondAgent i_agent.IAgent) bool {
args := m.Called()
return args.Bool(0)
} | "go-emas/pkg/i_agent"
|
freq.py | def most_frequent(s):
dic={}
for i in s:
if i in dic:
|
else:
dic[i] = 1
z = sorted(dic.items(), key = lambda x: x[1], reverse = True)
for i in z:
print(i[0]+"="+str(i[1]))
most_frequent('mississippi')
| dic[i] += 1 |
model_598.py | # exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd'])
Monomer('Bcl2', ['BidM', 'BaxA'])
Monomer('C3pro', ['Apop'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'Bcl2', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU'])
Monomer('ApafA')
Monomer('BidM', ['BaxM', 'Bcl2'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 149500.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('Bcl2_0', 328000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('Bcl2_obs', Bcl2())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None) + BidU(C8A=None) | C8A(BidU=1) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1) % BidU(C8A=1) >> C8A(BidU=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None, Bcl2=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None, Bcl2=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None, Bcl2=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('inhibition_0_Bcl2_inhibitor_BidM_inh_target', Bcl2(BidM=None, BaxA=None) + BidM(BaxM=None, Bcl2=None) | Bcl2(BidM=1, BaxA=None) % BidM(BaxM=None, Bcl2=1), inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr)
Rule('inhibition_0_Bcl2_inhibitor_BaxA_inh_target', Bcl2(BidM=None, BaxA=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | Bcl2(BidM=None, BaxA=1) % BaxA(BaxM=None, Bcl2=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None), C8pro_0)
Initial(Bcl2(BidM=None, BaxA=None), Bcl2_0)
Initial(C3pro(Apop=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0) | Initial(C3A(Xiap=None, ParpU=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None, Bcl2=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0) | |
telnet_default.py | from maza.core.exploit import *
from maza.core.telnet.telnet_client import TelnetClient
from maza.resources import wordlists
class Exploit(TelnetClient):
__info__ = {
"name": "Telnet Default Creds",
"description": "Module performs dictionary attack with default credentials against Telnet service. "
"If valid credentials are found, they are displayed to the user.",
"authors": (
"Marcin Bury <marcin[at]threat9.com>", # routersploit module
),
"devices": (
"Multiple devices",
)
}
target = OptIP("", "Target IPv4, IPv6 address or file with ip:port (file://)")
port = OptPort(23, "Target Telnet port")
threads = OptInteger(8, "Number of threads")
defaults = OptWordlist(wordlists.defaults, "User:Pass or file with default credentials (file://)")
stop_on_success = OptBool(True, "Stop on first valid authentication attempt")
verbosity = OptBool(True, "Display authentication attempts")
def run(self):
self.credentials = []
self.attack()
@multi
def attack(self):
if not self.check():
return
print_status("Starting default credentials attack against Telnet service")
data = LockedIterator(self.defaults)
self.run_threads(self.threads, self.target_function, data)
if self.credentials:
print_success("Credentials found!")
headers = ("Target", "Port", "Service", "Username", "Password") | print_error("Credentials not found")
def target_function(self, running, data):
while running.is_set():
try:
username, password = data.next().split(":")
telnet_client = self.telnet_create()
if telnet_client.login(username, password, retries=3):
if self.stop_on_success:
running.clear()
self.credentials.append((self.target, self.port, self.target_protocol, username, password))
telnet_client.close()
except StopIteration:
break
def check(self):
telnet_client = self.telnet_create()
if telnet_client.test_connect():
print_status("Target exposes Telnet service", verbose=self.verbosity)
return True
print_status("Target does not expose Telnet service", verbose=self.verbosity)
return False
@mute
def check_default(self):
if self.check():
self.credentials = []
data = LockedIterator(self.defaults)
self.run_threads(self.threads, self.target_function, data)
if self.credentials:
return self.credentials
return None | print_table(headers, *self.credentials)
else: |
organic-ui.d.ts | /// <reference path="../dts/globals.d.ts" />
/// <reference path="./reinvent.d.ts" />
declare namespace OrganicUi {
export interface ResultSet<T> {
results: T[];
}
export type PartialForcedType<T, FT> = {
[P in keyof T]?: FT;
} | (keyof T)[];
export interface IDataFormAccessorMsg {
accessor: string;
message: any;
}
export interface PromisedResultSet<T> extends Promise<IListData<T>> {
}
export interface ActionResult extends Promise<any> {
}
export interface IListData<TRow = any> {
totalRows: number;
rows: TRow[];
}
export interface DatePickerProps {
value?, hasTime?, popOverReversed?, style?: React.CSSProperties, onChange?: any;
readonly?;
editorPrefix?: string;
onFocus?: () => void;
placeholder?: string;
onBlur?: Function;
className?: string;
}
interface IContentItem<T, TActions> {
alt: {
itemNo: number;
index: number;
isLast: boolean;
isFirst: boolean;
};
item: T;
actions: TActions;
}
export type IContentFunction<T, TActions> = (item: IContentItem<T, TActions>) => JSX.Element;
export interface IBindableElement {
tryToBinding();
}
export interface IComponentRefer<T = any> {
componentRef: T;
}
export const Version: string;
export class BaseComponent<P = any, S = any> extends React.Component<P, S>{
props: P;
state: S;
autoUpdateState: PartialFunction<S>;
repatch(delta: Partial<S> & { debug?}, target?, delay?: number);
querySelectorAll<T = any>(cssSelector: string, target?: HTMLElement): T[];
setPageTitle(title);
renderErrorMode(title, subtitle);
defaultState(delta: Partial<S>);
asyncRepatch(key: keyof S, asyncFunc: Function, ...args);
setState<K extends keyof S>(
state: ((prevState: Readonly<S>, props: Readonly<P>) => (Pick<S, K> | S | null)) | (Pick<S, K> | S | null),
callback?: () => void
): void;
componentWillMount();
forceUpdate(callBack?: () => void): void;
render(): React.ReactNode;
context: any;
refs: {
[key: string]: React.ReactInstance
};
constructor(props: P);
}
export interface CriticalContentProps { permissionValue: string; permissionKey: string; children?}
export function CriticalContent(p: { permissionKey: string, children?}): JSX.Element;
interface IFieldMessage {
type: 'info' | 'success' | 'danger';
message: string;
by?: string;
}
export type TAccessor = string | Reinvent.IBindingPoint | Reinvent.IBindingPoint[];
export interface IFieldProps<TColProps = any> {
accessor?: TAccessor;
role?: string;
showOpeartors?: boolean;
operators?: string[];
onGet?, onSet?: Function;
onChange?: (value) => void;
onCapture?: (value) => void;
onCaptureDataForm?: (dataForm) => void;
onErrorCode?: (v: any) => ErrorCodeForFieldValidation;
onRenderCell?: (item?: any, index?: number, column?: any) => any;
onInitialRead?: (value: any, row: any) => void;
label?: any;
icon?: any;
required?: boolean;
readonly?: boolean;
messages?: IFieldMessage[];
onlyInput?: boolean;
labelOnTop?: 'always';
onMirror?: Function;
getInfoMessage?: () => string;
children?: any;
className?: string;
renderMode?: string;
trueDisplayText?: string;
falseDisplayText?: string;
filterData?: { fieldType?, ignoreFilter?: boolean };
sortData?: { ignoreSort?: boolean };
avoidSort?: boolean;
defaultOperator?: string;
disableFixedWidth?: boolean;
columnProps?: Partial<TColProps>;
defaultValue?: any;
defaultValueAllowed?: () => boolean;
style?: React.CSSProperties;
persistentCacheKey?: string;
dataEntryOnly?: boolean;
iconName?: string;
}
export interface ActionsForIArrayDataViewItem {
remove: Function;
append: Function;
select: Function;
}
export interface ITipsProps {
tips: React.ReactNode[];
onIgnore?: ((index: number) => (true | false));
onDetailClick?: ((index: number) => void);
defaultActiveTipIndex?: number;
}
export interface IArrayDataViewProps<T> {
value: T[];
onChange?: (value: T[]) => void;
children: IContentFunction<T, ActionsForIArrayDataViewItem>;
defaultItem?: T | (() => T);
minCount?: number;
className?: string;
style?: React.CSSProperties;
}
export interface IAdvSectionProps extends React.HTMLAttributes<any> {
errorMessage: any;
onActionExecute?: (actionName: string) => void;
onCloseMessage: () => void
}
export interface FilterItem {
op: string;
value?: string;
value2?: string;
values?: string[];
fieldName: string;
}
interface UtilsIntf {
uuid(): string;
classNames(...args: string[]): string;
coalesce(...args: any[]): any;
navigate(url);
debounce(func, wait, immediate?);
makeWritable(root: HTMLElement);
makeReadonly(root: HTMLElement);
setIconAndText(code: string, iconCode: string, text?: string);
showIconText(textId);
showIconAndText(textId);
scrollTo(element, to, duration);
i18nFormat(i18nCode, args);
showIcon(icon: string, className?: string);
defaultGetId: ({ id }) => any;
setNoWarn(v);
warn(...args);
renderDevButton(targetText, target: IDeveloperFeatures);
overrideComponentClass(componentClass: React.ComponentClass, extraProps);
overrideFunctionalComponent<T>(componentClass: React.SFC<T>, extraProps: Partial<T>): React.SFC<T>;
accquireDevPortId();
renderButtons(methods: TMethods, opts?: { componentClass?: React.ComponentType, callback?: Function });
reduceEntriesToObject(data: any): any;
limitValue(value: number, opts: { min?, max?}): number;
simulateClick(elem);
merge<T>(...args: Partial<T>[]): T;
toArray(arg): any[];
sumValues(numbers: number[]);
clone<T>(x: T): T;
uniqueArray<T>(array: T[])
validateData<T>(data: T, callbacks: OrganicUi.PartialFunction<T>): OrganicUi.IDataFormAccessorMsg[];
assignDefaultValues<T>(data: T, defaultValues: Partial<T>)
skinDeepRender<T>(type: React.ComponentType<T>, params: T): JSX.Element;
scanElement(element: React.ReactElement<any>, tester: (element) => boolean): JSX.Element;
diff(a, b): any;
getCascadeAttribute(element: HTMLElement, attributeName: string, errorRaised?: boolean): string;
enumToIdNames(enumType: any): ({ Id, Name }[]);
enumToRecords(enumType: any): ({ id, text }[]);
addDays(date: Date, days: number): Date;
numberFormat(n: string | number): string;
hash(data): string;
persianNumber(s: string): string;
delay(ms: number): Promise<void>;
toggleClassOnHover(...classNames: string[]): Partial<React.HTMLAttributes<HTMLElement>>;
findPosition(element: HTMLElement): [number, number];
showErrorMessage(message, title?): any;
fixDataBySchema<T>(data: T, schema): T;
successCallout(content: React.ReactNode): JSX.Element;
failCallout(content: React.ReactNode): JSX.Element;
setDefaultProp<P, KV extends keyof P>(componentType: React.ComponentType<P>, key: KV, value: P[KV]);
}
export const Utils: UtilsIntf;
export const changeCase: { camelCase: Function, snakeCase: Function, paramCase: Function };
export class Field<T> extends BaseComponent<IFieldProps<T>, any>{
getFilterItem(): FilterItem;
getErrorMessage();
revalidate();
getTextReader();
}
export interface IRegistry<T = any> {
data: any;
secondaryValues: any;
notFounded: any;
(key: string | TemplateStringsArray): T;
(key: string, value: T): void;
register(delta: { [key: string]: T }): void;
set(key: string, value: T, extraValue?);
get(key: string): string;
customTester(v: CustomTesterForRegistry, value: T);
clear();
}
export interface DataEntryFor {
}
type TrelloCardProps<P> = P & {
onUpdate: (data: P) => void,
}
type TrelloCardActionProps<P> = {
onUpdate: (data: P) => void;
onDelete: (data: P) => void;
}
export interface ITrelloCard<P> {
contentComponent: React.ComponentType<TrelloCardProps<P>>;
titleComponent: React.ComponentType<TrelloCardProps<P>>;
actions: { iconName, text, isAccessible, handler: (card: P, actionProps: TrelloCardActionProps<P>) => void }[];
fetchNewCard: () => Promise<P>;
}
interface ICardMappingForBoard<TCard> {
id: keyof TCard;
laneId: keyof TCard;
cardType: keyof TCard;
}
export interface BoardProps<TCard> {
lanes: Function | Array<{ id, laneTitle }>;
cards: Function | Array<TCard>;
cardMapping: ICardMappingForBoard<TCard>;
cardTypes: { [index: string]: ITrelloCard<any> }
onDataChange: ({ cards, lanes }) => void;
accquireLaneId: () => any;
}
export class Menu implements IMenu {
id: number;
title: string;
routerLink: string;
href: string;
icon: string;
target: string;
hasSubMenu: boolean;
parentId: number;
selectionLink: string;
constructor(id: number,
title: string,
routerLink: string,
href: string,
icon: string,
target: string,
hasSubMenu: boolean,
parentId: number);
}
export const i18n: IRegistry<React.ReactNode>;
export const routeTable: IRegistry<any>;
export function i18nAttr(key: string): string;
export const icon: IRegistry<any>;
export const editorByAccessor: IRegistry<React.ReactElement<any>>;
export const menuBar: IRegistry<string | Function>;
//--- for businness application & admin panels
export const tags: IRegistry<any>;
export const reports: IRegistry<any>;
export const dashboardBlocks: IRegistry<any>;
export const acl: IRegistry<boolean>;
export interface UiKitProps {
id: string;
}
export const UiKit: React.SFC<UiKitProps>;
export interface OrganicBoxProps<TActions, TOptions, TParams> {
actions?: TActions;
predefinedActions?: string;
options?: TOptions;
params?: TParams;
customActions?: Partial<TActions>;
children?: React.ReactNode;
}
interface IActions {
actions?: any[];
}
export interface IPanelProps extends IActions {
header?: any;
tabs?: string[];
blocks?: any[];
hasSearch?: boolean;
selectedTab?: string;
selectedBlock?: number | string;
onSelectBlock?: (index: number) => void
children: any;
classNamePerChild?: string;
onActionExecute?: (s: string) => void;
}
export const Panel: React.SFC<IPanelProps>;
class OrganicBox<TActions, TOptions, TParams, S> extends BaseComponent<OrganicBoxProps<TActions, TOptions, TParams>, S> {
devPortId: number;
actions: TActions;
constructor(p: OrganicBoxProps<TActions, TOptions, TParams>);
}
export interface ISingleViewBox<T> {
getId(row): any;
getFormData(): T;
setFieldValue(fieldName: string, value);
}
export class ListViewBox<T> extends OrganicBox<IActionsForCRUD<T>, IOptionsForCRUD, IListViewParams, never> {
static fromArray<T>(items: T[], options?: { keyField, fields }): StatelessListView
}
export type ReportReadMethod = (params: IAdvancedQueryFilters) => PromisedResultSet<any>;
interface IActionsForReport {
read: ReportReadMethod;
}
interface AnchorProps extends React.AnchorHTMLAttributes<HTMLElement> {
switchingInClass?: string;
switchingOutClass?: string;
switchingElement?: HTMLElement | string;
switchingDelay?: number;
}
export interface ScrollablePanelProps extends React.DetailsHTMLAttributes<HTMLElement> {
contentClassName?: string;
onSyncScroll?: Function;
onGetHeight?: () => number;
onGetInnerHeight?: () => number;
avoidContentClip?: boolean;
onGetWidth?: () => number;
scrollY?: boolean;
scrollX?: boolean;
reversed?: boolean;
ignore?: boolean;
}
interface ComboBoxProps {
value?: any;
onChange?: any;
items: { Id, Name }[];
}
export const ComboBox: React.SFC<ComboBoxProps>;
export const TimeEdit: React.SFC<ITimeEditProps>;
export interface DataTableProps {
height?: any;
data: any[];
captions: string[];
columnsRenders: (React.ComponentType<any> & { tableCellStyle?: React.CSSProperties, tableCellProps?: React.TdHTMLAttributes<any> })[];
}
export const DataList: React.SFC<OrganicUi.IDataListProps<any>>;
export const DataTreeList: React.SFC<OrganicUi.IDataListProps<any> & Partial<OrganicUi.ITreeListProps>>;
export interface IDataGalleryProps extends IDataListProps {
fieldMapping?: {
key, title, description
}
}
export const DataGallery: React.SFC<IDataGalleryProps>;
interface IDataPanelProps {
header: any;
primary?: boolean;
editable?: boolean;
className?: string;
}
export const DataPanel: React.SFC<IDataPanelProps>;
export interface ICRUDAction {
actionName: string;
onExecute: Function;
}
export interface IActionsForCRUD<TDto> {
mapFormData?: (dto: TDto) => TDto;
beforeSave?: (dto: TDto) => TDto;
create: (dto: TDto) => Promise<any>;
update: (id: any, dto: TDto) => Promise<any>;
deleteList: (hid: any[]) => Promise<any>;
read: (id: any) => Promise<TDto>;
readListByMode?: Map<string, (params: IAdvancedQueryFilters) => PromisedResultSet<TDto>>;
readList: (params: IAdvancedQueryFilters) => PromisedResultSet<TDto>;
export: (format: string, params: IAdvancedQueryFilters) => PromisedResultSet<TDto>;
readByIds: (ids: any[]) => PromisedResultSet<TDto>;
getUrlForSingleView?(id: string): string;
validate?: (data: any) => IDataFormAccessorMsg[];
getText?: (dto: TDto) => string;
getId?: (dto: TDto) => any;
getPageTitle?: (dto: TDto) => string;
onFieldWrite?: (key: string, value, dto: TDto) => void
}
type PartialFunction<T> = {
[P in keyof T]?: ((value: T[P]) => any);
};
export interface IBlankOptions {
title: string;
}
export interface IOptionsForCRUD {
avoidAutoFilter?: boolean;
insertButtonContent?: any;
singularName: string;
routeForSingleView: string;
routeForListView: string;
classNameForListView?: string;
classNameForSingleView?: string;
pluralName: string;
permissionKeys?: { forCreate, forRead, forUpdate, forDelete }
filterOptions?: {
liveMode?: boolean;
}
getUrlForSingleView?: Function;
iconCode?: any;
}
interface IListViewParams {
forDataLookup?: boolean;
multipleDataLookup?: boolean;
parentRefId?: number;
isHidden?: boolean;
height?: number;
width?: number;
selectedId?: any;
corner?: any;
onSelectionChanged?: Function;
onPageChanged?: Function;
customReadList?: Function;
readListMode?: string;
customActions?: { [key: string]: Function };
customReadListArguments?: any[];
canSelectItem?: (row) => boolean;
defaultSelectedValues?: () => { [key: number]: true };
getValue?: () => any;
setValue?: (value) => void;
dataLookup?: any;
filterMode?: 'quick' | 'advanced' | 'none';
noTitle?: boolean;
dataLookupProps?: DataLookupProps;
}
export interface ISingleViewParams {
id;
onNavigate?: (id) => Promise<any>;
noTitle?: boolean;
}
export type StatelessListView = React.SFC<IListViewParams>;
export type StatelessSingleView = React.SFC<ISingleViewParams>;
export interface IModuleManager {
baseUrl: string;
_loadingModules: ({ moduleId, resolve })[];
load(moduleId: string, src?: string): Promise<IModule>;
register(moduleId: string, mod: IModule);
}
export const moduleManager: IModuleManager;
export interface IModule {
setup(opts);
}
type TreeNodeType = number | string;
export interface ITreeListNode {
text: React.ReactNode;
key: TreeNodeType;
parentKey: TreeNodeType;
isLeaf?: boolean;
checkBoxStatus?, extraValue?;
}
export interface ITreeListProps {
value?: ITreeListNode[];
onChange?: (nodes) => any;
height?: number;
nodes: ITreeListNode[];
showCheckBoxes?: boolean;
getNodeClass?: (item: ITreeListNode) => string;
onNodeClick?: (e: React.MouseEvent<HTMLElement>) => void;
mapping?: { key: string, parentKey: string, text: string };
onGetCheckBoxStatus?: (node) => any;
onChangeCheckBoxStatus?: (node, newState) => void;
}
export interface IRegistry<T> {
data: any;
secondaryValues: any;
notFounded: any;
(key: string): T;
(key: string, value: T): void;
register(delta: { [key: string]: T }): void;
set(key: string, value: T, extraValue?);
get(key: string): string;
customTester(v: CustomTesterForRegistry, value: T);
}
export function openRegistry<T>(): IRegistry<T>;
export type CustomTesterForRegistry = (key: string) => boolean | string | RegExp;
export interface IDeveloperFeatures {
devElement: any;
devPortId: any;
forceUpdate(): void;
getDevButton(): JSX.Element;
}
export type DevFriendlyCommand = (target: IDeveloperFeatures & BaseComponent<any, any>) => void;
export const devTools: IRegistry<DevFriendlyCommand>;
export const JsonInspector: React.SFC<any>;
export const DeveloperBar: React.SFC<any> & { topElement, isDevelopmentEnv: boolean, developerFriendlyEnabled };
export function isProdMode(): boolean;
export type ErrorCodeForFieldValidation = string;
export type onErrorCodeResult = (data: any) => OrganicUi.IDataFormAccessorMsg[];
export class ArrayDataView<T> extends BaseComponent<IArrayDataViewProps<T>, never>{
getValue(): T[];
fireAppend();
fireRemove(idx, length?);
}
export interface PortProps {
id: string;
mode: 'log' | 'form';
}
export interface ICompactDataViewProps {
data: any;
children: any;
}
export interface IDataFormProps<T = any> {
validate?: boolean;
onErrorCode?: onErrorCodeResult;
data?: T;
readonly?: boolean;
onChange?: (data: T) => void;
bindingSource?: any;
className?: string;
style?: React.CSSProperties;
children?: any;
settings: OrganicUi.DataForm.FormSettings;
onCustomRenderWithCaptureValues?: Function;
onFieldValidate?: (p: OrganicUi.IFieldProps) => string;
}
export interface ISubmitProps {
className?: string;
buttonComponent: React.ComponentType<any>;
bindingSource: Reinvent.BindingHub;
onExecute: (body) => Promise<any>;
}
interface IFilterPanelProps {
dataForm?: any;
operators?: any[];
customModel?: boolean;
onApplyClick?: () => any;
liveMode?: boolean;
ignoreScrollable?: boolean;
customActions?: { [key: string]: Function };
}
export type TMethods = Function[] | { [key: string]: Function }
export interface IMenu {
id: number;
title: string;
routerLink: string;
selectionLink: string;
href: string;
icon: string;
target: string;
hasSubMenu: boolean;
parentId: number;
}
export interface IAdvancedQueryFilters {
fromRowIndex: number;
toRowIndex: number;
filterModel: FilterItem[];
sortModel: ({ colId: string, sort: string })[];
}
export type OptionsForRESTClient = (() => any) | any;
function restClient<T = any>(method: 'GET' | 'POST' | 'PUT' | 'HEAD' | 'PATCH' | 'DELETE', url: string, data?): Promise<T>;
export interface IAppModel {
getMenuItems(): { menu: IMenu, permission?}[];
defaultMasterPage: () => any;
checkPermission(permissionKey): boolean;
}
export function startApp(appModel: IAppModel);
export interface ITimeSlotRange { from: string, to: string }
export interface ITimeSlotProps {
ranges: ITimeSlotRange[];
prefix?: React.ReactNode;
onChange: (ranges: ITimeSlotRange[]) => void;
}
export interface IDataListLoadReq {
startFrom: number;
rowCount: number;
}
export interface IDataListProps<T = any> {
itemHeight?: number;
onLoadRequestParams?: Function;
multiple?: boolean;
loader?: (req: IDataListLoadReq) => Promise<IListData>;
ignoreScroll?: boolean;
onDoubleClick?: () => void;
onCurrentRowChanged?: (row: any) => any;
rowCount?: number;
template?: string;
height?: number;
minWidth?: number;
popupForActions?: React.ReactNode | Function;
onRowClick?: (rowIdx: number, row: any) => void;
rowSelection?: any;
templatedApplied?: boolean;
corner?: any;
children?: any | any[];
flexMode?: boolean;
startWithEmptyList?: boolean;
className?: string;
customDataRenderer?: (items: any[], dataList?: BaseComponent<OrganicUi.IDataListProps<any>>) => JSX.Element;
detailsListProps?: T;
selection?: any;
accquireSelection?: (selection: any) => void;
itemIsDisabled?: (row: T) => boolean;
onGetClassNamePerRow?: (row: T, rowProps: any) => string;
customActions?: TMethods;
onActionExecute?: Function;
noBestFit?: boolean;
customActionRenderer?: (funcName: string, func: Function) => JSX.Element;
onPageChanged?: Function;
}
export interface IGroupBoxProps {
accessor: Reinvent.BindingHub | Reinvent.BindingHub[];
mode: 'single' | 'list';
readonly: boolean;
}
interface DataListPanelProps extends Partial<IDataPanelProps> {
contentClassName?: string;
formMode?: 'modal' | 'callout' | 'panel' | 'section';
dataListHeight?: number;
dataFormHeight?: number;
avoidAdd?, avoidDelete?, avoidEdit?: boolean;
customBar?: TMethods;
customActions?: TMethods;
customActionRenderer?: (funcName: string, func: Function) => JSX.Element;
onActionExecute?: Function;
accessor?: Reinvent.BindingHub | Reinvent.BindingHub[];
onErrorCode?: onErrorCodeResult;
singularName?, pluralName?: string;
style?: React.CSSProperties;
}
export interface ModalProps {
title?: React.ReactNode;
noClose?: boolean;
type?: 'error' | 'warning';
buttons?: { [buttonName: string]: Function }
buttonHeaders?: { [buttonName: string]: (() => Function) }
children?: React.ReactNode;
onClose?: Function;
}
export interface ImageUploaderProps {
value?: string;
onChange?: (base64: string) => void;
height?: number;
browseButtonText?: string;
}
export class DataListPanel extends BaseComponent<DataListPanelProps>{
static getActiveData<T>(): T;
static bindDetailField<T>(fieldName: keyof T): Function;
}
export namespace DataListPanel {
}
export const FilterPanel: React.SFC<IFilterPanelProps>;
interface DataLookupProps {
source?: React.ComponentType<IListViewParams>;
predefined?: string;
valueAsDisplayText?: boolean;
className?: string;
onChange?: (value) => void;
onFocus?: () => void;
onBlur?: Function;
textField?: React.ReactNode;
onDisplayText?: (value) => React.ReactNode;
canSelectItem?: (row) => boolean;
multiple?: boolean;
value?: any;
iconCode?: string;
minHeightForPopup?: string;
popupMode?: DataLookupPopupMode;
bellowList?: boolean;
appendMode?: boolean;
popOverReversed?: boolean;
style?: React.CSSProperties;
customReadList?: Function;
customReadListArguments?: any;
filterModelAppend?: any[];
disableAdjustEditorPadding?: boolean;
}
export interface IDataLookupPopupModeProps {
isOpen: boolean;
target: HTMLElement;
reversed: boolean;
onClose: Function;
onApply: Function;
onAppend: Function;
dataLookup: any;
dataLookupProps: DataLookupProps;
}
export interface IDataLookupActionProps {
label: any;
onExecute: () => Promise<any>;
}
export type DataLookupPopupMode = React.ComponentClass<IDataLookupPopupModeProps> & { inlineMode: boolean, renderButtons: (p, onClick) => JSX.Element };
export class DataLookup extends BaseComponent<DataLookupProps, never>{
static PopOver: DataLookupPopupMode;
static Modal: DataLookupPopupMode;
static Action: React.SFC<IDataLookupActionProps>;
static predefines: { [key: string]: React.ComponentType<IListViewParams> }
}
export class TreeList extends BaseComponent<ITreeListProps, any>{ }
export namespace DataForm {
export interface FormSettings {
isFieldHidden?: (fieldAccessor: OrganicUi.TAccessor) => boolean;
isFieldReadonly?: (fieldAccessor: OrganicUi.TAccessor, fldProps?: OrganicUi.IFieldProps) => boolean;
}
}
export interface IAdvButtonProps {
noSpinMode?: boolean;
iconName?: string;
children?: any;
isLoading?: boolean;
callout?: any;
primary?: boolean;
style?: React.CSSProperties;
onClick?: () => any;
fixedWidth?: boolean;
className?: string;
outterClassName?: string;
calloutWidth?: number;
lastMod?: number;
buttonComponent?: any;
fullWidth?: boolean;
href?: string;
mini?: boolean;
size?: 'small' | 'medium' | 'large';
type?: string;
variant?: 'text' | 'flat' | 'outlined' | 'contained' | 'raised' | 'fab';
color?: 'inherit' | 'primary' | 'secondary' | 'default';
disabled?: boolean;
text?: string;
}
export const AdvButton: React.SFC<IAdvButtonProps>;
// Custom Components for SepidSystem Company
export const TimeSlot: React.SFC<ITimeSlotProps>;
interface IDialogProps {
title?, content?: any;
actions?: { [key: string]: Function }
defaultValues?: any;
noClose?: boolean;
width?: number;
}
interface AppUtilsIntf {
(p: any): JSX.Element;
showDialog(content, opts?: IDialogProps): void;
closeDialog();
confrim(content, opts?: IDialogProps): Promise<any>;
confrimActionByUser(p: { actionName: string, actionData }): Promise<never>;
showDataDialog<T>(content: React.ReactElement<Partial<IDataFormProps<T>>>, opts?: IDialogProps): Promise<T>;
afterREST({ method, url, data, result });
}
export interface IMessageBarProps {
className?: string;
onClose?: Function;
variant: 'success' | 'warning' | 'error' | 'info';
children?: any;
style?: React.CSSProperties;
}
export interface ITimeEditProps {
value?: string;
keepSeconds?: boolean;
onChange?: React.ChangeEventHandler<HTMLInputElement>;
}
export const AppUtils: AppUtilsIntf;
export const Headline: React.SFC<React.HTMLAttributes<any>>;
export namespace Icons {
export const AddIcon: React.SFC<any>;
export const DeleteIcon: React.SFC<any>;
export const EditIcon: React.SFC<any>;
export const LockIcon: React.SFC<any>;
}
}
declare module '@organic-ui' {
export const reinvent: Reinvent.reinvent;
export type TMethods = OrganicUi.TMethods;
export const Utils: typeof OrganicUi.Utils;
export const AppUtils: typeof OrganicUi.AppUtils;
export const DataLookup: typeof OrganicUi.DataLookup;
export const HiddenField: React.SFC<any>;
export const TreeList: typeof OrganicUi.TreeList;
export const ImageUploader: React.SFC<OrganicUi.ImageUploaderProps>;
export const Modal: React.SFC<OrganicUi.ModalProps>;
export const i18n: typeof OrganicUi.i18n;
export const routeTable: typeof OrganicUi.routeTable;
export type IFieldProps = OrganicUi.IFieldProps<IColumn>;
export class Field extends OrganicUi.Field<IColumn>{
static getAccessorName(accessor): string;
}
export type IAppModel = OrganicUi.IAppModel;
export const startApp: typeof OrganicUi.startApp;
export type Menu = OrganicUi.Menu;
export const Menu: typeof OrganicUi.Menu;
export type IActionsForCRUD<TDto> = OrganicUi.IActionsForCRUD<TDto>;
export type IOptionsForCRUD = OrganicUi.IOptionsForCRUD;
export { AxiosRequestConfig as RequestConfig } from 'axios';
import { AxiosRequestConfig } from 'axios';
export { Selection as DataListSelection } from 'office-ui-fabric-react/lib/DetailsList'
import { SweetAlertOptions, SweetAlertResult, SweetAlertType } from 'sweetalert2';
import { IColumn, IDetailsListProps, IDetailsRowProps } from 'office-ui-fabric-react/lib/DetailsList';
import { IContextualMenuItem } from 'office-ui-fabric-react/lib/ContextualMenu';
import { MenuItemProps } from '@material-ui/core/MenuItem';
export { MenuItemProps } from '@material-ui/core/MenuItem';
import { AnchorHTMLAttributes, CSSProperties, HTMLAttributes, ComponentType } from 'react';
export const JssProvider: any;
export function scanAllPermission(table: { data }): Promise<ITreeListNode[]>;
export type StatelessSingleView = OrganicUi.StatelessSingleView;
export type StatelessListView = OrganicUi.StatelessListView;
export type IAdvancedQueryFilters = OrganicUi.IAdvancedQueryFilters;
export interface OptForRESTClient extends Partial<AxiosRequestConfig> {
title: string;
setBaseURL?: (baseUrl: string) => void;
rejectHandler?: Function;
| appModel?: IAppModel
}
export type OptionsForRESTClient = (() => Partial<OptForRESTClient>) | OptForRESTClient;
export const createClientForREST: (options?: OptionsForRESTClient) => typeof restClient;
function restClient<T = any>(method: 'GET' | 'POST' | 'PUT' | 'HEAD' | 'PATCH' | 'DELETE', url: string, data?): Promise<T>;
export type ResultSet<T> = OrganicUi.ResultSet<T>;
export type ErrorCodeForFieldValidation = OrganicUi.ErrorCodeForFieldValidation;
export type IDataFormAccessorMsg = OrganicUi.IDataFormAccessorMsg;
export type PromisedResultSet<T> = OrganicUi.PromisedResultSet<T>;
export type ReportReadMethod = OrganicUi.ReportReadMethod;
export type ActionResult = Promise<any>;
export type IListData<TRow = any> = OrganicUi.IListData;
interface IBindableElement {
tryToBinding();
}
export const Version: string;
export type IComponentRefer<T = any> = OrganicUi.IComponentRefer;
export class BaseComponent<P = any, S = any> extends OrganicUi.BaseComponent<P, S>{ }
export const moduleManager: typeof OrganicUi.moduleManager;
export type IModule = OrganicUi.IModule;
export const UiKit: typeof OrganicUi.UiKit;
export const Spinner: React.SFC<any>;
export const ViewBox: React.SFC<OrganicUi.OrganicBoxProps<any, any, any>>;
export const DashboardBox: React.SFC<OrganicUi.OrganicBoxProps<any, any, any>>;
export type ISingleViewBox<T = any> = OrganicUi.ISingleViewBox<T> & React.ReactInstance;
export type IListViewParams = OrganicUi.IListViewParams;
export type ISingleViewParams = OrganicUi.ISingleViewParams;
export const ListViewBox: typeof OrganicUi.ListViewBox;
export const Anchor: React.SFC<OrganicUi.AnchorProps>;
export const ScrollablePanel: React.SFC<OrganicUi.ScrollablePanelProps>;
export const DatePicker: React.SFC<OrganicUi.DatePickerProps>;
export const ComboBox: typeof OrganicUi.ComboBox;
export const TimeEdit: typeof OrganicUi.TimeEdit;
export type IAdvButtonProps = OrganicUi.IAdvButtonProps & {
menuItems?: MenuItemProps[];
};
export const AdvButton: React.SFC<IAdvButtonProps>;
export const Panel: typeof OrganicUi.Panel;
export class DataForm extends BaseComponent<Partial<OrganicUi.IDataFormProps>, any> {
revalidateAllFields(): Promise<IDataFormAccessorMsg[]>;
showInvalidItems(invalidItems?: IDataFormAccessorMsg[]): JSX.Element;
getFieldErrorsAsElement(): Promise<JSX.Element>
}
export const CompactDataView: React.SFC<OrganicUi.ICompactDataViewProps>;
export class Port extends BaseComponent<OrganicUi.PortProps>{
}
export const MinimalMasterPage: any;
export class ArrayDataView<T> extends OrganicUi.ArrayDataView<T>{ }
export class DataList extends BaseComponent<OrganicUi.IDataListProps<IDetailsListProps>, never> {
reload();
}
export const DataTreeList: typeof OrganicUi.DataTreeList;
export const DataTable: React.SFC<OrganicUi.DataTableProps>;
export const DataPanel: typeof OrganicUi.DataPanel;
export const DataListPanel: typeof OrganicUi.DataListPanel;
export const FilterPanel: typeof OrganicUi.FilterPanel;
export const i18nAttr: typeof OrganicUi.i18nAttr;
export const icon: typeof OrganicUi.icon;
export const editorByAccessor: typeof OrganicUi.editorByAccessor;
export const menuBar: typeof OrganicUi.menuBar;
export type ITreeListNode = OrganicUi.ITreeListNode;
export type ITreeListProps = OrganicUi.ITreeListProps;
export type CustomTesterForRegistry = OrganicUi.CustomTesterForRegistry;
export type IDeveloperFeatures = OrganicUi.IDeveloperFeatures;
export type IAdvSectionProps = OrganicUi.IAdvSectionProps;
export type ITimeEditProps = OrganicUi.ITimeEditProps;
export type IMessageBarProps = OrganicUi.IMessageBarProps;
export function createGenerateClassName(p: any);
export function Collapsible(p: any);
export type IRegistry<T> = OrganicUi.IRegistry;
export const isProdMode: typeof OrganicUi.isProdMode;
export const devTools: typeof OrganicUi.devTools;
export const JsonInspector: typeof OrganicUi.JsonInspector;
export const DeveloperBar: typeof OrganicUi.DeveloperBar;
export const Headline: typeof OrganicUi.Headline;
// SepidSystem
export type ITimeSlotRange = OrganicUi.ITimeSlotRange;
export const TimeSlot: typeof OrganicUi.TimeSlot;
//--- for businness application & admin panels
export const tags: typeof OrganicUi.tags;
export const reports: typeof OrganicUi.reports;
export const dashboardBlocks: typeof OrganicUi.dashboardBlocks;
export const acl: typeof OrganicUi.acl;
// decorators
function decoSubRender(target: any, propertyName: string, descriptor: TypedPropertyDescriptor<Function>)
export function SubRender(): typeof decoSubRender;
function decoHelper(target: typeof BaseComponent)
export function Helper(helperId): typeof decoHelper;
export function SelfBind(): MethodDecorator;
export const Icons: typeof OrganicUi.Icons;
// Inspired Components;
export { TextField, Dialog, DialogActions, DialogContent, DialogTitle, DialogContentText, Switch, Checkbox, Select, Button, RadioGroup, FormControlLabel, Icon, IconButton, SnackbarContent, Tab, Tabs, Paper, Radio } from '@material-ui/core';
export const Trello: any;
export { GridList, GridListTile } from '@material-ui/core'
export { Callout } from 'office-ui-fabric-react/lib/Callout';
export { Fabric } from 'office-ui-fabric-react/lib/Fabric';
import { ChartConfiguration } from 'c3'
export const C3Chart: React.SFC<ChartConfiguration>;
export const FileUploader: React.SFC<any>;
import SweetAlert2 from 'sweetalert2';
export function Alert(options: ReactSweetAlertOptions): Promise<SweetAlertResult>;
type ReactElementOr<K extends keyof SweetAlertOptions> = SweetAlertOptions[K] | React.ReactElement<any>;
type ReactSweetAlertOptions = Overwrite<SweetAlertOptions, ReactOptions>;
type Overwrite<T, U> = Pick<T, Exclude<keyof T, keyof U>> & U;
interface ReactOptions {
title?: ReactElementOr<'title'>;
html?: ReactElementOr<'html'>;
confirmButtonText?: ReactElementOr<'confirmButtonText'>;
cancelButtonText?: ReactElementOr<'cancelButtonText'>;
footer?: ReactElementOr<'footer'>;
}
interface ReactSweetAlert {
(title?: ReactElementOr<'title'>, message?: ReactElementOr<'html'>, type?: SweetAlertType): Promise<SweetAlertResult>;
(options: ReactSweetAlertOptions & { useRejections?: false }): Promise<SweetAlertResult>;
(options: ReactSweetAlertOptions & { useRejections: true }): Promise<any>;
}
export type ITrelloCard<P> = OrganicUi.ITrelloCard<P>;
export function Board<TCard>(p: OrganicUi.BoardProps<TCard>): React.SFCElement<OrganicUi.BoardProps<TCard>>;
}
declare module '*.jpg';
declare module '*.png';
declare module '*.jpeg';
declare module '*.svg'; | }
export const appData: {
|
home.tsx | import React, {ReactElement} from 'react'
import {useNavigate, NavigateFunction, useLocation, Location} from 'react-router'
import {SafeAreaView, Text, Button} from 'react-native'
import auth from '../../adapters/security/auth'
const PageHome = (): ReactElement => {
const navigate: NavigateFunction = useNavigate()
const location: Location = useLocation()
const loginHandler = (): void => {
const locationState: any = location.state | }
const logoutHandler = (): void => {
auth.logOut(() => {
navigate('/')
})
}
return (
<SafeAreaView>
<Text>Hello World{auth.isAuth() && `, ${auth.getAuthUser()!.username}`}!</Text>
{auth.isAuth() ? <Button title="Logout" onPress={logoutHandler} /> : <Button title="Login" onPress={loginHandler} />}
</SafeAreaView>
)
}
export default PageHome | auth.logIn('user', 'password', false, () => {
navigate(locationState ? locationState.from : '/')
}) |
box_since_3_12.go | // +build !gtk_3_6,!gtk_3_8,!gtk_3_10
// Copyright (c) 2013-2014 Conformal Systems <[email protected]>
//
// This file originated from: http://opensource.conformal.com/
//
// Permission to use, copy, modify, and distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
// This file includes wrapers for symbols included since GTK 3.12, and
// and should not be included in a build intended to target any older GTK
// versions. To target an older build, such as 3.10, use
// 'go build -tags gtk_3_10'. Otherwise, if no build tags are used, GTK 3.12
// is assumed and this file is built.
// +build !gtk_3_6,!gtk_3_8,!gtk_3_10
package gtk
// #include <gtk/gtk.h>
// #include "gtk.go.h"
import "C"
import (
"unsafe"
"github.com/gotk3/gotk3/glib"
)
// SetCenterWidget is a wrapper around gtk_box_set_center_widget().
func (a *Box) SetCenterWidget(child IWidget) {
if child == nil | else {
C.gtk_box_set_center_widget(a.native(), child.toWidget())
}
}
// GetCenterWidget is a wrapper around gtk_box_get_center_widget().
func (a *Box) GetCenterWidget() *Widget {
w := C.gtk_box_get_center_widget(a.native())
if w == nil {
return nil
}
return &Widget{glib.InitiallyUnowned{glib.Take(unsafe.Pointer(w))}}
}
| {
C.gtk_box_set_center_widget(a.native(), nil)
} |
win_perm.rs | use crate::error::{Error,
Result};
use habitat_win_users::account::Account;
use std::path::Path;
use widestring::WideCString;
use winapi::{shared::{minwindef::DWORD,
ntdef::NULL,
winerror::ERROR_SUCCESS},
um::{accctrl::SE_FILE_OBJECT,
aclapi::SetNamedSecurityInfoW,
winnt::{DACL_SECURITY_INFORMATION,
FILE_ALL_ACCESS,
PACL,
PROTECTED_DACL_SECURITY_INFORMATION,
PSID}}};
use windows_acl::{acl::ACL,
helper};
pub struct PermissionEntry {
pub account: Account,
pub access_mask: DWORD,
}
pub fn set_permissions<T: AsRef<Path>>(path: T, entries: &[PermissionEntry]) -> Result<()> {
let s_path = match path.as_ref().to_str() {
Some(s) => s,
None => {
return Err(Error::PermissionFailed(format!("Invalid path {:?}", &path.as_ref())));
}
};
let ret = unsafe {
SetNamedSecurityInfoW(WideCString::from_str(s_path).unwrap().into_raw(),
SE_FILE_OBJECT,
DACL_SECURITY_INFORMATION | PROTECTED_DACL_SECURITY_INFORMATION,
NULL as PSID,
NULL as PSID,
NULL as PACL,
NULL as PACL)
};
if ret != ERROR_SUCCESS {
return Err(Error::PermissionFailed(format!("OS error resetting \
permissions {}",
ret)));
}
let mut acl = match ACL::from_file_path(s_path, false) {
Ok(acl) => acl,
Err(e) => {
return Err(Error::PermissionFailed(format!("OS error {} retrieving \
ACLs from path path {:?}",
e,
&path.as_ref())));
}
};
for entry in entries {
if let Err(e) = acl.allow(entry.account.sid.raw.as_ptr() as PSID,
true,
entry.access_mask)
{
return Err(Error::PermissionFailed(format!("OS error {} setting \
permissions for {}",
e, entry.account.name)));
}
}
Ok(())
}
/// This is a convevience function that will essentially apply the default
/// permissions to a path but remove entries for the Users and Authenticated_Users
/// resulting in FULL_CONTROL access for Administrators, SYSTEM and the current
/// user. In nearly all Supervisor scenarios where we need to adjust permissions,
/// this is the desired ACL state.
pub fn harden_path<T: AsRef<Path>>(path: T) -> Result<()> {
let current_user = match helper::current_user() {
Some(u) => u,
None => {
return Err(Error::CryptoError(format!("Unable to find current user \
setting permissions for {}",
path.as_ref().display())));
}
};
let entries = vec![PermissionEntry { account:
Account::from_name(¤t_user).expect("current user account \
to exist"),
access_mask: FILE_ALL_ACCESS, },
PermissionEntry { account: Account::built_in_administrators(),
access_mask: FILE_ALL_ACCESS, },
PermissionEntry { account: Account::local_system(),
access_mask: FILE_ALL_ACCESS, },];
set_permissions(path.as_ref(), &entries)
}
#[cfg(test)]
mod tests {
use std::{fs::File,
io::Write,
path::Path};
use tempfile::{Builder,
NamedTempFile};
use winapi::um::winnt::FILE_ALL_ACCESS;
use windows_acl::helper;
use habitat_win_users::account;
use super::*;
use crate::error::Error;
#[test]
fn set_permissions_ok_test() {
let tmp_dir = Builder::new().prefix("foo")
.tempdir()
.expect("create temp dir");
let file_path = tmp_dir.path().join("test.txt");
let mut tmp_file = File::create(&file_path).expect("create temp file");
writeln!(tmp_file, "foobar123").expect("write temp file");
let current_user = helper::current_user().expect("find current user");
let entries = vec![PermissionEntry { account:
account::Account::from_name(¤t_user).unwrap(),
access_mask: FILE_ALL_ACCESS, }];
assert!(set_permissions(&file_path, &entries).is_ok());
let acl = ACL::from_file_path(file_path.to_str().unwrap(), false).expect("obtain file ACL");
let mut acl_entries = acl.all().expect("retrieve all acl entries");
assert_eq!(acl_entries.len(), 1);
let entry = acl_entries.remove(0);
assert_eq!(entry.mask, entries[0].access_mask);
assert_eq!(
helper::sid_to_string(entry.sid.unwrap().as_ptr() as PSID).expect("name from sid"),
entries[0].account.sid.to_string().expect("sid to string")
);
drop(tmp_file);
tmp_dir.close().expect("delete temp dir");
}
#[test]
fn set_permissions_fail_test() {
let badpath = Path::new("this_file_should_never_exist_deadbeef");
let current_user = helper::current_user().expect("find current user");
let entries = vec![PermissionEntry { account:
account::Account::from_name(¤t_user).unwrap(),
access_mask: FILE_ALL_ACCESS, }];
match set_permissions(badpath, &entries) {
Ok(_) => {
panic!("Shouldn't be able to set permissions on non-existent file, but did!");
}
Err(Error::PermissionFailed(_)) => { /* OK */ }
Err(e) => {
panic!("Got unexpected error setting permissions a non-existent file: {:?}",
e);
}
}
}
#[test]
fn harden_path_test() |
}
| {
let file = NamedTempFile::new().expect("to create temp file");
assert!(harden_path(file.path()).is_ok());
assert!(harden_path("C:/this/is/a/nonexistant/path").is_err());
} |
tests.py | #!/usr/bin/env python3
# Copyright 2018 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Encapsulate nova testing."""
import logging
import unittest
import zaza.model
import zaza.openstack.charm_tests.test_utils as test_utils
import zaza.openstack.charm_tests.glance.setup as glance_setup
import zaza.openstack.configure.guest
class BaseGuestCreateTest(unittest.TestCase):
"""Deprecated: Use zaza.openstack.configure.guest.launch_instance."""
def launch_instance(self, instance_key):
"""Deprecated: Use zaza.openstack.configure.guest.launch_instance."""
logging.info('BaseGuestCreateTest.launch_instance is deprecated '
'please use '
'zaza.openstack.configure.guest.launch_instance')
zaza.openstack.configure.guest.launch_instance(instance_key)
class CirrosGuestCreateTest(BaseGuestCreateTest):
"""Tests to launch a cirros image."""
def test_launch_small_instance(self):
"""Launch a cirros instance and test connectivity."""
zaza.openstack.configure.guest.launch_instance(
glance_setup.CIRROS_IMAGE_NAME)
class LTSGuestCreateTest(BaseGuestCreateTest):
"""Tests to launch a LTS image."""
def test_launch_small_instance(self):
"""Launch a Bionic instance and test connectivity."""
zaza.openstack.configure.guest.launch_instance(
glance_setup.LTS_IMAGE_NAME)
class NovaCompute(test_utils.OpenStackBaseTest):
"""Run nova-compute specific tests."""
def test_500_hugepagereport_action(self):
"""Test hugepagereport action."""
for unit in zaza.model.get_units('nova-compute',
model_name=self.model_name):
logging.info('Running `hugepagereport` action'
' on unit {}'.format(unit.entity_id))
action = zaza.model.run_action(
unit.entity_id,
'hugepagereport',
model_name=self.model_name,
action_params={})
if "failed" in action.data["status"]:
raise Exception(
"The action failed: {}".format(action.data["message"]))
def | (self):
"""Checking restart happens on config change.
Change disk format and assert then change propagates to the correct
file and that services are restarted as a result
"""
# Expected default and alternate values
current_value = zaza.model.get_application_config(
'nova-compute')['debug']['value']
new_value = str(not bool(current_value)).title()
current_value = str(current_value).title()
set_default = {'debug': current_value}
set_alternate = {'debug': new_value}
default_entry = {'DEFAULT': {'debug': [current_value]}}
alternate_entry = {'DEFAULT': {'debug': [new_value]}}
# Config file affected by juju set config change
conf_file = '/etc/nova/nova.conf'
# Make config change, check for service restarts
logging.info(
'Setting verbose on nova-compute {}'.format(set_alternate))
self.restart_on_changed(
conf_file,
set_default,
set_alternate,
default_entry,
alternate_entry,
['nova-compute'])
def test_920_change_aa_profile(self):
"""Test changing the Apparmor profile mode."""
services = ['nova-compute']
set_default = {'aa-profile-mode': 'enforce'}
set_alternate = {'aa-profile-mode': 'complain'}
mtime = zaza.model.get_unit_time(
self.lead_unit,
model_name=self.model_name)
logging.debug('Remote unit timestamp {}'.format(mtime))
with self.config_change(set_default, set_alternate):
logging.info(
'Waiting for services ({}) to be restarted'.format(services))
zaza.model.block_until_services_restarted(
'nova-compute',
mtime,
services,
model_name=self.model_name)
for unit in zaza.model.get_units('nova-compute',
model_name=self.model_name):
logging.info('Checking number of profiles in complain '
'mode in {}'.format(unit.entity_id))
run = zaza.model.run_on_unit(
unit.entity_id,
'aa-status --complaining',
model_name=self.model_name)
output = run['Stdout']
self.assertTrue(int(output) >= len(services))
def test_901_pause_resume(self):
"""Run pause and resume tests.
Pause service and check services are stopped then resume and check
they are started
"""
with self.pause_resume(['nova-compute']):
logging.info("Testing pause resume")
def test_930_check_virsh_default_network(self):
"""Test default virt network is not present."""
for unit in zaza.model.get_units('nova-compute',
model_name=self.model_name):
logging.info('Checking default network is absent on '
'unit {}'.format(unit.entity_id))
run = zaza.model.run_on_unit(
unit.entity_id,
'virsh net-dumpxml default',
model_name=self.model_name)
self.assertFalse(int(run['Code']) == 0)
class SecurityTests(test_utils.OpenStackBaseTest):
"""Nova Compute security tests tests."""
@classmethod
def setUpClass(cls):
"""Run class setup for running Nova Compute SecurityTests."""
super(SecurityTests, cls).setUpClass()
def test_security_checklist(self):
"""Verify expected state with security-checklist."""
# Changes fixing the below expected failures will be made following
# this initial work to get validation in. There will be bugs targeted
# to each one and resolved independently where possible.
expected_failures = [
'is-volume-encryption-enabled',
'validate-uses-tls-for-glance',
'validate-uses-tls-for-keystone',
]
expected_passes = [
'validate-file-ownership',
'validate-file-permissions',
'validate-uses-keystone',
]
for unit in zaza.model.get_units('nova-compute',
model_name=self.model_name):
logging.info('Running `security-checklist` action'
' on unit {}'.format(unit.entity_id))
test_utils.audit_assertions(
zaza.model.run_action(
unit.entity_id,
'security-checklist',
model_name=self.model_name,
action_params={}),
expected_passes,
expected_failures,
expected_to_pass=False)
| test_900_restart_on_config_change |
rpc_test.go | package neorpc_test
import (
"log"
"testing"
"github.com/guotie/neoutils/neorpc"
)
func TestEndpoint(t *testing.T) {
client := neorpc.NewClient("http://localhost:30333")
if client == nil {
t.Fail()
}
log.Printf("%v", client)
}
func TestGetContractState(t *testing.T) {
client := neorpc.NewClient("http://localhost:30333")
if client == nil {
t.Fail()
}
result := client.GetContractState("ce575ae1bb6153330d20c560acb434dc5755241b")
log.Printf("%+v", result)
}
func TestSendRawTransaction(t *testing.T) {
client := neorpc.NewClient("http://localhost:30333")
if client == nil {
t.Fail()
}
raw := "d1004208e8030000000000001423ba2703c53263e8d6e522dc32203339dcd8eee952c10c6d696e74546f6b656e73546f671b245557dc34b4ac60c5200d335361bbe15a57ce01f11e74686973697361756e69717565746f6b656e5f66726f6d5f73747269706501e216181b1f9a773f93064af30be44679f34ec878788afa1727aa60057eb39a96000001e72d286979ee6cb1b7e65dfddfb2e384100b8d148e7758de42e4168b71792c60010000000000000023ba2703c53263e8d6e522dc32203339dcd8eee9014140f55e2b2914c409396904b8c5a1e8ec0ffc0b62f8b1b996beae7c65ceca7e11a3dbab011038b948ec380c5b22ba474f013ca6de61051dda487a5bec17196115412321031a6c6fbbdf02ca351745fa86b9ba5a9452d785ac4f7fc2b7548ca2a46c4fcf4aacce575ae1bb6153330d20c560acb434dc5755241b"
result := client.SendRawTransaction(raw)
log.Printf("%+v", result)
}
func TestGetRawTransaction(t *testing.T) {
client := neorpc.NewClient("http://localhost:30333")
if client == nil {
t.Fail()
}
txID := "bde02f8c6482e23d5b465259e3e438f0acacaba2a7a938d5eecd90bba0e9d1ad"
result := client.GetRawTransaction(txID)
log.Printf("%+v", result)
}
func TestGetBlock(t *testing.T) {
client := neorpc.NewClient("http://seed2.o3node.org:10332")
if client == nil {
t.Fail()
}
txID := "5ba40a700fbdd72344d2903629fac10b55e7a957d17d38e475a20ab18766fa7b"
result := client.GetBlock(txID)
log.Printf("%+v", len(result.Result.Tx))
}
func TestGetBlockByIndex(t *testing.T) {
client := neorpc.NewClient("http://seed2.o3node.org:10332")
if client == nil {
t.Fail()
}
index := 2188171
result := client.GetBlockByIndex(index)
log.Printf("%+v", result)
}
func TestGetBlockCount(t *testing.T) {
client := neorpc.NewClient("http://seed2.o3node.org:10332")
if client == nil {
t.Fail()
}
result := client.GetBlockCount()
log.Printf("%+v", result.Result)
}
func TestGetAccountState(t *testing.T) {
client := neorpc.NewClient("http://seed2.o3node.org:10332")
if client == nil {
t.Fail()
}
result := client.GetAccountState("AdSBfV9kMmN2Q3xMYSbU33HWQA1dCc9CV3")
log.Printf("%+v", result.Result)
}
func TestGetTokenBalance(t *testing.T) {
client := neorpc.NewClient("http://localhost:30333")
if client == nil {
t.Fail()
}
result := client.GetTokenBalance("0xc2b0fed82b8fa28c358f99849136f45f057bb6fe", "APLNwfJTHp1MBHYNeMAxkeqNCquLpBVjcD")
log.Printf("%+v", result.Result)
}
func TestInvokeScript(t *testing.T) | {
client := neorpc.NewClient("http://seed2.neo.org:20332")
script := "00c1046e616d6567f8e679d19048360e414c82d82fdb33486438d37c00c10673796d626f6c67f8e679d19048360e414c82d82fdb33486438d37c00c10b746f74616c537570706c7967f8e679d19048360e414c82d82fdb33486438d37c"
if client == nil {
t.Fail()
}
result := client.InvokeScript(script)
log.Printf("%+v", result.Result)
} |
|
messages.controller.js | (function() {
'use strict';
angular
.module('app.pages')
.controller('MessagesController', MessagesController)
.controller('MessageViewModalController', MessageViewModalController)
.controller('MessageNewModalController', MessageNewModalController);
MessagesController.$inject = ['$uibModal'];
function MessagesController($uibModal) {
var vm = this;
activate();
////////////////
function activate() {
vm.display = function() {
var modalBarInstance = $uibModal.open({
animation: true,
templateUrl: 'app/views/messages.view.tpl.html',
controller: 'MessageViewModalController as mod',
// position via css class
windowClass: 'modal-right modal-auto-size',
backdropClass: '',
// sent data to the modal instance (injectable into controller)
resolve: {
data: function() {
return {
title: 'Settings'
};
}
}
});
modalBarInstance.result.then(function( /*data*/ ) {
// use data from modal here
}, function() {
// Modal dismissed
});
};
vm.compose = function() {
var modalBarInstance = $uibModal.open({
animation: true,
templateUrl: 'app/views/messages.new.tpl.html',
controller: 'MessageNewModalController as mod',
// position via css class
// windowClass: 'modal-right modal-auto-size',
// backdropClass: '',
// sent data to the modal instance (injectable into controller)
resolve: {
data: function() {
return {
title: 'Settings'
};
}
}
});
modalBarInstance.result.then(function( /*data*/ ) {
// use data from modal here
}, function() {
// Modal dismissed
});
};
}
}
MessageNewModalController.$inject = ['$uibModalInstance', 'data'];
function MessageNewModalController($uibModalInstance, data) {
var vm = this;
activate();
////////////////
function activate() {
vm.modalTitle = data.title;
vm.close = function() {
$uibModalInstance.close( /* data for promise*/ );
};
vm.cancel = function() {
| }
MessageViewModalController.$inject = ['$uibModalInstance', 'data'];
function MessageViewModalController($uibModalInstance, data) {
var vm = this;
activate();
////////////////
function activate() {
vm.modalTitle = data.title;
vm.close = function() {
$uibModalInstance.close( /* data for promise*/ );
};
vm.cancel = function() {
$uibModalInstance.dismiss('cancel');
};
}
}
})(); | $uibModalInstance.dismiss('cancel');
};
}
|
test_vocabularies.py | import pytest
pytestmark = pytest.mark.asyncio
@pytest.mark.app_settings({"applications": ["guillotina", "guillotina.contrib.vocabularies"]})
async def | (container_requester):
async with container_requester as requester:
response, _ = await requester("GET", "/db/guillotina/@vocabularies")
assert len(list(filter(lambda x: x.get("title") == "languages", response))) > 0
assert len(list(filter(lambda x: x["title"] == "countries", response))) > 0
response, _ = await requester("GET", "/db/guillotina/@vocabularies/languages")
assert len(list(filter(lambda x: x.get("token") == "ca", response["items"]))) > 0
response, _ = await requester("GET", "/db/guillotina/@vocabularies/countries")
assert len(list(filter(lambda x: x.get("token") == "AD", response["items"]))) > 0
| test_contrib_vocabulary |
main.go | // Copyright 2019 The Operator-SDK Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
// Import all Kubernetes client auth plugins (e.g. Azure, GCP, OIDC, etc.)
// to ensure that `exec-entrypoint` and `run` can make use of them.
_ "k8s.io/client-go/plugin/pkg/client/auth"
| kbutil "github.com/operator-framework/operator-sdk/internal/util/kubebuilder"
"github.com/operator-framework/operator-sdk/internal/util/projutil"
log "github.com/sirupsen/logrus"
)
func main() {
// Use the new KB CLI when running inside a Kubebuilder project with an existing PROJECT file.
if kbutil.HasProjectFile() {
if err := cli.Run(); err != nil {
log.Fatal(err)
}
return
}
// Use the legacy CLI if inside of a Go/Helm/Ansible legacy project
operatorType := projutil.GetOperatorType()
switch operatorType {
case projutil.OperatorTypeGo, projutil.OperatorTypeHelm, projutil.OperatorTypeAnsible:
// Deprecation warning for Go projects
// TODO/Discuss: UX wise, is displaying this notice on every command that runs
// in the legacy Go projects too loud.
if operatorType == projutil.OperatorTypeGo {
depMsg := "Operator SDK has a new CLI and project layout that is aligned with Kubebuilder.\n" +
"See `operator-sdk init -h` and the following doc on how to scaffold a new project:\n" +
"https://sdk.operatorframework.io/docs/golang/quickstart/\n" +
"To migrate existing projects to the new layout see:\n" +
"https://sdk.operatorframework.io/docs/golang/project_migration_guide/\n"
projutil.PrintDeprecationWarning(depMsg)
}
if err := cli.RunLegacy(); err != nil {
log.Fatal(err)
}
return
}
// Run the KB CLI when not running in either legacy or new projects
// The new CLI still supports "operator-sdk new --type=Ansible/Helm"
if err := cli.Run(); err != nil {
log.Fatal(err)
}
} | "github.com/operator-framework/operator-sdk/cmd/operator-sdk/cli" |
droppath.py | """
Modified from https://github.com/rwightman/pytorch-image-models/blob/master/timm/models/layers/drop.py
"""
import oneflow as flow
import oneflow.nn as nn
import oneflow.nn.functional as F
def drop_path(x, drop_prob: float = 0.5, training: bool = False):
|
class DropPath(nn.Module):
"""Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
"""
def __init__(self, drop_prob=None):
super(DropPath, self).__init__()
self.drop_prob = drop_prob
def forward(self, x):
return drop_path(x, self.drop_prob, self.training)
| """Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
This is the same as the DropConnect impl I created for EfficientNet, etc networks, however,
the original name is misleading as 'Drop Connect' is a different form of dropout in a separate paper...
See discussion: https://github.com/tensorflow/tpu/issues/494#issuecomment-532968956 ... I've opted for
changing the layer and argument names to 'drop path' rather than mix DropConnect as a layer name and use
'survival rate' as the argument.
"""
if drop_prob == 0.0 or not training:
return x
keep_prob = 1 - drop_prob
shape = (x.shape[0],) + (1,) * (
x.ndim - 1
) # work with diff dim tensors, not just 2D ConvNets
random_tensor = flow.rand(*shape, dtype=x.dtype, device=x.device) + keep_prob
random_tensor = random_tensor.floor() # binarize
output = x.div(keep_prob) * random_tensor
return output |
slide.tsx | import React, {
CSSProperties,
FC,
useState,
useEffect,
useRef,
useCallback
} from 'react';
import classNames from 'classnames';
import styles from './_style.module.scss';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
export interface ISlideProps {
height?: string;
className?: string;
style?: CSSProperties;
interval?: number;
onChange?: (currIdx: number) => void;
}
export interface ISlideCtx {
activeIdx: number;
// 默认从左向右轮播,reverse 为 true 时表示从右向左轮播
reverseAnimation: boolean;
}
// 导出的 ctx 对象
export const SlideCtx = React.createContext<ISlideCtx>({
activeIdx: 1,
reverseAnimation: false
});
const Slide: FC<ISlideProps> = (props) => {
const { onChange, className, style } = props;
const [currIdx, setCurrIdx] = useState<number>(0);
const [reverseAnimation, setReverseAnimation] = useState<boolean>(false);
const classes = classNames(styles.slide, className);
const { length } = props.children as Array<React.ReactNode>;
// 切换时触发 onChange 回调
useEffect(() => {
onChange && onChange(currIdx);
}, [currIdx, onChange]);
// 两个切换按钮的点击事件
const handleRBtnClick = useCallback(() => {
reverseAnimation && setReverseAnimation(false);
if (currIdx + 1 === length) {
return setCurrIdx(0);
}
setCurrIdx(currIdx + 1);
}, [currIdx, reverseAnimation, length]);
const handleLBtnClick = () => {
!reverseAnimation && setReverseAnimation(true);
if (currIdx <= 0) {
return setCurrIdx(length - 1);
}
setCurrIdx(currIdx - 1);
};
const timerRef = useRef<NodeJS.Timeout | null>(null);
const isHoverRef = useRef(false);
// 自动轮播
const startTimer: () => NodeJS.Timeout | null = useCallback(() => {
// 创建定时器时可能存在其他的轮播定时器,先清除
if (timerRef.current) {
stopTimer(timerRef.current);
}
const interval = props.interval ? props.interval : 3000;
let timerId = setInterval(() => {
handleRBtnClick();
}, interval);
return timerId;
}, [handleRBtnClick, props.interval]);
const stopTimer = (timerId?: NodeJS.Timeout | null) => {
timerId && clearInterval(timerId);
return null;
};
useEffect(() => {
if (isHoverRef.current) {
return;
}
timerRef.current = startTimer();
return () => {
timerRef.current = stopTimer(timerRef.current);
};
}, [startTimer]);
const handleMouseEnter = () => {
isHoverRef.current = true;
timerRef.current = stopTimer(timerRef.current); | };
const handleMouseLeave = () => {
isHoverRef.current = false;
timerRef.current = startTimer();
};
return (
<div
className={classes}
style={{ height: props.height, ...style }}
onMouseEnter={handleMouseEnter}
onMouseLeave={handleMouseLeave}
>
<SlideCtx.Provider
value={{ activeIdx: currIdx, reverseAnimation: reverseAnimation }}
>
<FontAwesomeIcon
className={`${styles.slide_arrow} ${styles.slide_arrow_l}`}
icon="chevron-circle-left"
onClick={handleLBtnClick}
/>
<FontAwesomeIcon
className={`${styles.slide_arrow} ${styles.slide_arrow_r}`}
icon="chevron-circle-right"
onClick={handleRBtnClick}
/>
<div className={styles.slide_wrapper}>{props.children}</div>
</SlideCtx.Provider>
</div>
);
};
export default Slide; | |
api.py | import datetime
import calendar
import httpagentparser
from flask import Blueprint, make_response, jsonify, request, url_for, render_template
from models import PostModel, TagModel, LikeModel, ReplyModel, Analyze_Pages, UserModel, Ip_Coordinates, bcrypt, \
Notifications_Model, Subscriber, Analyze_Session
import datetime as dt
from sqlalchemy import desc, func, or_
from sqlalchemy.schema import Sequence
import socket
import smtplib
import dns.resolver
import urllib
from app import db, serializer, BadSignature, BadTimeSignature, SignatureExpired, mail, translate, key_c, config
from flask_mail import Message
import requests
import jwt
import re
import os
from PIL import Image
import readtime
from webptools import webplib as webp
import json
from pywebpush import webpush, WebPushException
api = Blueprint(
'api', __name__,
url_prefix='/api'
)
VAPID_PRIVATE_KEY = "gdZv-jxuKPeaukXrkXlKZ33j4zbLDe60WCnAN0Pba94"
VAPID_PUBLIC_KEY = "BGfsb_G1tXj-jSN8h-9spz2znzfm1sib-Xx42FLmN8p7xQwv8C_ke_-77DFKkBiv843msSFlvQw0PDr2--mpJmw"
VAPID_CLAIMS = {"sub": "mailto:[email protected]"}
class CustomDict(dict):
def __init__(self):
self = dict()
def add(self, key, value):
self[key] = value
def __missing__(self, key):
value = self[key] = type(self)() # retain local pointer to value
return value
def send_notification(users, body):
check = Subscriber.query.filter(Subscriber.user.in_([users])).all()
for c in check:
try:
sub = (str(c.subscription_info).encode().decode('utf-8')).replace("'", '"')
sub = sub.replace("None", "null")
body = ((str(body).replace("'", '"')).replace("None", "null"))
send_web_push(json.loads(sub), body)
except:
pass
def send_web_push(subscription_information, body):
return webpush(
subscription_info=subscription_information,
data=body,
vapid_private_key=VAPID_PRIVATE_KEY,
vapid_claims=VAPID_CLAIMS
)
def save_img(post_id):
# if(form_img.data):
file_name, file_ext = os.path.splitext(request.files['image'].filename)
picture_fn = 'post_' + str(post_id) + file_ext
picture_path = os.path.join(config['UPLOAD_FOLDER_POST'], picture_fn)
i = Image.open(request.files['image'])
i.save(picture_path)
webp.cwebp(os.path.join(config['UPLOAD_FOLDER_POST'], picture_fn),
os.path.join(config['UPLOAD_FOLDER_POST'], 'post_' + str(post_id) + '.webp'), "-q 80")
os.remove(os.path.join(config['UPLOAD_FOLDER_POST'], picture_fn))
picture_fn = 'post_' + str(post_id) + '.webp'
return picture_fn
def save_img(user_id, type):
# if(form_img.data):
if type == 'profile':
file_name, file_ext = os.path.splitext(request.files['avatarimg'].filename)
users = db.session.query(UserModel).filter_by(id=user_id)
picture_fn = 'user_' + str(user_id) + str(file_ext)
picture_path = os.path.join(config['UPLOAD_FOLDER_PROFILE'], picture_fn)
elif type == 'cover':
file_name, file_ext = os.path.splitext(request.files['coverimg'].filename)
users = db.session.query(UserModel).filter_by(id=user_id)
picture_fn = 'user_' + str(user_id) + str(file_ext)
picture_path = os.path.join(config['UPLOAD_FOLDER_PROFILE_COVER'], picture_fn)
if type == 'profile':
i = Image.open(request.files['avatarimg'])
output_size = (500, 500)
i.thumbnail(output_size)
elif type == 'cover':
i = Image.open(request.files['coverimg'])
i.save(picture_path)
if type == 'profile':
webp.cwebp(os.path.join(config['UPLOAD_FOLDER_PROFILE'], picture_fn),
os.path.join(config['UPLOAD_FOLDER_PROFILE'], 'user_' + str(user_id) + '.webp'), "-q 80")
elif type == 'cover':
webp.cwebp(os.path.join(config['UPLOAD_FOLDER_PROFILE_COVER'], picture_fn),
os.path.join(config['UPLOAD_FOLDER_PROFILE_COVER'], 'user_' + str(user_id) + '.webp'), "-q 80")
picture_fn = 'user_' + str(user_id) + '.webp'
return picture_fn
def getItemForKey(value):
    """Sort key helper: a feed entry's trending score."""
    trending_score = value['trending_value']
    return trending_score
def getItemForKeyN(value):
    """Sort key helper: a feed entry's post id."""
    post_id = value['id']
    return post_id
def cleanhtml(raw_html):
    """Strip HTML tags and character entities from `raw_html`.

    Used to build plain-text meta descriptions for posts.  Entities are
    removed outright, not decoded ('a &amp; b' -> 'a  b').

    NOTE: the `def` line was garbled in the source; the name `cleanhtml`
    is taken from its call site (post description building).
    """
    cleanr = re.compile('<.*?>|&([a-z0-9]+|#[0-9]{1,6}|#x[0-9a-f]{1,6});')
    cleantext = re.sub(cleanr, '', raw_html)
    return cleantext
@api.route('/home', methods=['GET'])
def home():
    """Landing-page feed (first page of 9 posts).

    Optional query args:
        t      -- JWT; when valid the feed is language-filtered and
                  personalised with the user's followed tags/users.
        tag    -- restrict to posts carrying this tag.
        mode   -- 'saved' | 'recent' | 'discuss' | 'questions' | 'tutorials'.
        search -- full-text search term.

    The response also carries trending posts (scored from page-visit counts
    of the last two days) and tag suggestions.
    """
    t = request.args.get('t')
    token = None
    if t:
        token = str(t).encode()
        try:
            user = jwt.decode(token, key_c)
        except:
            return make_response(jsonify({'operation': 'failed'}), 401)
        user_info = UserModel.query.filter_by(id=user['id']).first()
    tag = request.args.get('tag')
    mode = request.args.get('mode')
    search = request.args.get('search')
    if tag:
        tag_posts = TagModel.query.filter_by(name=tag).first()
        if token:
            posts = PostModel.query.filter_by(approved=True).filter(
                or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                PostModel.id.in_(tag_posts.post)).order_by(desc(PostModel.posted_on)).paginate(page=1, per_page=9)
        else:
            posts = PostModel.query.filter_by(approved=True).filter(PostModel.id.in_(tag_posts.post)).order_by(
                desc(PostModel.posted_on)).paginate(page=1, per_page=9)
    elif mode == 'saved':
        if token:
            posts = PostModel.query.order_by(
                desc(PostModel.posted_on)).filter_by(approved=True).filter(
                or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                PostModel.id.in_(user_info.saved_posts)).order_by(desc(PostModel.posted_on)).paginate(page=1,
                                                                                                     per_page=9)
        else:
            # No token -> saved posts unknown; fall back to the default feed.
            posts = PostModel.query.filter_by(approved=True).order_by(desc(PostModel.posted_on)).paginate(page=1,
                                                                                                          per_page=9)
    elif mode == 'recent':
        if token:
            posts = PostModel.query.filter_by(approved=True).filter(
                or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).order_by(
                PostModel.id.desc()).paginate(page=1, per_page=9)
        else:
            posts = PostModel.query.order_by(
                desc(PostModel.posted_on)).filter_by(approved=True).order_by(desc(PostModel.posted_on)).paginate(page=1,
                                                                                                                 per_page=9)
    elif mode == 'discuss' or mode == 'questions' or mode == 'tutorials':
        # Each "mode" is a curated set of tag names; collect their post ids.
        if mode == 'discuss':
            tg = TagModel.query.filter(TagModel.name.in_(['discuss', 'talk'])).order_by(
                desc(func.array_length(TagModel.post, 1))).all()
        elif mode == 'tutorials':
            tg = TagModel.query.filter(TagModel.name.in_(['tutorial', 'howto', 'tutorials', 'how_to'])).order_by(
                desc(func.array_length(TagModel.post, 1))).all()
        elif mode == 'questions':
            tg = TagModel.query.filter(TagModel.name.in_(['help', 'question'])).order_by(
                desc(func.array_length(TagModel.post, 1))).all()
        tgi = []
        for tag_row in tg:
            tgi.extend(tag_row.post)
        if token:
            # FIX: this branch was the only feed query without the
            # approved=True filter (every sibling branch and /home/<page>
            # apply it), so unapproved posts leaked to logged-in users.
            posts = PostModel.query.filter_by(approved=True).filter(
                or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                PostModel.id.in_(tgi)).order_by(PostModel.id.desc()).paginate(page=1, per_page=9)
        else:
            posts = PostModel.query.order_by(
                desc(PostModel.posted_on)).filter_by(approved=True).filter(PostModel.id.in_(tgi)).order_by(
                desc(PostModel.posted_on)).paginate(page=1, per_page=9)
    elif search:
        results, total = PostModel.search_post(request.args.get('search'), 1, 9, 'en')
        if token:
            posts = PostModel.query.filter_by(approved=True).filter(
                or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                PostModel.id.in_(results)).order_by(desc(PostModel.posted_on)).paginate(page=1, per_page=9)
        else:
            posts = PostModel.query.filter_by(approved=True).filter(PostModel.id.in_(results)).order_by(
                desc(PostModel.posted_on)).paginate(page=1, per_page=9)
    else:
        if token:
            # Personalised default feed: followed tags and/or followed users.
            if len(user_info.int_tags) > 0:
                tg = TagModel.query.filter(TagModel.name.in_(user_info.int_tags)).order_by(
                    desc(func.array_length(TagModel.post, 1))).all()
                tgi = []
                for tag_row in tg:
                    tgi.extend(tag_row.post)
                if len(user_info.follow) > 0:
                    posts = PostModel.query.filter_by(approved=True).filter(
                        or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                        or_(PostModel.id.in_(tgi), PostModel.user.in_(user_info.follow))).order_by(
                        PostModel.id.desc()).paginate(page=1, per_page=9)
                else:
                    posts = PostModel.query.filter_by(approved=True).filter(
                        or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                        PostModel.id.in_(tgi)).order_by(PostModel.id.desc()).paginate(page=1, per_page=9)
            else:
                if len(user_info.follow) > 0:
                    posts = PostModel.query.filter_by(approved=True).filter(
                        PostModel.user.in_(user_info.follow)).filter(
                        or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).order_by(
                        PostModel.id.desc()).paginate(page=1, per_page=9)
                else:
                    posts = PostModel.query.filter_by(approved=True).filter(
                        or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).order_by(
                        PostModel.id.desc()).paginate(page=1, per_page=9)
        else:
            posts = PostModel.query.filter_by(approved=True).order_by(desc(PostModel.posted_on)).paginate(page=1,
                                                                                                          per_page=9)
    # Trending: average of the visit counts over the last two calendar days.
    now = dt.datetime.now()
    back_days = now - dt.timedelta(days=2)
    posts_t = PostModel.query.order_by(
        desc(PostModel.posted_on)).filter_by(approved=True).all()
    analyze_posts = Analyze_Pages.query.filter(
        Analyze_Pages.first_visited.between('{}-{}-{}'.format(back_days.year, back_days.month, back_days.day),
                                            '{}-{}-{}'.format(now.year, now.month, now.day))).all()
    trending_list = []
    today = dt.datetime.today()
    today_date = dt.date.today()
    home_json = {}
    posts_list = []
    posts_json = {}
    if tag:
        home_json['info'] = {'name': tag_posts.name}
    for post in posts.items:
        posts_json['title'] = post.title
        posts_json['id'] = post.id
        posts_json['thumbnail'] = post.thumbnail
        posts_json['posted_on'] = post.time_ago()
        posts_json['author'] = {
            'name': post.user_in.name,
            'avatar': post.user_in.avatar,
            'real_name': post.user_in.real_name
        }
        posts_json['likes'] = post.likes
        posts_json['read_time'] = post.read_time
        posts_json['link'] = (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(post.id)
        posts_json['tags'] = TagModel.query.with_entities(TagModel.name).filter_by(post = post.id).all()
        if token:
            posts_json['saved'] = post.id in user_info.saved_posts
        posts_list.append(posts_json.copy())
        posts_json.clear()
    for post in posts_t:
        day_1 = 0
        day_0 = 0
        for analyze in analyze_posts:
            # BUG FIX: this comparison was a plain literal
            # '/post/{post.title}/id={post.id}' (no formatting), so it never
            # matched a real page name and every trending score was zero.
            if analyze.name == '/post/{}/id={}'.format(post.title, post.id):
                if (today_date - analyze.first_visited).days < 2:
                    day_1 += analyze.visits
                if (today_date - analyze.first_visited).days < 1:
                    day_0 += analyze.visits
        total = (day_1 + day_0) / 2
        trending_list.append({
            'trending_value': total,
            'id': post.id,
            'title': post.title,
            'link': (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(post.id),
            'author': {
                'id': post.user_in.id,
                'name': post.user_in.name,
                'avatar': post.user_in.avatar
            },
            'tags': TagModel.query.with_entities(TagModel.name).filter(TagModel.post.contains([post.id])).all()
        })
    trending_list.sort(key=getItemForKey, reverse=True)
    home_json['posts'] = posts_list
    home_json['trending'] = trending_list[0:6]
    # Anonymous users get the ten most-used tags as suggestions.
    tags = db.session.query(TagModel).with_entities(TagModel.name).order_by(
        desc(func.array_length(TagModel.post, 1))).limit(10).all()
    home_json['utilities'] = {'tags': tags}
    if search:
        home_json['utilities'] = {'tags': tags, 'search': search}
    if not token:
        return make_response(jsonify(home_json), 200)
    # Logged-in users: most-used tags they do not already follow, flattened
    # from the 1-tuples that with_entities() returns.
    tags = db.session.query(TagModel).with_entities(TagModel.name).filter(
        ~TagModel.name.in_(user_info.int_tags)).order_by(desc(func.array_length(TagModel.post, 1))).limit(10).all()
    tags_ = [name for (name,) in tags]
    home_json['utilities'] = {'tags': tags_}
    home_json['user'] = {'flw_tags': user_info.int_tags}
    if search:
        home_json['utilities'] = {'tags': tags_, 'search': search}
    response = make_response(jsonify(home_json), 200)
    return response
@api.route('/home/<int:page>', methods=['GET'])
def home_page(page):
    """Subsequent feed pages (9 posts each) for infinite scrolling.

    Accepts the same optional query args as /home (t, tag, mode, search)
    and returns the requested page plus a `hasnext` flag.
    """
    t = request.args.get('t')
    token = None
    if t:
        token = str(t).encode()
        try:
            user = jwt.decode(token, key_c)
        except:
            return make_response(jsonify({'operation': 'failed'}), 401)
        user_info = UserModel.query.filter_by(id=user['id']).first()
    tag = request.args.get('tag')
    mode = request.args.get('mode', False)
    search = request.args.get('search')
    if tag:
        tag_posts = TagModel.query.filter_by(name=tag).first()
        if token:
            posts = PostModel.query.filter_by(approved=True).filter(
                or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                PostModel.id.in_(tag_posts.post)).order_by(desc(PostModel.posted_on)).paginate(page=page, per_page=9)
        else:
            posts = PostModel.query.filter_by(approved=True).filter(PostModel.id.in_(tag_posts.post)).order_by(
                desc(PostModel.posted_on)).paginate(page=page, per_page=9)
    elif mode:
        if mode == 'saved':
            if token:
                posts = PostModel.query.order_by(
                    desc(PostModel.posted_on)).filter_by(approved=True).filter(
                    or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                    PostModel.id.in_(user_info.saved_posts)).order_by(desc(PostModel.posted_on)).paginate(page=page,
                                                                                                          per_page=9)
            else:
                # No token -> saved posts unknown; default feed.
                posts = PostModel.query.filter_by(approved=True).order_by(desc(PostModel.posted_on)).paginate(page=page,
                                                                                                              per_page=9)
        elif mode == 'recent':
            if token:
                posts = PostModel.query.filter_by(approved=True).filter(
                    or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).order_by(
                    PostModel.id.desc()).paginate(page=page, per_page=9)
            else:
                posts = PostModel.query.order_by(
                    desc(PostModel.posted_on)).filter_by(approved=True).order_by(desc(PostModel.posted_on)).paginate(
                    page=page, per_page=9)
        elif mode == 'discuss' or mode == 'questions' or mode == 'tutorials':
            # Each "mode" maps to a curated set of tag names.
            if mode == 'discuss':
                tg = TagModel.query.filter(TagModel.name.in_(['discuss', 'talk'])).order_by(
                    desc(func.array_length(TagModel.post, 1))).all()
            elif mode == 'tutorials':
                tg = TagModel.query.filter(TagModel.name.in_(['tutorial', 'howto', 'tutorials', 'how_to'])).order_by(
                    desc(func.array_length(TagModel.post, 1))).all()
            elif mode == 'questions':
                tg = TagModel.query.filter(TagModel.name.in_(['help', 'question'])).order_by(
                    desc(func.array_length(TagModel.post, 1))).all()
            tgi = []
            for tag_row in tg:
                tgi.extend(tag_row.post)
            if token:
                posts = PostModel.query.filter_by(approved=True).filter(
                    or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                    PostModel.id.in_(tgi)).order_by(PostModel.id.desc()).paginate(page=page, per_page=9)
            else:
                posts = PostModel.query.order_by(
                    desc(PostModel.posted_on)).filter_by(approved=True).filter(PostModel.id.in_(tgi)).order_by(
                    desc(PostModel.posted_on)).paginate(page=page, per_page=9)
        else:
            # FIX: an unrecognised (but truthy) mode previously left `posts`
            # unbound and crashed with UnboundLocalError; fall back to the
            # default feed instead.
            posts = PostModel.query.filter_by(approved=True).order_by(desc(PostModel.posted_on)).paginate(page=page,
                                                                                                          per_page=9)
    elif search:
        results, total = PostModel.search_post(request.args.get('search'), 1, 9, 'en')
        if token:
            posts = PostModel.query.filter_by(approved=True).filter(
                or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                PostModel.id.in_(results)).order_by(desc(PostModel.posted_on)).paginate(page=page, per_page=9)
        else:
            posts = PostModel.query.filter_by(approved=True).filter(PostModel.id.in_(results)).order_by(
                desc(PostModel.posted_on)).paginate(page=page, per_page=9)
    else:
        if token:
            # Personalised default feed: followed tags and/or followed users.
            if len(user_info.int_tags) > 0:
                tg = TagModel.query.filter(TagModel.name.in_(user_info.int_tags)).order_by(
                    desc(func.array_length(TagModel.post, 1))).all()
                tgi = []
                for tag_row in tg:
                    tgi.extend(tag_row.post)
                if len(user_info.follow) > 0:
                    posts = PostModel.query.filter_by(approved=True).filter(
                        or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                        or_(PostModel.id.in_(tgi), PostModel.user.in_(user_info.follow))).order_by(
                        PostModel.id.desc()).paginate(page=page, per_page=9)
                else:
                    posts = PostModel.query.filter_by(approved=True).filter(
                        or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).filter(
                        PostModel.id.in_(tgi)).order_by(PostModel.id.desc()).paginate(page=page, per_page=9)
            else:
                if len(user_info.follow) > 0:
                    posts = PostModel.query.filter_by(approved=True).filter(
                        PostModel.user.in_(user_info.follow)).filter(
                        or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).order_by(
                        PostModel.id.desc()).paginate(page=page, per_page=9)
                else:
                    posts = PostModel.query.filter_by(approved=True).filter(
                        or_(PostModel.lang.like(user_info.lang), PostModel.lang.like('en'))).order_by(
                        PostModel.id.desc()).paginate(page=page, per_page=9)
        else:
            posts = PostModel.query.filter_by(approved=True).order_by(desc(PostModel.posted_on)).paginate(page=page,
                                                                                                          per_page=9)
    home_json = {}
    posts_list = []
    posts_json = {}
    for post in posts.items:
        posts_json['title'] = post.title
        posts_json['id'] = post.id
        posts_json['thumbnail'] = post.thumbnail
        posts_json['posted_on'] = post.time_ago()
        posts_json['author'] = {
            'name': post.user_in.name,
            'avatar': post.user_in.avatar,
            'real_name': post.user_in.real_name
        }
        posts_json['likes'] = post.likes
        posts_json['read_time'] = post.read_time
        posts_json['link'] = (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(post.id)
        posts_json['tags'] = TagModel.query.with_entities(TagModel.name).filter_by(post = post.id).all()
        if token:
            posts_json['saved'] = post.id in user_info.saved_posts
        posts_list.append(posts_json.copy())
        posts_json.clear()
    home_json['posts'] = posts_list
    home_json['hasnext'] = posts.has_next
    response = make_response(jsonify(home_json), 200)
    return response
@api.route('/post/<int:id>')
def post(id):
    """Full post payload: content, SEO fields, author card with their recent
    posts, replies, and -- when a valid `t` token is supplied -- the
    viewer's liked/following flags.
    """
    post = PostModel.query.filter_by(id=id).first()
    # Unknown id -> clean 404 instead of an AttributeError/500.
    if post is None:
        return make_response(jsonify({'operation': 'failed'}), 404)
    post_json = {}
    reply_json = {}
    user_posts = []
    keywords = ''
    post_json['title'] = post.title
    post_json['link'] = (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(post.id)
    post_json['id'] = post.id
    post_json['text'] = post.text
    post_json['likes'] = post.likes
    post_json['closed'] = post.closed
    post_json['thumbnail'] = post.thumbnail
    # Meta keywords: the title words, comma-joined (with trailing comma,
    # as the original produced).
    for key in str(post.title).split(" "):
        keywords += key + ','
    post_json['keywords'] = keywords
    post_json['description'] = cleanhtml(post.text)[:97]
    if post.closed:
        post_json['closed_on'] = post.closed_on
        post_json['closed_by'] = post.closed_by_name()
    post_json['author'] = {
        'name': post.user_in.name,
        'avatar': post.user_in.avatar,
        'real_name': post.user_in.real_name,
        'id': post.user_in.id,
        'joined_on': str(post.user_in.join_date.ctime())[:-14] + ' ' + str(post.user_in.join_date.ctime())[20:],
        'profession': post.user_in.profession,
        'country': post.user_in.country_name,
        'country_flag': post.user_in.country_flag,
        'posts': []
    }
    post_json['tags'] = TagModel.query.with_entities(TagModel.name).filter_by(post = post.id).all()
    post_json['replies'] = []
    for reply in post.replyes:
        reply_json['text'] = reply.text
        reply_json['id'] = reply.id
        reply_json['author'] = {
            'name': reply.user_in.name,
            'id': reply.user_in.id,
            'avatar': reply.user_in.avatar,
            'status': reply.user_in.status,
            'status_color': reply.user_in.status_color
        }
        post_json['replies'].append(reply_json.copy())
        reply_json.clear()
    # Distinct loop variable: the original shadowed `post` here, which only
    # worked by accident afterwards.
    for author_post in post.user_in.posts:
        user_posts.append({
            'id': author_post.id,
            'title': author_post.title,
            'link': (str(author_post.title).replace(' ', '-')).replace('?', '') + '-' + str(author_post.id),
            'author': {
                'id': author_post.user_in.id,
                'name': author_post.user_in.name,
                'avatar': author_post.user_in.avatar
            },
            'tags': TagModel.query.with_entities(TagModel.name).filter_by(post = author_post.id).all()
        })
    post_json['author']['posts'] = user_posts[0:5]
    token = request.args.get('t')
    if token:
        token = str(token).encode()
        try:
            user = jwt.decode(token, key_c)
        except:
            return make_response(jsonify({'operation': 'failed'}), 401)
        # Single decode + single user query (the original decoded the token
        # and fetched the user twice).
        user_info = UserModel.query.filter_by(id=user['id']).first()
        post_json['user'] = {'liked': False, 'following': False}
        if id in user_info.liked_posts:
            post_json['user']['liked'] = True
        if post.user_in.id in user_info.follow:
            post_json['user']['following'] = True
    response = make_response(jsonify(post_json), 200)
    return response
@api.route('/user/<string:name>')
def user(name):
    """Public profile payload for `name`: bio/social fields, their posts,
    a follower sample, and -- with a valid `t` token -- whether the viewer
    already follows them.
    """
    user = UserModel.query.filter_by(name=name).first()
    # Unknown username -> clean 404 instead of an AttributeError/500.
    if user is None:
        return make_response(jsonify({'operation': 'failed'}), 404)
    followed = []
    if user.followed:
        # Up to five accounts from the `followed` array (rendered under
        # 'follows' below -- presumably the follower sample; the column
        # naming is ambiguous, TODO confirm against the model).
        followed = UserModel.query.filter(UserModel.id.in_(user.followed[0:5])).all()
    user_json = {}
    user_follow_list = []
    user_follow_json = {}
    posts_user_list = []
    posts_temp = {}
    for post in user.posts:
        posts_temp['title'] = post.title
        posts_temp['author'] = {
            'name': user.name,
            'avatar': user.avatar
        }
        posts_temp['posted_on'] = post.time_ago()
        posts_temp['tags'] = TagModel.query.with_entities(TagModel.name).filter_by(post = post.id).all()
        posts_temp['read_time'] = post.read_time
        posts_temp['id'] = post.id
        posts_temp['link'] = (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(post.id)
        posts_user_list.append(posts_temp.copy())
    user_json['id'] = user.id
    user_json['name'] = user.name
    user_json['real_name'] = user.real_name
    user_json['avatar'] = user.avatar
    user_json['cover'] = user.cover
    user_json['bio'] = user.bio
    user_json['profession'] = user.profession
    user_json['country_name'] = user.country_name
    user_json['country_flag'] = user.country_flag
    user_json['join_date'] = str(user.join_date.ctime())[:-14] + ' ' + str(user.join_date.ctime())[20:]
    # `or []` guards NULL array columns, which previously crashed len().
    user_json['followed_count'] = len(user.followed or [])
    user_json['tags_check'] = bool(user.int_tags)
    user_json['tags'] = user.int_tags
    user_json['post_count'] = PostModel.query.filter_by(user=user.id).filter_by(approved=True).count()
    user_json['reply_count'] = ReplyModel.query.filter_by(user=user.id).count()
    user_json['post_views'] = 53  # TODO(review): hard-coded placeholder value
    user_json['posts'] = sorted(posts_user_list, key=lambda i: i['id'], reverse=True)
    user_json['follow_check'] = bool(user.followed)
    if user.facebook or user.twitter or user.github or user.instagram or user.website:
        user_json['social'] = True
    if user.facebook:
        user_json['facebook'] = user.facebook
    if user.instagram:
        user_json['instagram'] = user.instagram
    if user.twitter:
        user_json['twitter'] = user.twitter
    if user.github:
        user_json['github'] = user.github
    if user.website:
        user_json['website'] = user.website
    if user.followed:
        for f in followed:
            user_follow_json['name'] = f.name
            user_follow_json['real_name'] = f.real_name
            user_follow_json['avatar'] = f.avatar
            user_follow_list.append(user_follow_json.copy())
        user_json['follows'] = user_follow_list
    token = request.args.get('t')
    if token:
        token = str(token).encode()
        try:
            user_ = jwt.decode(token, key_c)
        except:
            return make_response(jsonify({'operation': 'failed'}), 401)
        # Reuse the decode result (the original decoded the token twice).
        user_json['info'] = {'following': False}
        if user_['id'] in (user.followed or []):
            user_json['info']['following'] = True
    response = make_response(jsonify(user_json), 200)
    return response
@api.route('/user/settings', methods=['GET', 'POST'])
def user_settings():
    """GET: return the authenticated user's editable settings.
    POST: apply the submitted settings and return a freshly minted JWT.

    Auth comes from the `t` query-string token; 401 without a valid one.
    """
    t = request.args.get('t')
    token = None
    if t:
        token = str(t).encode()
        try:
            user = jwt.decode(token, key_c)
        except:
            return make_response(jsonify({'operation': 'failed'}), 401)
        user_info = UserModel.query.filter_by(id=user['id']).first()
    else:
        return make_response(jsonify({'error': 'No token'}), 401)
    if request.method == 'POST':
        # Settings arrive as a JSON string inside the multipart form field
        # 'data' (images travel in the same multipart request).
        data = json.loads(request.form['data'].encode().decode('utf-8'))
        # if str(user_info.email).replace(" ", "") != str(data['email']).replace(" ",""):
        user_info.real_name = data['real_name']
        user_info.email = data['email']
        user_info.bio = data['bio']
        user_info.profession = data['profession']
        user_info.instagram = data['instagram']
        user_info.facebook = data['facebook']
        user_info.github = data['github']
        user_info.twitter = data['twitter']
        user_info.website = data['website']
        user_info.theme = data['theme']
        user_info.theme_mode = data['theme_mode']
        user_info.genre = data['genre']
        # Only touch the stored images when new ones were uploaded.
        if data['avatarimg']:
            user_info.avatar = 'https://newapp.nl/static/profile_pics/' + save_img(user_info.id, 'profile')
        if data['coverimg']:
            user_info.cover = 'https://newapp.nl/static/profile_cover/' + save_img(user_info.id, 'cover')
        db.session.commit()
        # Re-issue the JWT so the client immediately sees the updated
        # name/avatar/theme without logging in again.
        token = jwt.encode({'id': user_info.id,
                            'perm_lvl': user_info.role,
                            'permissions': {
                                'post_permission': user_info.roleinfo.post_permission,
                                'delete_post_permission': user_info.roleinfo.delete_post_permission,
                                'delete_reply_permission': user_info.roleinfo.delete_reply_permission,
                                'edit_post_permission': user_info.roleinfo.edit_post_permission,
                                'edit_reply_permission': user_info.roleinfo.edit_reply_permission,
                                'close_post_permission': user_info.roleinfo.close_post_permission,
                                'admin_panel_permission': user_info.roleinfo.admin_panel_permission
                            },
                            'name': user_info.name,
                            'realname': user_info.real_name,
                            'avatar': user_info.avatar,
                            'theme': user_info.theme,
                            'theme_mode': user_info.theme_mode,
                            'epx': str(dt.datetime.now() + dt.timedelta(minutes=60))}, key_c)
        return make_response(jsonify({'operation': 'success', 'token': token.decode('UTF-8')}), 200)
    # GET: echo the current settings back to the settings form.
    settings_json = {}
    settings_json['name'] = user_info.name
    settings_json['real_name'] = user_info.real_name
    settings_json['email'] = user_info.email
    settings_json['bio'] = user_info.bio
    settings_json['profession'] = user_info.profession
    settings_json['instagram'] = user_info.instagram
    settings_json['facebook'] = user_info.facebook
    settings_json['github'] = user_info.github
    settings_json['twitter'] = user_info.twitter
    settings_json['website'] = user_info.website
    settings_json['genre'] = user_info.genre
    settings_json['theme_mode'] = user_info.theme_mode
    settings_json['theme'] = user_info.theme
    settings_json['avatar'] = user_info.avatar
    settings_json['cover'] = user_info.cover
    return make_response(jsonify({'settings': settings_json}), 200)
@api.route('/register', methods=['GET', 'POST'])
def register():
    """Create a new, deactivated account and e-mail a confirmation link.

    Expects JSON: username, email, realname, password.  Country and language
    are derived from the client IP.  The account stays inactive until the
    /register/confirm link from the e-mail is visited.
    """
    data = request.json
    # .get() instead of [] so a missing field yields the 401 error response
    # instead of a KeyError -> 500.
    if data is None or data.get('username') is None or data.get('email') is None \
            or data.get('realname') is None or data.get('password') is None:
        return jsonify({'register': 'Error'}), 401
    check = UserModel.query.filter_by(name=data['username']).first()
    if check is not None:
        return jsonify({'register': 'Username taken'}), 401
    check = UserModel.query.filter_by(email=data['email']).first()
    if check is not None:
        return jsonify({'register': 'Email taken'}), 401
    # Signed token embeds the e-mail; verified later by /register/confirm.
    token = serializer.dumps(data['email'], salt='register-confirm')
    userInfo = httpagentparser.detect(request.headers.get('User-Agent'))
    # Honour the reverse-proxy header when present.
    if request.environ.get('HTTP_X_FORWARDED_FOR') is None:
        userIP = request.environ['REMOTE_ADDR']
    else:
        userIP = request.environ['HTTP_X_FORWARDED_FOR']
    # Geo lookup: cached coordinates first, remote services otherwise.
    ip_user = Ip_Coordinates.query.filter_by(ip=userIP).first()
    if ip_user is None:
        resp = requests.get(
            ('https://www.iplocate.io/api/lookup/{}').format(userIP))
        userLoc = resp.json()
        iso_code = userLoc['country_code']
        country_name = userLoc['country']
        rest = False
    else:
        resp = requests.get(
            ("https://restcountries.eu/rest/v2/alpha/{}").format(ip_user.location.iso_code))
        userLoc = resp.json()
        country_name = userLoc['name']
        iso_code = ip_user.location.iso_code
        rest = True
    if rest:
        userLanguage = userLoc['languages'][0]['iso639_1']
    else:
        api_2 = requests.get(
            ("https://restcountries.eu/rest/v2/alpha/{}").format(iso_code))
        result_2 = api_2.json()
        userLanguage = result_2['languages'][0]['iso639_1']
    msg = Message('Confirm Email Registration', sender='[email protected]', recipients=[data['email']])
    link = 'https://newapp.nl' + url_for('users.confirm_register', email=data['email'], token=token)
    msg.html = render_template('email_register.html', register=link, email='[email protected]')
    mail.send(msg)
    # NOTE(review): UserModel takes a long positional argument list; the
    # order below must match the model's __init__ exactly.
    new_user = UserModel(
        None,
        None,
        data['username'].lower(),
        data['realname'],
        data['email'].lower(),
        data['password'],
        "https://www.component-creator.com/images/testimonials/defaultuser.png",
        None,
        None,
        None,
        False,
        userIP,
        userInfo['browser']['name'],
        str(country_name),
        str(iso_code).lower(),
        str(userLanguage).lower(),
        None,
        None,
        None,
        None,
        None,
        None,
        None,
        None,
        None,
        None,
        None,
        None,
        None,
        'Light',
        None,
        None,
        None,
        'system',
        None
    )
    db.session.add(new_user)
    db.session.commit()
    response = make_response(jsonify({'register': 'success'}), 200)
    return response
@api.route('/register/confirm', methods=['GET'])
def confirm():
    """Activate an account from the signed e-mail confirmation link.

    The token (itsdangerous, salt 'register-confirm', 5-minute expiry)
    carries the e-mail address that was registered.
    """
    try:
        email = serializer.loads(request.args.get('token'), salt='register-confirm', max_age=300)
    # One handler covers all itsdangerous failures (expired / tampered /
    # malformed) -- the original had three identical except clauses.
    except (SignatureExpired, BadTimeSignature, BadSignature):
        return jsonify({'confirm': 'Invalid Token'}), 401
    # SECURITY FIX: activate the address recovered from the *signed* token.
    # The original ignored `email` and trusted the caller-supplied ?email=
    # query parameter, letting anyone activate an arbitrary account with
    # any currently-valid token.
    users = db.session.query(UserModel).filter_by(email=email).first()
    if users is None:
        # No such account -> same opaque failure as a bad token.
        return jsonify({'confirm': 'Invalid Token'}), 401
    users.activated = True
    db.session.commit()
    return jsonify({'confirm': 'success'}), 200
@api.route('/register/check/username/<string:user>')
def check_username(user):
    """Signup-form availability check: {'check': True} means taken."""
    taken = UserModel.query.filter_by(name=user).first() is not None
    return jsonify({'check': taken}), 200
@api.route('/register/check/email/<string:email>')
def check_email(email):
    """Signup-form availability check: {'check': True} means taken."""
    taken = UserModel.query.filter_by(email=email).first() is not None
    return jsonify({'check': taken}), 200
@api.route('/login', methods=['GET'])
def login():
    """HTTP Basic login; on success returns a JWT carrying the user's id,
    role permissions and display/theme data.

    The username field accepts either the account name or the e-mail.
    """
    auth = request.authorization
    if not auth:
        return make_response(jsonify({'login': 'No credentials'}), 401,
                             {'WWW-Authenticate': 'Basic realm="Login Required"'})
    if not auth.username:
        return make_response(jsonify({'login': 'No username'}), 401,
                             {'WWW-Authenticate': 'Basic realm="Login Required"'})
    user = UserModel.query.filter_by(name=auth.username).first()
    if not user:
        # Fall back to e-mail lookup.
        user = UserModel.query.filter_by(email=auth.username).first()
        if not user:
            return make_response(jsonify({'login': 'No user', 'camp': 'user'}), 401,
                                 {'WWW-Authenticate': 'Basic realm="Login Required"'})
    if not auth.password:
        return make_response(jsonify({'login': 'No password'}), 401,
                             {'WWW-Authenticate': 'Basic realm="Login Required"'})
    # `not ...` instead of the `== False` anti-idiom.
    if not bcrypt.check_password_hash(user.password, auth.password):
        return make_response(jsonify({'login': 'Wrong password', 'camp': 'password'}), 401,
                             {'WWW-Authenticate': 'Basic realm="Login Required"'})
    token = jwt.encode({'id': user.id,
                        'perm_lvl': user.role,
                        'permissions': {
                            'post_permission': user.roleinfo.post_permission,
                            'delete_post_permission': user.roleinfo.delete_post_permission,
                            'delete_reply_permission': user.roleinfo.delete_reply_permission,
                            'edit_post_permission': user.roleinfo.edit_post_permission,
                            'edit_reply_permission': user.roleinfo.edit_reply_permission,
                            'close_post_permission': user.roleinfo.close_post_permission,
                            'admin_panel_permission': user.roleinfo.admin_panel_permission
                        },
                        'name': user.name,
                        'realname': user.real_name,
                        'avatar': user.avatar,
                        'theme': user.theme,
                        'theme_mode': user.theme_mode,
                        # NOTE(review): 'epx' looks like a typo for the JWT
                        # 'exp' claim -- kept because clients read this key.
                        'epx': str(dt.datetime.now() + dt.timedelta(minutes=60))}, key_c)
    return make_response(jsonify({'login': token.decode('UTF-8')}), 200)
@api.route('/follow-tag/<string:tag>')
def fllw_tag(tag):
    """Toggle `tag` in the authenticated user's followed-tags list."""
    token = request.args.get('t')
    if not token:
        return make_response(jsonify({'operation': 'failed'}), 401)
    try:
        user_t = jwt.decode(token, key_c)
    except:
        return make_response(jsonify({'operation': 'failed'}), 401)
    user = UserModel.query.filter_by(id=user_t['id']).first()
    # Work on a copy: SQLAlchemy ARRAY columns only persist when a new list
    # object is assigned.  `or []` guards a NULL column (the original's
    # `if int_tags is not None` check was dead -- list() never returns None,
    # and list(None) would already have raised).
    int_tags = list(user.int_tags or [])
    if tag in int_tags:
        int_tags.remove(tag)
        response = jsonify({'operation': 'unfollowed'})
    else:
        int_tags.append(tag)
        response = jsonify({'operation': 'followed'})
    user.int_tags = int_tags
    db.session.commit()
    return response
@api.route('/like-post/<int:id>')
def like_post(id):
    """Toggle the authenticated user's like on post `id`.

    Adjusts the post's like counter, records (or re-flags) a notification
    row for the author and pushes a web notification.
    """
    token = request.args.get('t')
    if not token:
        return make_response(jsonify({'operation': 'failed'}), 401)
    try:
        user_t = jwt.decode(token, key_c)
    except:
        return make_response(jsonify({'operation': 'failed'}), 401)
    user = UserModel.query.filter_by(id=user_t['id']).first()
    # Copy: ARRAY columns only persist on re-assignment; `or []` guards a
    # NULL column.  (The original's outer `if like is not None` branch was
    # dead code -- list() never returns None -- and duplicated the like
    # path verbatim; both paths are consolidated below.)
    like = list(user.liked_posts or [])
    post = PostModel.query.filter_by(id=id).first()
    not_id = str(db.session.execute(Sequence('notifications_id_seq')))
    post_link = '/post/' + (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(post.id)
    if id in like:
        like.remove(id)
        response = jsonify({'operation': 'unliked'})
        post.likes = post.likes - 1
        action = 'unliked'
        notif_type = 'unlike'
    else:
        like.append(id)
        response = jsonify({'operation': 'liked'})
        post.likes = post.likes + 1
        action = 'liked'
        notif_type = 'like'
    title = '{} {} your post'.format(user.name, action)
    notify = Notifications_Model(
        int(not_id),
        user.id,
        title,
        post.title,
        post_link + '?notification_id=' + not_id,
        post.user_in.id,
        False,
        None,
        notif_type
    )
    send_notification(post.user_in.id, {
        'text': '@' + title,
        'link': post_link,
        'icon': user.avatar,
        'id': not_id
    })
    # Re-flag an existing identical notification instead of inserting a
    # duplicate row.  (The original filtered on body=str(post.title) in one
    # branch and body=post.title in the others; unified here.)
    not_check = Notifications_Model.query.filter_by(
        title=title).filter_by(body=post.title).first()
    if not_check is not None:
        not_check.checked = False
    else:
        db.session.add(notify)
    user.liked_posts = like
    db.session.commit()
    return make_response(response, 200)
@api.route('/follow-user/<int:id>')
def follow_user(id):
    """Toggle whether the authenticated user follows user `id`.

    Updates both sides (the follower's `follow` array and the target's
    `followed` array), records a notification row and pushes a web
    notification to the target.
    """
    token = request.args.get('t')
    if not token:
        return make_response(jsonify({'operation': 'failed'}), 401)
    try:
        user_t = jwt.decode(token, key_c)
    except:
        return make_response(jsonify({'operation': 'failed'}), 401)
    user = UserModel.query.filter_by(id=user_t['id']).first()
    not_id = str(db.session.execute(Sequence('notifications_id_seq')))
    # Copies: ARRAY columns only persist on re-assignment; `or []` guards
    # NULL columns.  (The original's outer `if follow is not None` was dead
    # code -- list() never returns None -- and duplicated the follow path
    # verbatim; consolidated below.)
    follow = list(user.follow or [])
    followed = UserModel.query.filter_by(id=id).first()
    user_followed = list(followed.followed or [])
    if id in follow:
        follow.remove(id)
        # Guarded: if the two arrays ever drift out of sync, an unguarded
        # remove() raised ValueError here.
        if user.id in user_followed:
            user_followed.remove(user.id)
        response = jsonify({'operation': 'unfollowed'})
        # NOTE(review): 'unfolowed'/'folowing' typos below are kept -- they
        # are user-facing strings existing clients/rows may depend on.
        title = '{} unfolowed you'.format(user.name)
        link = '/user/' + str(user.name) + '?notification_id=' + not_id
    else:
        follow.append(id)
        user_followed.append(user.id)
        response = jsonify({'operation': 'followed'})
        title = '{} started folowing you'.format(user.name)
        link = '/user/' + str(user.name)
    notify = Notifications_Model(
        int(not_id),
        user.id,
        title,
        user.name,
        link,
        id,
        False,
        None,
        'follow'
    )
    send_notification(id, {
        'text': '@' + title,
        'link': '/user/' + str(user.name),
        'icon': user.avatar,
        'id': not_id
    })
    db.session.add(notify)
    user.follow = follow
    followed.followed = user_followed
    db.session.commit()
    return make_response(response, 200)
@api.route('/save-post/<int:id>')
def save_post(id):
    """Toggle post `id` in the authenticated user's saved-posts list."""
    token = request.args.get('t')
    if not token:
        return make_response(jsonify({'operation': 'failed'}), 401)
    try:
        user_t = jwt.decode(token, key_c)
    except:
        return make_response(jsonify({'operation': 'failed'}), 401)
    user = UserModel.query.filter_by(id=user_t['id']).first()
    # Copy: ARRAY columns only persist on re-assignment; `or []` guards a
    # NULL column.  (The original's `if posts is not None` branch was dead
    # code -- list() never returns None -- and duplicated the save path.)
    saved = list(user.saved_posts or [])
    if id in saved:
        saved.remove(id)
        response = jsonify({'operation': 'deleted'})
    else:
        saved.append(id)
        response = jsonify({'operation': 'saved'})
    user.saved_posts = saved
    db.session.commit()
    return make_response(response, 200)
@api.route('/newreply', methods=['POST'])
def newreply():
if request.method != 'POST':
return make_response(jsonify({'operation': 'error', 'error': 'Invalid method'}), 401)
data = request.json
if not data['token'] or not data['post_id'] or not data['content']:
return make_response(jsonify({'operation': 'error', 'error': 'Missing data'}), 401)
try:
decoded = jwt.decode(str(data['token']).encode(), key_c)
except:
return make_response(jsonify({'operation': 'error', 'error': 'Invalid token'}), 401)
new_reply = ReplyModel(None, data['content'], data['post_id'], decoded['id'], None)
not_id = str(db.session.execute(Sequence('notifications_id_seq')))
index = db.session.execute(Sequence('replyes_id_seq'))
post = PostModel.query.filter_by(id=data['post_id']).first()
notify = Notifications_Model(
int(not_id),
decoded['id'],
'{} replied to your post'.format(decoded['name']),
post.title,
'/post/' + (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(
post.id) + '?notification_id=' + str(not_id),
post.user_in.id,
False,
None,
'reply'
)
send_notification(post.user_in.id, {
'text': '@{} replied to your post'.format(decoded['name']),
'link': '/post/' + (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(post.id) + '#reply_' + str(
index),
'icon': decoded['avatar'],
'id': not_id
})
db.session.add(new_reply)
db.session.commit()
db.session.add(notify)
db.session.commit()
return make_response(jsonify({'operation': 'success', 'reply_id': index}), 200)
@api.route('/newpost', methods=['POST'])
def newpost():
if request.method != 'POST':
return make_response(jsonify({'operation': 'error', 'error': 'Invalid method'}), 401)
data = json.loads(request.form['data'].encode().decode('utf-8'))
if not data['token'] or not data['title'] or not data['content'] or not data['title'] or not data['tags']:
return make_response(jsonify({'operation': 'error', 'error': 'Missing data'}), 401)
try:
decoded = jwt.decode(str(data['token']).encode(), key_c)
user_ = UserModel.query.filter_by(id=decoded['id']).first()
except:
return make_response(jsonify({'operation': 'error', 'error': 'Invalid token'}), 401)
index = db.session.execute(Sequence('posts_id_seq'))
not_id = str(db.session.execute(Sequence('notifications_id_seq')))
thumbnail_link = None
if data['image']:
thumbnail = save_img(index)
thumbnail_link = 'https://newapp.nl' + url_for('static', filename='thumbail_post/{}'.format(thumbnail))
lang = translate.getLanguageForText(str(cleanhtml(data['content'])).encode('utf-8-sig'))
new_post = PostModel(
index,
data['title'],
data['content'],
None,
None,
decoded['id'],
None,
True,
False,
None,
None,
str(lang.iso_tag).lower(),
thumbnail_link,
None,
str(readtime.of_html(data['content']))
)
tags = []
tag_p = str(data['tags']).lower()
tag = tag_p.replace(" ", "")
tags = tag.split(",")
for t in tags:
temp = TagModel.query.filter_by(name=str(t).lower()).first()
if temp is not None:
d = []
d = list(temp.post)
d.append(index)
temp.post = d
else:
tag = TagModel(
None,
str(t).lower(),
[index]
)
db.session.add(tag)
for user in user_.followed:
notification = Notifications_Model(
int(not_id),
decoded['id'],
'{} shared a new post'.format(decoded['name']),
str(data['title']),
'/post/' + (str(data['title']).replace(' ', '-')).replace('?', '') + '-' + str(index),
user,
None,
None,
'post'
)
send_notification(post.user_in.id, {
'text': '@{} shared a new post'.format(decoded['name']),
'link': '/post/' + (str(data['title']).replace(' ', '-')).replace('?', '') + '-' + str(index),
'icon': decoded['avatar'],
'id': not_id
})
db.session.add(notification)
db.session.add(new_post)
db.session.commit()
return make_response(jsonify({'operation': 'success',
'link': '/post/' + (str(data['title']).replace(' ', '-')).replace('?',
'') + '-' + str(
index)}), 200)
@api.route('/post/delete/<int:id>')
def delete_post(id):
token = request.args.get('t')
if not token:
return make_response(jsonify({'operation': 'failed'}), 401)
try:
user_t = jwt.decode(token, key_c)
except:
return make_response(jsonify({'operation': 'failed'}), 401)
user = UserModel.query.filter_by(id=user_t['id']).first()
post = PostModel.query.filter_by(id=id).first()
if user.id != post.user_in.id and user.roleinfo.delete_post_permission == False:
return make_response(jsonify({'operation': 'failed'}), 401)
if post.thumbnail:
try:
picture_fn = 'post_' + str(id) + '.webp'
os.remove(os.path.join(
config['UPLOAD_FOLDER_POST'], picture_fn))
except:
pass
PostModel.query.filter_by(id=id).delete()
ReplyModel.query.filter_by(post_id=id).delete()
tags = TagModel.query.filter(
TagModel.post.contains([id])).all()
for t in tags:
x = list(t.post)
x.remove(id)
t.post = x
db.session.commit()
return make_response(jsonify({'operation': 'success'}), 200)
@api.route('/post/close/<int:id>')
def close_post(id):
token = request.args.get('t')
if not token:
return make_response(jsonify({'operation': 'failed'}), 401)
try:
user_t = jwt.decode(token, key_c)
except:
return make_response(jsonify({'operation': 'failed'}), 401)
user = UserModel.query.filter_by(id=user_t['id']).first()
post = PostModel.query.filter_by(id=id).first()
if not user.roleinfo.close_post_permission:
return make_response(jsonify({'operation': 'failed'}), 401)
post.closed = True
post.closed_on = datetime.now()
post.closed_by = user.id
db.session.commit()
return make_response(jsonify({'operation': 'success'}), 200)
@api.route("/post/edit/<int:id>", methods=['GET', 'POST'])
def edit_post(id):
token = request.args.get('t')
if not token:
return make_response(jsonify({'operation': 'failed'}), 401)
try:
user_t = jwt.decode(token, key_c)
except:
return make_response(jsonify({'operation': 'failed'}), 401)
user = UserModel.query.filter_by(id=user_t['id']).first()
post = PostModel.query.filter_by(id=id).first()
if request.method == 'POST':
data = request.json
post.text = data['text']
post.title = data['title']
post_link = (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(post.id)
db.session.commit()
return make_response(jsonify({'operation': 'success', 'link': post_link}), 200)
post_json = {}
post_json['title'] = post.title
post_json['text'] = post.text
post_json['id'] = post.id
return make_response(jsonify(post_json), 200)
@api.route("/reply/delete")
def delete_reply():
token = request.args.get('t')
reply_id = request.args.get('id')
if not token or not reply_id:
return make_response(jsonify({'operation': 'failed'}), 401)
try:
user_t = jwt.decode(token, key_c)
except:
return make_response(jsonify({'operation': 'failed'}), 401)
user = UserModel.query.filter_by(id=user_t['id']).first()
reply = ReplyModel.query.filter_by(id=reply_id).first()
if user.roleinfo.delete_reply_permission == False and user.id != reply.user:
return make_response(jsonify({'operation': 'no permission'}), 401)
ReplyModel.query.filter_by(id=reply_id).delete()
db.session.commit()
return make_response(jsonify({'operation': 'success'}), 200)
@api.route("/reply/edit", methods=['POST'])
def edit_reply():
if request.method != 'POST':
return make_response(jsonify({'operation': 'error', 'error': 'Invalid method'}), 401)
data = request.json
if not data['token'] or not data['r_id'] or not data['content']:
return make_response(jsonify({'operation': 'error', 'error': 'Missing data'}), 401)
try:
decoded = jwt.decode(str(data['token']).encode(), key_c)
except:
return make_response(jsonify({'operation': 'error', 'error': 'Invalid token'}), 401)
reply = ReplyModel.query.filter_by(id=data['r_id']).first()
reply.text = data['content']
db.session.commit()
return make_response(jsonify({'operation': 'success'}), 200)
@api.route('/notifications')
def notifications():
token = request.args.get('t')
extended = request.args.get('ex')
if not token:
return make_response(jsonify({'operation': 'failed'}), 401)
try:
user_t = jwt.decode(token, key_c)
except:
return make_response(jsonify({'operation': 'failed'}), 401)
user = UserModel.query.filter_by(id=user_t['id']).first()
if extended == 'true':
notifications = {'notify': {'new': [], 'posts': [], 'comments': [], 'likes': [], 'follows': []},
'count_new': user.get_not_count(user.id)}
temp = {}
for val, n in enumerate(user.n_receiver):
if val == 50:
break
temp['body'] = n.body
temp['checked'] = n.checked
temp['id'] = n.id
temp['title'] = n.title
temp['link'] = n.link
temp['category'] = n.category
temp['author'] = {
'avatar': n.author.avatar,
'name': n.author.name
}
temp['time_ago'] = n.time_ago()
if n.checked == False:
notifications['notify']['new'].append(temp.copy())
if n.category == 'post':
notifications['notify']['posts'].append(temp.copy())
elif n.category == 'reply':
notifications['notify']['comments'].append(temp.copy())
elif n.category == 'like':
notifications['notify']['likes'].append(temp.copy())
elif n.category == 'follow':
notifications['notify']['follows'].append(temp.copy())
notifications['notify']['new'].sort(key=getItemForKeyN, reverse=True)
notifications['notify']['posts'].sort(key=getItemForKeyN, reverse=True)
notifications['notify']['comments'].sort(key=getItemForKeyN, reverse=True)
notifications['notify']['likes'].sort(key=getItemForKeyN, reverse=True)
notifications['notify']['follows'].sort(key=getItemForKeyN, reverse=True)
else:
limit = user.get_not_count(user.id) if user.get_not_count(user.id) < 10 else 10
notifications = {'notify': [], 'count_new': user.get_not_count(user.id), 'count': limit}
temp = {}
for n in user.n_receiver:
if n.checked == False:
temp['body'] = n.body
temp['checked'] = n.checked
temp['id'] = n.id
temp['title'] = n.title
temp['link'] = n.link
temp['category'] = n.category
temp['author'] = {
'avatar': n.author.avatar,
'name': n.author.name
}
temp['time_ago'] = n.time_ago()
notifications['notify'].append(temp.copy())
notifications['notify'].sort(key=getItemForKeyN, reverse=True)
notifications['notify'] = notifications['notify'][:limit]
return make_response(jsonify(notifications), 200)
@api.route("/notifications/check")
def check_not():
token = request.args.get('t')
notification_id = request.args.get('not_id')
if not token or not notification_id:
return make_response(jsonify({'operation': 'failed'}), 401)
try:
user_t = jwt.decode(token, key_c)
except:
return make_response(jsonify({'operation': 'failed'}), 401)
user = UserModel.query.filter_by(id=user_t['id']).first()
notification = Notifications_Model.query.filter_by(id=notification_id).first()
if notification is None:
return make_response(jsonify({'operation': 'failed'}), 401)
if notification.for_user != user.id:
return make_response(jsonify({'operation': 'failed'}), 401)
notification.checked = True
db.session.commit()
return make_response(jsonify({'operation': 'success'}), 200)
@api.route("/save-subscription", methods=['POST'])
def sub():
if request.method != 'POST':
return make_response(jsonify({'operation': 'failed'}), 401)
data = request.json
user_t = jwt.decode(data['user'], key_c)
sub = Subscriber(None, user_t['id'], None, None, str(data['sub_info']), True)
db.session.add(sub)
db.session.commit()
return make_response(jsonify({'operation': 'success'}), 200)
@api.route("/send-notification")
def notif():
check = Subscriber.query.filter_by(user=2).filter_by(is_active=True).all()
for c in check:
try:
sub = (str(c.subscription_info).encode().decode('utf-8')).replace("'", '"')
sub = sub.replace("None", "null")
send_web_push(json.loads(sub), "hello")
except:
pass
db.session.commit()
return make_response(jsonify({'operation': 'success'}), 200)
@api.route("/admin/dashboard")
def dashboard():
token = request.args.get('t')
if not token:
return make_response(jsonify({'operation': 'failed'}), 401)
try:
user_t = jwt.decode(token, key_c)
except:
return make_response(jsonify({'operation': 'failed'}), 401)
user = UserModel.query.filter_by(id=user_t['id']).first()
if not user.roleinfo.admin_panel_permission:
return make_response(jsonify({'operation': 'no permission'}), 401)
sessions = db.session.query(Analyze_Session).order_by(Analyze_Session.id).all()
now = dt.datetime.now()
sess = {}
sess_old = {}
label_days = []
referer = CustomDict()
country = CustomDict()
countries = CustomDict()
replies = {'old': 0, 'new': 0, 'perc': 0}
views = {'old': 0, 'new': 0, 'perc': 0}
users = {'old': 0, 'new': 0, 'perc': 0}
posts = {'old': 0, 'new': 0, 'perc': 0}
shares = {'old': 0, 'new': 0, 'perc': 0}
devices = {'old': {'mobile': 0, 'computer': 0}, 'new': {'mobile': 0, 'computer': 0}, 'perc': {'mobile': 0, 'computer': 0}}
months = {
'01': 'Junuary',
'02': 'February',
'03': 'March',
'04': 'April',
'05': 'May',
'06': 'June',
'07': 'July',
'08': 'August',
'09': 'September',
'10': 'October',
'11': 'November',
'12': 'December'
}
back_days = now - dt.timedelta(days=15)
back_perc = back_days - dt.timedelta(days=15)
pages = db.session.query(Analyze_Pages.name, func.count(Analyze_Pages.name).label('views')).filter(
Analyze_Pages.first_visited.between('{}-{}-{}'.format(back_days.year, back_days.month, back_days.day),
'{}-{}-{}'.format(now.year, now.month, now.day))).group_by(
Analyze_Pages.name).order_by(
func.count(Analyze_Pages.name).desc()).limit(10).all()
for session in sessions:
if session.referer is not None:
year, month, day = str(session.created_at).split("-")
date = dt.datetime(int(year), int(month), int(day))
if int(year) == int(now.year):
if now >= date >= back_days and session.bot == True:
if str(session.browser) == 'TwitterBot' or str(session.browser) == 'FacebookExternalHit':
shares['new'] += 1
if back_days >= date >= back_perc and session.bot == True:
if str(session.browser) == 'TwitterBot' or str(session.browser) == 'FacebookExternalHit':
shares['old'] += 1
if now >= date >= back_days and session.bot == False:
if str(session.os).lower() == 'android' or str(session.os).lower() == 'ios':
devices['new']['mobile'] += 1
else:
devices['new']['computer'] += 1
try:
sess[calendar.day_name[int(calendar.weekday(int(year), int(month), int(day)))] + ' ' + str(
day)] += 1
except:
sess.__setitem__(
calendar.day_name[int(calendar.weekday(int(year), int(month), int(day)))] + ' ' + str(day),
1)
if str(day) not in label_days and str(months[str(month)] + ' ' + day) not in label_days:
if int(day) == 1:
label_days.append(months[str(month)] + ' ' + day)
else:
label_days.append(str(day))
if str(session.referer) != 'None':
try:
if int(day) == 1:
referer[str(session.referer)][months[str(month)] + ' ' + day] += 1
else:
referer[str(session.referer)][str(day)] += 1
except:
if int(day) == 1:
referer[str(session.referer)][months[str(month)] + ' ' + day] = 1
else:
referer[str(session.referer)][str(day)] = 1
if str(session.iso_code) != 'None':
try:
country[str(session.iso_code)] += 1
except:
country[str(session.iso_code)] = 1
countries[str(session.iso_code)] = str(session.country)
if back_days >= date >= back_perc and session.bot == False:
if str(session.os).lower() == 'android' or str(session.os).lower() == 'ios':
devices['old']['mobile'] += 1
else:
devices['old']['computer'] += 1
try:
sess_old[calendar.day_name[int(calendar.weekday(int(year), int(month), int(day)))] + ' ' + str(
day)] += 1
except:
sess_old.__setitem__(
calendar.day_name[int(calendar.weekday(int(year), int(month), int(day)))] + ' ' + str(day),
1)
devices['perc']['mobile'] = round(((devices['new']['mobile'] - devices['old']['mobile']) - devices['old']['mobile']) % 100,2)
devices['perc']['computer'] = round(((devices['new']['computer'] - devices['old']['computer']) - devices['old']['computer']) % 100,2)
perc = Analyze_Pages.perc_replies()
if perc > 0:
replies['perc'] = str(perc)+f'% higher than in the last 15 days'
elif perc == 0:
replies['perc'] = str(perc)+f'% same in the last 15 days'
else:
replies['perc'] = str((perc*(-1)))+f'% lower than in the last 15 days'
perc = Analyze_Pages.perc_views()
if perc > 0:
views['perc'] = str(perc)+f'% higher than in the last 15 days'
elif perc == 0:
views['perc'] = str(perc)+f'% same in the last 15 days'
else:
views['perc'] = str((perc*(-1)))+f'% lower than in the last 15 days'
perc = Analyze_Pages.perc_users()
if perc > 0:
users['perc'] = str(perc)+f'% higher than in the last 15 days'
elif perc == 0:
users['perc'] = str(perc)+f'% same in the last 15 days'
else:
users['perc'] = str((perc*(-1)))+f'% lower than in the last 15 days'
perc = Analyze_Pages.perc_posts()
if perc > 0:
posts['perc'] = str(perc)+f'% higher than in the last 15 days'
elif perc == 0:
posts['perc'] = str(perc)+f'% same in the last 15 days'
else:
posts['perc'] = str((perc*(-1)))+f'% lower than in the last 15 days'
replies['new'] = Analyze_Pages.replies_15_days()
replies['old'] = Analyze_Pages.replies_30_days()
replies['p_perc'] = abs(Analyze_Pages.perc_replies()) if Analyze_Pages.perc_replies() <= 100 else 100
views['new'] = Analyze_Pages.views_15_days()
views['old'] = Analyze_Pages.views_30_days()
views['p_perc'] = abs(Analyze_Pages.perc_views()) if Analyze_Pages.perc_views() <= 100 else 100
users['new'] = Analyze_Pages.user_15_days()
users['old'] = Analyze_Pages.user_30_days()
users['p_perc'] = abs(Analyze_Pages.perc_users()) if Analyze_Pages.perc_users() <= 100 else 100
posts['new'] = Analyze_Pages.posts_15_days()
posts['old'] = Analyze_Pages.posts_30_days()
posts['p_perc'] = abs(Analyze_Pages.perc_posts()) if Analyze_Pages.perc_posts() <= 100 else 100
main_data = {'replies': replies, 'views': views, 'users': users, 'posts': posts, 'devices': devices}
views_data = {'new': sess, 'old': sess_old, 'days': label_days}
return make_response(jsonify({'operation': 'success', 'main_data': main_data, 'views': views_data}), 200)
@api.route("/admin/posts")
def a_posts():
token = request.args.get('t')
if not token:
return make_response(jsonify({'operation': 'failed'}), 401)
try:
user_t = jwt.decode(token, key_c)
except:
return make_response(jsonify({'operation': 'failed'}), 401)
user = UserModel.query.filter_by(id=user_t['id']).first()
if not user.roleinfo.admin_panel_permission:
return make_response(jsonify({'operation': 'no permission'}), 401)
now = dt.datetime.now()
back_days = now - dt.timedelta(days=15)
back_perc = back_days - dt.timedelta(days=15)
posts = PostModel.query.filter(PostModel.posted_on.between('{}-{}-{}'.format(back_perc.year, back_perc.month, back_perc.day), '{}-{}-{}'.format(now.year, now.month, now.day))).all()
unapproved = PostModel.query.filter_by(approved=False).all()
months = {
'01': 'Junuary',
'02': 'February',
'03': 'March',
'04': 'April',
'05': 'May',
'06': 'June',
'07': 'July',
'08': 'August',
'09': 'September',
'10': 'October',
'11': 'November',
'12': 'December'
}
label_days = []
posts_new = {}
posts_old = {}
posts_unapproved = []
posts_json = {}
for post in posts:
year, month, day = str(post.posted_on).split("-")
day, hour = day.split(" ")
date = dt.datetime(int(year), int(month), int(day))
if now >= date >= back_days:
try:
posts_new[calendar.day_name[int(calendar.weekday(int(year), int(month), int(day)))] + ' ' + str(day)] += 1
except:
posts_new.__setitem__(
calendar.day_name[int(calendar.weekday(int(year), int(month), int(day)))] + ' ' + str(day),
1)
if str(day) not in label_days and str(months[str(month)] + ' ' + day) not in label_days:
if int(day) == 1:
label_days.append(months[str(month)] + ' ' + day)
else:
label_days.append(str(day))
if back_days >= date >= back_perc:
try:
posts_old[calendar.day_name[int(calendar.weekday(int(year), int(month), int(day)))] + ' ' + str(
day)] += 1
except:
posts_old.__setitem__(
calendar.day_name[int(calendar.weekday(int(year), int(month), int(day)))] + ' ' + str(day),
1)
for post in unapproved:
posts_json['title'] = post.title
posts_json['id'] = post.id
posts_json['thumbnail'] = post.thumbnail
posts_json['posted_on'] = post.time_ago()
posts_json['author'] = {
'name': post.user_in.name,
'avatar': post.user_in.avatar,
'real_name': post.user_in.real_name
}
posts_json['likes'] = post.likes
posts_json['read_time'] = post.read_time
posts_json['link'] = (str(post.title).replace(' ', '-')).replace('?', '') + '-' + str(post.id)
posts_json['tags'] = TagModel.query.with_entities(TagModel.name).filter_by(post = post.id).all()
posts_unapproved.append(posts_json.copy())
posts_json.clear()
return make_response(jsonify({'operation': 'success', 'posts': {'old': posts_old, 'new': posts_new}, 'unapproved': posts_unapproved}), 200)
| cleanhtml |
hybrid_decrypt.rs | // Copyright 2020 The Tink-Rust Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
//! Hybrid decryption.
/// `HybridDecrypt` is the interface for hybrid decryption.
///
/// Hybrid Encryption combines the efficiency of symmetric encryption with the convenience of
/// public-key encryption: to encrypt a message a fresh symmetric key is generated and used to
/// encrypt the actual plaintext data, while the recipient’s public key is used to encrypt the
/// symmetric key only, and the final ciphertext consists of the symmetric ciphertext and the
/// encrypted symmetric key.
///
/// ## WARNING
///
/// Hybrid Encryption does not provide authenticity, that is the recipient of an encrypted message
/// does not know the identity of the sender. Similar to general public-key encryption schemes the
/// security goal of Hybrid Encryption is to provide privacy only. In other words, Hybrid Encryption
/// is secure if and only if the recipient can accept anonymous messages or can rely on other
/// mechanisms to authenticate the sender.
///
/// ## Security guarantees
///
/// The functionality of Hybrid Encryption is represented as a pair of primitives (traits):
/// `HybridEncrypt` for encryption of data, and `HybridDecrypt` for decryption.
/// Implementations of these traits are secure against adaptive chosen ciphertext attacks. In
/// addition to plaintext the encryption takes an extra parameter contextInfo, which
/// usually is public data implicit from the context, but should be bound to the resulting
/// ciphertext, i.e. the ciphertext allows for checking the integrity of `context_info` (but
/// there are no guarantees wrt. the secrecy or authenticity of `context_info`).
///
/// `context_info` can be empty, but to ensure the correct decryption of a ciphertext
/// the same value must be provided for the decryption operation as was used during encryption
/// (`HybridEncrypt`).
///
/// A concrete implementation of this trait can implement the binding of `context_info` to
/// the ciphertext in various ways, for example:
///
/// - use `context_info` as "associated data"-input for the employed AEAD symmetric encryption
/// (cf. [RFC 5116](https://tools.ietf.org/html/rfc5116)).
/// - use `context_info` as "CtxInfo"-input for HKDF (if the implementation uses HKDF as key
/// derivation function, cf. [RFC 5869](https://tools.ietf.org/html/rfc5869)).
pub trait HybridDecrypt: HybridDecryptBoxClone {
/// Decrypt ciphertext verifying the integrity of `context_info`.
/// Returns resulting plaintext
fn decrypt(&self, ciphertext: &[u8], context_info: &[u8]) -> Result<Vec<u8>, crate::TinkError>;
}
/// Trait bound to indicate that primitive trait objects should support cloning |
/// Default implementation of the box-clone trait bound for any underlying
/// concrete type that implements [`Clone`].
impl<T> HybridDecryptBoxClone for T
where
T: 'static + HybridDecrypt + Clone,
{
fn box_clone(&self) -> Box<dyn HybridDecrypt> {
Box::new(self.clone())
}
} | /// themselves as trait objects.
pub trait HybridDecryptBoxClone {
fn box_clone(&self) -> Box<dyn HybridDecrypt>;
} |
path.rs | //! Path Filters
//!
//! The filters here work on the "path" of requests.
//!
//! - [`path`](./fn.path.html) matches a specific segment, like `/foo`.
//! - [`param`](./fn.param.html) tries to parse a segment into a type, like `/:u16`.
//! - [`end`](./fn.end.html) matches when the path end is found.
//! - [`path!`](../../macro.path.html) eases combining multiple `path` and `param` filters.
//!
//! # Routing
//!
//! Routing in warp is simple yet powerful.
//!
//! First up, matching a single segment:
//!
//! ```
//! use warp::Filter;
//!
//! // GET /hi
//! let hi = warp::path("hi").map(|| {
//! "Hello, World!"
//! });
//! ```
//!
//! How about multiple segments? It's easiest with the `path!` macro:
//!
//! ```
//! # #[macro_use] extern crate warp; fn main() {
//! # use warp::Filter;
//! // GET /hello/from/warp
//! let hello_from_warp = path!("hello" / "from" / "warp").map(|| {
//! "Hello from warp!"
//! });
//! # }
//! ```
//!
//! Neat! But do I handle **parameters** in paths?
//!
//! ```
//! # #[macro_use] extern crate warp; fn main() {
//! # use warp::Filter;
//! // GET /sum/:u32/:u32
//! let sum = path!("sum" / u32 / u32).map(|a, b| {
//! format!("{} + {} = {}", a, b, a + b)
//! });
//! # }
//! ```
//!
//! In fact, any type that implements `FromStr` can be used, in any order:
//!
//! ```
//! # #[macro_use] extern crate warp; fn main() {
//! # use warp::Filter;
//! // GET /:u16/times/:u16
//! let times = path!(u16 / "times" / u16).map(|a, b| {
//! format!("{} times {} = {}", a, b, a * b)
//! });
//! # }
//! ```
//!
//! Oh shoot, those math routes should be **mounted** at a different path,
//! is that possible? Yep!
//!
//! ```
//! # use warp::Filter;
//! # let sum = warp::any().map(warp::reply);
//! # let times = sum.clone();
//! // GET /math/sum/:u32/:u32
//! // GET /math/:u16/times/:u16
//! let math = warp::path("math");
//! let math_sum = math.and(sum);
//! let math_times = math.and(times);
//! ```
//!
//! What! `and`? What's that do?
//!
//! It combines the filters in a sort of "this and then that" order. In fact,
//! it's exactly what the `path!` macro has been doing internally.
//!
//! ```
//! # use warp::Filter;
//! // GET /bye/:string
//! let bye = warp::path("bye")
//! .and(warp::path::param())
//! .map(|name: String| {
//! format!("Good bye, {}!", name)
//! });
//! ```
//!
//! Ah, so, can filters do things besides `and`?
//!
//! Why, yes they can! They can also `or`! As you might expect, `or` creates a
//! "this or else that" chain of filters. If the first doesn't succeed, then
//! it tries the other.
//!
//! So, those `math` routes could have been **mounted** all as one, with `or`.
//!
//!
//! ```
//! # use warp::Filter;
//! # let sum = warp::any().map(warp::reply);
//! # let times = sum.clone();
//! // GET /math/sum/:u32/:u32
//! // GET /math/:u16/times/:u16
//! let math = warp::path("math")
//! .and(sum.or(times));
//! ```
//!
//! It turns out, using `or` is how you combine everything together into a
//! single API.
//!
//! ```
//! # use warp::Filter;
//! # let hi = warp::any().map(warp::reply);
//! # let hello_from_warp = hi.clone();
//! # let bye = hi.clone();
//! # let math = hi.clone();
//! // GET /hi
//! // GET /hello/from/warp
//! // GET /bye/:string
//! // GET /math/sum/:u32/:u32
//! // GET /math/:u16/times/:u16
//! let routes = hi
//! .or(hello_from_warp)
//! .or(bye)
//! .or(math);
//! ```
use std::fmt;
use std::str::FromStr;
use http::uri::PathAndQuery;
use filter::{filter_fn, one, Filter, One, Tuple};
use never::Never;
use reject::{self, Rejection};
use route::Route;
/// Create an exact match path segment `Filter`.
///
/// This will try to match exactly to the current request path segment.
///
/// # Panics
///
/// Exact path filters cannot be empty, or contain slashes.
///
/// # Example
///
/// ```
/// use warp::Filter;
///
/// // Matches '/hello'
/// let hello = warp::path("hello")
/// .map(|| "Hello, World!");
/// ```
pub fn path(p: &'static str) -> impl Filter<Extract = (), Error = Rejection> + Copy {
assert!(!p.is_empty(), "exact path segments should not be empty");
assert!(
!p.contains('/'),
"exact path segments should not contain a slash: {:?}",
p
);
segment(move |seg| {
trace!("{:?}?: {:?}", p, seg);
if seg == p {
Ok(())
} else {
Err(reject::not_found())
}
})
}
#[doc(hidden)]
#[deprecated(note = "renamed to warp::path::end")]
pub fn index() -> impl Filter<Extract = (), Error = Rejection> + Copy {
end()
}
/// Matches the end of a route.
///
/// # Example
///
/// ```
/// use warp::Filter;
///
/// // Matches '/'
/// let hello = warp::path::end()
/// .map(|| "Hello, World!");
/// ```
pub fn end() -> impl Filter<Extract = (), Error = Rejection> + Copy {
filter_fn(move |route| {
if route.path().is_empty() {
Ok(())
} else {
Err(reject::not_found())
}
})
}
/// Extract a parameter from a path segment.
///
/// This will try to parse a value from the current request path
/// segment, and if successful, the value is returned as the `Filter`'s
/// "extracted" value.
///
/// If the value could not be parsed, rejects with a `404 Not Found`.
///
/// # Example
///
/// ```
/// use warp::Filter;
///
/// let route = warp::path::param()
/// .map(|id: u32| {
/// format!("You asked for /{}", id)
/// });
/// ```
pub fn param<T: FromStr + Send>() -> impl Filter<Extract = One<T>, Error = Rejection> + Copy {
segment(|seg| {
trace!("param?: {:?}", seg);
if seg.is_empty() {
return Err(reject::not_found());
}
T::from_str(seg).map(one).map_err(|_| reject::not_found())
})
}
/// Extract a parameter from a path segment.
///
/// This will try to parse a value from the current request path
/// segment, and if successful, the value is returned as the `Filter`'s
/// "extracted" value.
///
/// If the value could not be parsed, rejects with a `404 Not Found`. In
/// contrast of `param` method, it reports an error cause in response.
///
/// # Example
///
/// ```
/// use warp::Filter;
///
/// let route = warp::path::param2()
/// .map(|id: u32| {
/// format!("You asked for /{}", id)
/// });
/// ```
pub fn param2<T>() -> impl Filter<Extract = One<T>, Error = Rejection> + Copy
where
T: FromStr + Send,
T::Err: Into<::reject::Cause>,
{
segment(|seg| {
trace!("param?: {:?}", seg);
if seg.is_empty() {
return Err(reject::not_found());
}
T::from_str(seg).map(one).map_err(|err| {
#[allow(deprecated)]
reject::not_found().with(err.into())
})
})
}
/// Extract the unmatched tail of the path.
///
/// This will return a `Tail`, which allows access to the rest of the path
/// that previous filters have not already matched.
///
/// # Example
///
/// ```
/// use warp::Filter;
///
/// let route = warp::path("foo")
/// .and(warp::path::tail())
/// .map(|tail| {
/// // GET /foo/bar/baz would return "bar/baz".
/// format!("The tail after foo is {:?}", tail)
/// });
/// ```
pub fn tail() -> impl Filter<Extract = One<Tail>, Error = Never> + Copy {
filter_fn(move |route| {
let path = path_and_query(&route);
let idx = route.matched_path_index();
// Giving the user the full tail means we assume the full path
// has been matched now.
let end = path.path().len() - idx;
route.set_unmatched_path(end);
Ok(one(Tail {
path,
start_index: idx,
}))
})
}
/// Represents that tail part of a request path, returned by the `tail()` filter.
pub struct Tail {
path: PathAndQuery,
start_index: usize,
}
impl Tail {
/// Get the `&str` representation of the remaining path.
pub fn as_str(&self) -> &str {
&self.path.path()[self.start_index..]
}
}
impl fmt::Debug for Tail {
fn | (&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self.as_str(), f)
}
}
/// Peek at the unmatched tail of the path, without affecting the matched path.
///
/// This will return a `Peek`, which allows access to the rest of the path
/// that previous filters have not already matched. This differs from `tail`
/// in that `peek` will **not** set the entire path as matched.
///
/// # Example
///
/// ```
/// use warp::Filter;
///
/// let route = warp::path("foo")
/// .and(warp::path::peek())
/// .map(|peek| {
/// // GET /foo/bar/baz would return "bar/baz".
/// format!("The path after foo is {:?}", peek)
/// });
/// ```
pub fn peek() -> impl Filter<Extract = One<Peek>, Error = Never> + Copy {
filter_fn(move |route| {
let path = path_and_query(&route);
let idx = route.matched_path_index();
Ok(one(Peek {
path,
start_index: idx,
}))
})
}
/// Represents that tail part of a request path, returned by the `tail()` filter.
pub struct Peek {
path: PathAndQuery,
start_index: usize,
}
impl Peek {
/// Get the `&str` representation of the remaining path.
pub fn as_str(&self) -> &str {
&self.path.path()[self.start_index..]
}
/// Get an iterator over the segments of the peeked path.
pub fn segments(&self) -> impl Iterator<Item = &str> {
self.as_str().split('/').filter(|seg| !seg.is_empty())
}
}
impl fmt::Debug for Peek {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self.as_str(), f)
}
}
/// Returns the full request path, irrespective of other filters.
///
/// This will return a `FullPath`, which can be stringified to return the
/// full path of the request.
///
/// This is more useful in generic pre/post-processing filters, and should
/// probably not be used for request matching/routing.
///
/// # Example
///
/// ```
/// use warp::{Filter, path::FullPath};
/// use std::{collections::HashMap, sync::{Arc, Mutex}};
///
/// let counts = Arc::new(Mutex::new(HashMap::new()));
/// let access_counter = warp::path::full()
/// .map(move |path: FullPath| {
/// let mut counts = counts.lock().unwrap();
///
/// *counts.entry(path.as_str().to_string())
/// .and_modify(|c| *c += 1)
/// .or_insert(0)
/// });
///
/// let route = warp::path("foo")
/// .and(warp::path("bar"))
/// .and(access_counter)
/// .map(|count| {
/// format!("This is the {}th visit to this URL!", count)
/// });
/// ```
pub fn full() -> impl Filter<Extract = One<FullPath>, Error = Never> + Copy {
filter_fn(move |route| Ok(one(FullPath(path_and_query(&route)))))
}
/// Represents the full request path, returned by the `full()` filter.
pub struct FullPath(PathAndQuery);
impl FullPath {
/// Get the `&str` representation of the request path.
pub fn as_str(&self) -> &str {
&self.0.path()
}
}
impl fmt::Debug for FullPath {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self.as_str(), f)
}
}
fn segment<F, U>(func: F) -> impl Filter<Extract = U, Error = Rejection> + Copy
where
F: Fn(&str) -> Result<U, Rejection> + Copy,
U: Tuple + Send,
{
filter_fn(move |route| {
let (u, idx) = {
let seg = route
.path()
.splitn(2, '/')
.next()
.expect("split always has at least 1");
(func(seg)?, seg.len())
};
route.set_unmatched_path(idx);
Ok(u)
})
}
fn path_and_query(route: &Route) -> PathAndQuery {
route
.uri()
.path_and_query()
.expect("server URIs should always have path_and_query")
.clone()
}
/// Convenient way to chain multiple path filters together.
///
/// Any number of either type identifiers or string expressions can be passed,
/// each separated by a forward slash (`/`). Strings will be used to match
/// path segments exactly, and type identifiers are used just like
/// [`param`](filters::path::param) filters.
///
/// # Example
///
/// ```
/// # #[macro_use] extern crate warp; fn main() {
/// use warp::Filter;
///
/// // Match `/sum/:a/:b`
/// let route = path!("sum" / u32 / u32)
/// .map(|a, b| {
/// format!("{} + {} = {}", a, b, a + b)
/// });
/// # }
/// ```
///
/// The equivalent filter chain without using the `path!` macro looks this:
///
/// ```
/// use warp::Filter;
///
/// let route = warp::path("sum")
/// .and(warp::path::param::<u32>())
/// .and(warp::path::param::<u32>())
/// .map(|a, b| {
/// format!("{} + {} = {}", a, b, a + b)
/// });
/// ```
///
/// In fact, this is exactly what the macro expands to.
#[macro_export]
macro_rules! path {
(@start $first:tt $(/ $tail:tt)*) => ({
let __p = path!(@segment $first);
$(
let __p = $crate::Filter::and(__p, path!(@segment $tail));
)*
__p
});
(@segment $param:ty) => (
$crate::path::param::<$param>()
);
(@segment $s:expr) => (
$crate::path($s)
);
($($pieces:tt)*) => (
path!(@start $($pieces)*)
);
}
| fmt |
state_2.py | import state
def | ():
state.x = 2 | change |
reverbObjectBinauralisation_flexible.py | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 14 15:59:11 2017
@author: af5u13
"""
# Usage for debugging from raw Python console
#exec(open("/Users/af5u13/dev/visr/src/python/scripts/rsao/reverbObjectBinauralisation.py").read())
import visr
import signalflows
import panning
import pml
import rbbl
import rcl
import rrl
#import objectmodel as om
import h5py
import numpy as np;
import matplotlib.pyplot as plt
import os
class ReverbToBinaural( visr.CompositeComponent ):
def __init__( self, context, name, parent,
loudspeakerConfig,
numberOfInputs,
rendererOutputs,
interpolationPeriod,
diffusionFilters,
trackingConfiguration,
brirRouting,
brirFilters,
scenePort = 4242,
reverbConfiguration=''):
super(ReverbToBinaural,self).__init__( context, name, parent )
self.coreRenderer = signalflows.BaselineRenderer( ctxt, 'renderer', self,
loudspeakerConfig=loudspeakerConfig,
numberOfInputs=numberOfInputs,
numberOfOutputs=rendererOutputs,
interpolationPeriod=interpolationPeriod,
diffusionFilters=diffusionFilters,
reverbConfig=reverbConfiguration,
sceneReceiverPort=scenePort,
trackingConfiguration=trackingConfiguration
)
numFilters = brirFilters.numberOfRows
firLength = brirFilters.numberOfColumns
numRoutings = brirRouting.size
self.convolver = rcl.FirFilterMatrix( ctxt, 'convolver', self,
numberOfInputs=rendererOutputs, | filters=brirFilters,
routings=brirRouting,
controlInputs=rcl.FirFilterMatrix.ControlPortConfig.NoInputs
)
self.audioIn = visr.AudioInputFloat( "audioIn", self, numberOfInputs )
self.audioOut = visr.AudioOutputFloat( "audioOut", self, 2 )
self.audioConnection( self.audioIn, self.coreRenderer.audioPort("input"))
self.audioConnection( self.coreRenderer.audioPort("output"),
self.convolver.audioPort("in"))
self.audioConnection( self.convolver.audioPort("out"), self.audioOut )
if len(trackingConfiguration) > 0:
self.posIn = visr.ParameterInput( "posIn", self,
pml.ListenerPosition.staticType,
pml.DoubleBufferingProtocol.staticType,
pml.EmptyParameterConfig() )
self.parameterConnection( self.posIn, self.coreRenderer.parameterPort("trackingPositionInput") )
# Get VISR base directory from rsao subdirectory.
visrBaseDirectory = os.path.normpath(os.path.join( os.getcwd(), '../../../..' )).replace('\\','/')
blockSize = 1024
samplingFrequency = 48000
parameterUpdatePeriod = 1024
numBlocks = 8
signalLength = blockSize * numBlocks
t = 1.0/samplingFrequency * np.arange(0,signalLength)
numObjects = 1;
ctxt = visr.SignalFlowContext( blockSize, samplingFrequency)
lspConfigFile = os.path.join( visrBaseDirectory, 'config/bbc/bs2051-4+5+0.xml').replace('\\','/')
# lspConfigFile = os.path.join( visrBaseDirectory, 'config/isvr/audiolab_39speakers_1subwoofer.xml' )
lc = panning.LoudspeakerArray( lspConfigFile )
numOutputChannels = np.max( lc.channelIndices() + lc.subwooferChannelIndices() ) +1
numLoudspeakers = lc.numberOfRegularLoudspeakers
diffFilterFile = os.path.join( visrBaseDirectory, 'config/filters/random_phase_allpass_64ch_512taps.wav')
diffFiltersRaw = np.array(pml.MatrixParameterFloat.fromAudioFile( diffFilterFile ),
dtype = np.float32 )
diffFilters = pml.MatrixParameterFloat( diffFiltersRaw[ np.array(lc.channelIndices() )-1,: ] )
reverbConfigStr = '{ "numReverbObjects": %i, "discreteReflectionsPerObject": 20, "lateReverbFilterLength": 2.0, "lateReverbDecorrelationFilters": "%s/config/filters/random_phase_allpass_64ch_1024taps.wav" }' % (numObjects, visrBaseDirectory )
## Load the BBC BRIR dataset
brirFile = os.path.join( os.getcwd(), 'BBC_BRIR.mat' )
brirMat = h5py.File( brirFile )
brirFull = np.array( brirMat['h_sweetspot'], dtype=np.float32 ).copy('C')
# Scalefactor to compensate for the very low amplitudes of the BBC BRIRs
brirScaleFactor = 500;
brirFlat = brirScaleFactor * np.concatenate( (brirFull[:,0,:], brirFull[:,1,:] ) )
brirFilterParam = pml.MatrixParameterFloat( brirFlat, 16 )
numBrirSpeakers = brirFull.shape[0]
# Define the routing for the binaural convolver such that it matches the organisation of the
# flat BRIR matrix.
filterRouting = rbbl.FilterRoutingList()
for idx in range(0, numBrirSpeakers ):
filterRouting.addRouting( idx, 0, idx, 1.0 )
filterRouting.addRouting( idx, 1, idx+numBrirSpeakers, 1.0 )
renderer = ReverbToBinaural( ctxt, 'top', None,
loudspeakerConfig=lc,
numberOfInputs=numObjects,
rendererOutputs=numOutputChannels,
interpolationPeriod=parameterUpdatePeriod,
diffusionFilters=diffFilters,
trackingConfiguration='',
brirFilters = brirFilterParam,
brirRouting = filterRouting,
reverbConfiguration=reverbConfigStr,
scenePort = 4242
)
print( 'Created renderer.' )
flow = rrl.AudioSignalFlow( renderer )
## Non-realtime code
#inputSignal = np.zeros( (numObjects, signalLength ), dtype=np.float32 )
## inputSignal[0,:] = 0.75*np.sin( 2.0*np.pi*440 * t )
#inputSignal[ 0, 100 ] = 1
#
#outputSignal = np.zeros( (2, signalLength ), dtype=np.float32 )
#
#for blockIdx in range(0,numBlocks):
## if blockIdx % (parameterUpdatePeriod/blockSize) == 0:
## ov = paramInput.data()
## ov.clear()
## ov.set( ro.objectId, ro )
## paramInput.swapBuffers()
#
# inputBlock = inputSignal[:, blockIdx*blockSize:(blockIdx+1)*blockSize]
# outputBlock = flow.process( inputBlock )
# outputSignal[:, blockIdx*blockSize:(blockIdx+1)*blockSize] = outputBlock
#
#
#plt.figure(1)
#plt.plot( t, outputSignal[0,:], 'bo-', t, outputSignal[1,:], 'rx-' )
#plt.show( block = False ) | numberOfOutputs=2,
maxFilters=numFilters,
filterLength=firLength,
maxRoutings=numRoutings, |
endpoint.go | // Copyright (C) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build analytics
package analytics
import (
"bytes"
"crypto/tls"
"fmt"
"net/http"
)
type endpoint func(payloads []string) error
func newBatchEndpoint(useragent string) endpoint {
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
client := &http.Client{Transport: tr}
return func(payloads []string) error {
data := bytes.Buffer{}
for i, p := range payloads {
if i > 0 {
data.WriteRune('\n')
}
data.WriteString(p)
}
req, err := http.NewRequest("POST", "https://www.google-analytics.com/batch", &data)
if err != nil {
return err
}
if useragent != "" {
req.Header.Set("User-Agent", useragent)
}
res, err := client.Do(req)
if err != nil {
return err
}
if res.StatusCode != 200 {
return fmt.Errorf("Got status %v", res.StatusCode)
}
return nil
}
}
func newValidateEndpoint() endpoint | {
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
client := &http.Client{Transport: tr}
return func(payloads []string) error {
for _, p := range payloads {
data := bytes.NewBufferString(p)
req, err := http.NewRequest("POST", "https://www.google-analytics.com/debug/collect", data)
if err != nil {
return err
}
res, err := client.Do(req)
if err != nil {
panic(err)
}
if res.StatusCode != 200 {
panic(fmt.Errorf("Got status %v", res.StatusCode))
}
}
return nil
}
} |
|
create-environment-api.js | var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
/**
* Contentful Environment API. Contains methods to access any operations at a space
* level, such as creating and reading entities contained in a space.
* @namespace ContentfulEnvironmentAPI
*/
import cloneDeep from 'lodash/cloneDeep';
import { createRequestConfig } from 'contentful-sdk-core';
import errorHandler from './error-handler';
import entities from './entities';
/**
* @memberof ContentfulEnvironmentAPI
* @typedef {Object} ContentfulEnvironmentAPI
* @prop {function} delete
* @prop {function} update
* @prop {function} getContentType
* @prop {function} getContentTypes
* @prop {function} createContentType
* @prop {function} createContentTypeWithId
* @prop {function} getEntry
* @prop {function} getEntries
* @prop {function} createEntry
* @prop {function} createEntryWithId
* @prop {function} getAsset
* @prop {function} getAssets
* @prop {function} createAsset
* @prop {function} createAssetWithId
* @prop {function} getLocale
* @prop {function} getLocales
* @prop {function} createLocale
* @prop {function} getUiExtension
* @prop {function} getUiExtensions
* @prop {function} createUiExtension
* @prop {function} createUiExtensionWithId
* @prop {function} getEntrySnapshots
* @prop {function} getContentTypeSnapshots
*/
/**
* Creates API object with methods to access the Environment API
* @private
* @param {Object} params - API initialization params
* @prop {Object} http - HTTP client instance
* @prop {Object} entities - Object with wrapper methods for each kind of entity
* @return {ContentfulEnvironmentAPI}
*/
export default function createEnvironmentApi(_ref) {
var http = _ref.http,
httpUpload = _ref.httpUpload;
var wrapEnvironment = entities.environment.wrapEnvironment;
var _entities$contentType = entities.contentType,
wrapContentType = _entities$contentType.wrapContentType,
wrapContentTypeCollection = _entities$contentType.wrapContentTypeCollection;
var _entities$entry = entities.entry,
wrapEntry = _entities$entry.wrapEntry,
wrapEntryCollection = _entities$entry.wrapEntryCollection;
var _entities$asset = entities.asset,
wrapAsset = _entities$asset.wrapAsset,
wrapAssetCollection = _entities$asset.wrapAssetCollection;
var _entities$locale = entities.locale,
wrapLocale = _entities$locale.wrapLocale,
wrapLocaleCollection = _entities$locale.wrapLocaleCollection;
var wrapSnapshotCollection = entities.snapshot.wrapSnapshotCollection;
var wrapEditorInterface = entities.editorInterface.wrapEditorInterface;
var wrapUpload = entities.upload.wrapUpload;
var _entities$uiExtension = entities.uiExtension,
wrapUiExtension = _entities$uiExtension.wrapUiExtension,
wrapUiExtensionCollection = _entities$uiExtension.wrapUiExtensionCollection;
/**
* Environment instances.
* @namespace Environment
*/
/**
* Deletes the environment
* @memberof Environment
* @func delete
* @return {Promise} Promise for the deletion. It contains no data, but the Promise error case should be handled.
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.delete())
* .then(() => console.log('Environment deleted.'))
* .catch(console.error)
*/
function deleteEnvironment() {
return http.delete('').then(function (response) {}, errorHandler);
}
/**
* Updates the environment
* @memberof Environment
* @func update
* @return {Promise<Environment.Environment>} Promise for the updated environment.
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => {
* environment.name = 'New name'
* return environment.update()
* })
* .then((environment) => console.log(`Environment ${environment.sys.id} renamed.`)
* .catch(console.error)
*/
function updateEnvironment() {
var raw = this.toPlainObject();
var data = cloneDeep(raw);
delete data.sys;
return http.put('', data, {
headers: {
'X-Contentful-Version': raw.sys.version
}
}).then(function (response) {
return wrapEnvironment(http, response.data);
}, errorHandler);
}
/**
* Gets a Content Type
* @memberof ContentfulEnvironmentAPI
* @param {string} id
* @return {Promise<ContentType.ContentType>} Promise for a Content Type
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getContentType('<content_type_id>'))
* .then((contentType) => console.log(contentType))
* .catch(console.error)
*/
function getContentType(id) {
return http.get('content_types/' + id).then(function (response) {
return wrapContentType(http, response.data);
}, errorHandler);
}
/**
* Gets an EditorInterface for a ContentType
* @memberof ContentfulEnvironmentAPI
* @param {string} contentTypeId
* @return {Promise<EditorInterface.EditorInterface>} Promise for an EditorInterface
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getEditorInterfaceForContentType('<content_type_id>'))
* .then((EditorInterface) => console.log(EditorInterface))
* .catch(console.error)
*/
function getEditorInterfaceForContentType(contentTypeId) {
return http.get('content_types/' + contentTypeId + '/editor_interface').then(function (response) {
return wrapEditorInterface(http, response.data);
}, errorHandler);
}
/**
* Gets a collection of Content Types
* @memberof ContentfulEnvironmentAPI
* @param {Object - Object with search parameters. Check the <a href="https://www.contentful.com/developers/docs/javascript/tutorials/using-js-cda-sdk/#retrieving-entries-with-search-parameters">JS SDK tutorial</a> and the <a href="https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/search-parameters">REST API reference</a> for more details.
* @return {Promise<ContentType.ContentTypeCollection>} Promise for a collection of Content Types
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getContentTypes())
* .then((response) => console.log(response.items))
* .catch(console.error)
*/
function getContentTypes() {
var query = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
return http.get('content_types', createRequestConfig({ query: query })).then(function (response) {
return wrapContentTypeCollection(http, response.data);
}, errorHandler);
}
/**
* Creates a Content Type
* @memberof ContentfulEnvironmentAPI
* @see {ContentType}
* @param {object} data - Object representation of the Content Type to be created
* @return {Promise<ContentType.ContentType>} Promise for the newly created Content Type
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createContentType({
* name: 'Blog Post',
* fields: [
* {
* id: 'title',
* name: 'Title',
* required: true,
* localized: false,
* type: 'Text'
* }
* ]
* }))
* .then((contentType) => console.log(contentType))
* .catch(console.error)
*/
function createContentType(data) {
return http.post('content_types', data).then(function (response) {
return wrapContentType(http, response.data);
}, errorHandler);
}
/**
* Creates a Content Type with a custom id
* @memberof ContentfulEnvironmentAPI
* @see {ContentType.ContentType}
* @param {string} id - Content Type ID
* @param {object} data - Object representation of the Content Type to be created
* @return {Promise<ContentType.ContentType>} Promise for the newly created Content Type
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createContentTypeWithId('<custom-id>', {
* name: 'Blog Post', | * required: true,
* localized: false,
* type: 'Text'
* }
* ]
* }))
* .then((contentType) => console.log(contentType))
* .catch(console.error)
*/
function createContentTypeWithId(id, data) {
return http.put('content_types/' + id, data).then(function (response) {
return wrapContentType(http, response.data);
}, errorHandler);
}
/**
* Gets an Entry
* Warning: if you are using the select operator, when saving, any field that was not selected will be removed
* from your entry in the backend
* @memberof ContentfulEnvironmentAPI
* @param {string} id
* @param {Object=} query - Object with search parameters. In this method it's only useful for `locale`.
* @return {Promise<Entry.Entry>} Promise for an Entry
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getEntry('<entry-id>'))
* .then((entry) => console.log(entry))
* .catch(console.error)
*/
function getEntry(id) {
var query = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
normalizeSelect(query);
return http.get('entries/' + id, createRequestConfig({ query: query })).then(function (response) {
return wrapEntry(http, response.data);
}, errorHandler);
}
/**
* Gets a collection of Entries
* Warning: if you are using the select operator, when saving, any field that was not selected will be removed
* from your entry in the backend
* @memberof ContentfulEnvironmentAPI
* @param {Object=} query - Object with search parameters. Check the <a href="https://www.contentful.com/developers/docs/javascript/tutorials/using-js-cda-sdk/#retrieving-entries-with-search-parameters">JS SDK tutorial</a> and the <a href="https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/search-parameters">REST API reference</a> for more details.
* @return {Promise<Entry.EntryCollection>} Promise for a collection of Entries
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getEntries({'content_type': 'foo'})) // you can add more queries as 'key': 'value'
* .then((response) => console.log(response.items))
* .catch(console.error)
*/
function getEntries() {
var query = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
normalizeSelect(query);
return http.get('entries', createRequestConfig({ query: query })).then(function (response) {
return wrapEntryCollection(http, response.data);
}, errorHandler);
}
/**
* Creates a Entry
* @memberof ContentfulEnvironmentAPI
* @see {Entry.Entry}
* @param {string} contentTypeId - The Content Type which this Entry is based on
* @param {object} data - Object representation of the Entry to be created
* @return {Promise<Entry.Entry>} Promise for the newly created Entry
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createEntry('<content_type_id>', {
* fields: {
* title: {
* 'en-US': 'Entry title'
* }
* }
* }))
* .then((entry) => console.log(entry))
* .catch(console.error)
*/
function createEntry(contentTypeId, data) {
return http.post('entries', data, {
headers: {
'X-Contentful-Content-Type': contentTypeId
}
}).then(function (response) {
return wrapEntry(http, response.data);
}, errorHandler);
}
/**
* Creates a Entry with a custom id
* @memberof ContentfulEnvironmentAPI
* @see {Entry.Entry}
* @param {string} contentTypeId - The Content Type which this Entry is based on
* @param {string} id - Entry ID
* @param {object} data - Object representation of the Entry to be created
* @return {Promise<Entry.Entry>} Promise for the newly created Entry
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* // Create entry
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createEntryWithId('<content_type_id>', '<entry_id>', {
* fields: {
* title: {
* 'en-US': 'Entry title'
* }
* }
* }))
* .then((entry) => console.log(entry))
* .catch(console.error)
*/
function createEntryWithId(contentTypeId, id, data) {
return http.put('entries/' + id, data, {
headers: {
'X-Contentful-Content-Type': contentTypeId
}
}).then(function (response) {
return wrapEntry(http, response.data);
}, errorHandler);
}
/**
* Gets an Asset
* Warning: if you are using the select operator, when saving, any field that was not selected will be removed
* from your entry in the backend
* @memberof ContentfulEnvironmentAPI
* @param {string} id
* @param {Object=} query - Object with search parameters. In this method it's only useful for `locale`.
* @return {Promise<Asset.Asset>} Promise for an Asset
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getAsset('<asset_id>'))
* .then((asset) => console.log(asset))
* .catch(console.error)
*/
function getAsset(id) {
var query = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
normalizeSelect(query);
return http.get('assets/' + id, createRequestConfig({ query: query })).then(function (response) {
return wrapAsset(http, response.data);
}, errorHandler);
}
/**
* Gets a collection of Assets
* Warning: if you are using the select operator, when saving, any field that was not selected will be removed
* from your entry in the backend
* @memberof ContentfulEnvironmentAPI
* @param {Object=} query - Object with search parameters. Check the <a href="https://www.contentful.com/developers/docs/javascript/tutorials/using-js-cda-sdk/#retrieving-entries-with-search-parameters">JS SDK tutorial</a> and the <a href="https://www.contentful.com/developers/docs/references/content-delivery-api/#/reference/search-parameters">REST API reference</a> for more details.
* @return {Promise<Asset.AssetCollection>} Promise for a collection of Assets
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getAssets())
* .then((response) => console.log(response.items))
* .catch(console.error)
*/
function getAssets() {
var query = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
normalizeSelect(query);
return http.get('assets', createRequestConfig({ query: query })).then(function (response) {
return wrapAssetCollection(http, response.data);
}, errorHandler);
}
/**
* Creates a Asset. After creation, call asset.processForLocale or asset.processForAllLocales to start asset processing.
* @memberof ContentfulEnvironmentAPI
* @see {Asset.Asset}
* @param {object} data - Object representation of the Asset to be created. Note that the field object should have an upload property on asset creation, which will be removed and replaced with an url property when processing is finished.
* @return {Promise<Asset.Asset>} Promise for the newly created Asset
* @example
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* // Create asset
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createAsset({
* fields: {
* title: {
* 'en-US': 'Playsam Streamliner'
* },
* file: {
* 'en-US': {
* contentType: 'image/jpeg',
* fileName: 'example.jpeg',
* upload: 'https://example.com/example.jpg'
* }
* }
* }
* }))
* .then((asset) => asset.process())
* .then((asset) => console.log(asset))
* .catch(console.error)
*/
function createAsset(data) {
return http.post('assets', data).then(function (response) {
return wrapAsset(http, response.data);
}, errorHandler);
}
/**
* Creates a Asset with a custom id. After creation, call asset.processForLocale or asset.processForAllLocales to start asset processing.
* @memberof ContentfulEnvironmentAPI
* @see {Asset.Asset}
* @param {string} id - Asset ID
* @param {object} data - Object representation of the Asset to be created. Note that the field object should have an upload property on asset creation, which will be removed and replaced with an url property when processing is finished.
* @return {Promise<Asset.Asset>} Promise for the newly created Asset
* @example
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* // Create asset
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createAssetWithId('<asset_id>', {
* title: {
* 'en-US': 'Playsam Streamliner'
* },
* file: {
* 'en-US': {
* contentType: 'image/jpeg',
* fileName: 'example.jpeg',
* upload: 'https://example.com/example.jpg'
* }
* }
* }))
* .then((asset) => asset.process())
* .then((asset) => console.log(asset))
* .catch(console.error)
*/
function createAssetWithId(id, data) {
return http.put('assets/' + id, data).then(function (response) {
return wrapAsset(http, response.data);
}, errorHandler);
}
/**
* Creates a Asset based on files. After creation, call asset.processForLocale or asset.processForAllLocales to start asset processing.
* @memberof ContentfulEnvironmentAPI
* @see {Asset.Asset}
* @param {object} data - Object representation of the Asset to be created. Note that the field object should have an uploadFrom property on asset creation, which will be removed and replaced with an url property when processing is finished.
* @param {object} data.fields.file.[LOCALE].file - Can be a string, an ArrayBuffer or a Stream.
* @return {Promise<Asset.Asset>} Promise for the newly created Asset
* @example
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createAssetFromFiles({
* fields: {
* file: {
* 'en-US': {
* contentType: 'image/jpeg',
* fileName: 'filename_english.jpg',
* file: createReadStream('path/to/filename_english.jpg')
* },
* 'de-DE': {
* contentType: 'image/svg+xml',
* fileName: 'filename_german.svg',
* file: '<svg><path fill="red" d="M50 50h150v50H50z"/></svg>'
* }
* }
* }
* }))
* .then((asset) => console.log(asset))
* .catch(console.error)
*/
function createAssetFromFiles(data) {
var file = data.fields.file;
return Promise.all(Object.keys(file).map(function (locale) {
var _file$locale = file[locale],
contentType = _file$locale.contentType,
fileName = _file$locale.fileName;
return createUpload(file[locale]).then(function (upload) {
return _defineProperty({}, locale, {
contentType: contentType,
fileName: fileName,
uploadFrom: {
sys: {
type: 'Link',
linkType: 'Upload',
id: upload.sys.id
}
}
});
});
})).then(function (uploads) {
data.fields.file = uploads.reduce(function (fieldsData, upload) {
return _extends({}, fieldsData, upload);
}, {});
return createAsset(data);
}).catch(errorHandler);
}
/**
* Creates a Upload.
* @memberof ContentfulEnvironmentAPI
* @param {object} data - Object with file information.
* @param {object} data.file - Actual file content. Can be a string, an ArrayBuffer or a Stream.
* @return {Promise<Upload>} Upload object containing information about the uploaded file.
* @example
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
* const uploadStream = createReadStream('path/to/filename_english.jpg')
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createUpload({file: uploadStream, 'image/png'})
* .then((upload) => console.log(upload))
* .catch(console.error)
*/
function createUpload(data) {
var file = data.file;
if (!file) {
return Promise.reject(new Error('Unable to locate a file to upload.'));
}
return httpUpload.post('uploads', file, {
headers: {
'Content-Type': 'application/octet-stream'
}
}).then(function (uploadResponse) {
return wrapUpload(httpUpload, uploadResponse.data);
}).catch(errorHandler);
}
/**
* Gets an Upload
* @memberof ContentfulEnvironmentAPI
* @param {string} id
* @return {Promise<Upload>} Promise for an Upload
* @example
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
* const uploadStream = createReadStream('path/to/filename_english.jpg')
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getUpload('<upload-id>')
* .then((upload) => console.log(upload))
* .catch(console.error)
*/
function getUpload(id) {
return httpUpload.get('uploads/' + id).then(function (response) {
return wrapUpload(http, response.data);
}).catch(errorHandler);
}
/**
* Gets a Locale
* @memberof ContentfulEnvironmentAPI
* @param {string} id
* @return {Promise<Locale.Locale>} Promise for an Locale
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getLocale('<locale_id>'))
* .then((locale) => console.log(locale))
* .catch(console.error)
*/
function getLocale(id) {
return http.get('locales/' + id).then(function (response) {
return wrapLocale(http, response.data);
}, errorHandler);
}
/**
* Gets a collection of Locales
* @memberof ContentfulEnvironmentAPI
* @return {Promise<Locale.LocaleCollection>} Promise for a collection of Locales
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getLocales())
* .then((response) => console.log(response.items))
* .catch(console.error)
*/
function getLocales() {
return http.get('locales').then(function (response) {
return wrapLocaleCollection(http, response.data);
}, errorHandler);
}
/**
* Creates a Locale
* @memberof ContentfulEnvironmentAPI
* @see {Locale.Locale}
* @param {object} data - Object representation of the Locale to be created
* @return {Promise<Locale.Locale>} Promise for the newly created Locale
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* // Create locale
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createLocale({
* name: 'German (Austria)',
* code: 'de-AT',
* fallbackCode: 'de-DE',
* optional: true
* }))
* .then((locale) => console.log(locale))
* .catch(console.error)
*/
function createLocale(data) {
return http.post('locales', data).then(function (response) {
return wrapLocale(http, response.data);
}, errorHandler);
}
/**
* Gets an UI Extension
* @memberof ContentfulEnvironmentAPI
* @param {string} id
* @return {Promise<UiExtension.UiExtension>} Promise for an UI Extension
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getUiExtension('<ui-extension-id>'))
* .then((uiExtension) => console.log(uiExtension))
* .catch(console.error)
*/
function getUiExtension(id) {
return http.get('extensions/' + id).then(function (response) {
return wrapUiExtension(http, response.data);
}, errorHandler);
}
/**
* Gets a collection of UI Extension
* @memberof ContentfulEnvironmentAPI
* @return {Promise<UiExtension.UiExtensionCollection>} Promise for a collection of UI Extensions
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getUiExtensions()
* .then((response) => console.log(response.items))
* .catch(console.error)
*/
function getUiExtensions() {
return http.get('extensions').then(function (response) {
return wrapUiExtensionCollection(http, response.data);
}, errorHandler);
}
/**
* Creates a UI Extension
* @memberof ContentfulEnvironmentAPI
* @see {UiExtension.UiExtension}
* @param {object} data - Object representation of the UI Extension to be created
* @return {Promise<UiExtension.UiExtension>} Promise for the newly created UI Extension
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createUiExtension({
* extension: {
* name: 'My awesome extension',
* src: 'https://example.com/my',
* fieldTypes: [
* {
* type: 'Symbol'
* },
* {
* type: 'Text'
* }
* ],
* sidebar: false
* }
* }))
* .then((uiExtension) => console.log(uiExtension))
* .catch(console.error)
*/
function createUiExtension(data) {
return http.post('extensions', data).then(function (response) {
return wrapUiExtension(http, response.data);
}, errorHandler);
}
/**
* Creates a UI Extension with a custom ID
* @memberof ContentfulEnvironmentAPI
* @see {UiExtension.UiExtension}
* @param {string} id
* @param {object} data - Object representation of the UI Extension to be created
* @return {Promise<UiExtension.UiExtension>} Promise for the newly created UI Extension
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.createUiExtensionWithId('<extension_id>', {
* extension: {
* name: 'My awesome extension',
* src: 'https://example.com/my',
* fieldTypes: [
* {
* type: 'Symbol'
* },
* {
* type: 'Text'
* }
* ],
* sidebar: false
* }
* }))
* .then((uiExtension) => console.log(uiExtension))
* .catch(console.error)
*/
function createUiExtensionWithId(id, data) {
return http.put('extensions/' + id, data).then(function (response) {
return wrapUiExtension(http, response.data);
}, errorHandler);
}
/**
* Gets all snapshots of an entry
* @memberof ContentfulEnvironmentAPI
* @func getEntrySnapshots
* @return Promise<Object>
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getEntrySnapshots('<entry_id>'))
* .then((snapshots) => console.log(snapshots.items))
* .catch(console.error)
*/
function getEntrySnapshots(entryId) {
return http.get('entries/' + entryId + '/snapshots').then(function (response) {
return wrapSnapshotCollection(http, response.data);
}, errorHandler);
}
/**
* Gets all snapshots of a contentType
* @memberof ContentfulEnvironmentAPI
* @func getContentTypeSnapshots
* @return Promise<Object>
* @example
* const contentful = require('contentful-management')
*
* const client = contentful.createClient({
* accessToken: '<content_management_api_key>'
* })
*
* client.getSpace('<space_id>')
* .then((space) => space.getEnvironment('<environment-id>'))
* .then((environment) => environment.getContentTypeSnapshots('<contentTypeId>'))
* .then((snapshots) => console.log(snapshots.items))
* .catch(console.error)
*/
function getContentTypeSnapshots(contentTypeId) {
return http.get('content_types/' + contentTypeId + '/snapshots').then(function (response) {
return wrapSnapshotCollection(http, response.data);
}, errorHandler);
}
/*
* @private
* sdk relies heavily on sys metadata
* so we cannot omit the sys property on sdk level
*
*/
function normalizeSelect(query) {
if (query.select && !/sys/i.test(query.select)) {
query.select += ',sys';
}
}
return {
delete: deleteEnvironment,
update: updateEnvironment,
getContentType: getContentType,
getContentTypes: getContentTypes,
createContentType: createContentType,
createContentTypeWithId: createContentTypeWithId,
getEditorInterfaceForContentType: getEditorInterfaceForContentType,
getEntry: getEntry,
getEntries: getEntries,
createEntry: createEntry,
createEntryWithId: createEntryWithId,
getAsset: getAsset,
getAssets: getAssets,
createAsset: createAsset,
createAssetWithId: createAssetWithId,
createAssetFromFiles: createAssetFromFiles,
getUpload: getUpload,
createUpload: createUpload,
getLocale: getLocale,
getLocales: getLocales,
createLocale: createLocale,
getUiExtension: getUiExtension,
getUiExtensions: getUiExtensions,
createUiExtension: createUiExtension,
createUiExtensionWithId: createUiExtensionWithId,
getEntrySnapshots: getEntrySnapshots,
getContentTypeSnapshots: getContentTypeSnapshots
};
} | * fields: [
* {
* id: 'title',
* name: 'Title', |
auth_manager.py | """
Collaborate with RestApiClient to make remote anonymous and authenticated calls.
Uses user_io to request user's login and password and obtain a token for calling authenticated
methods if receives AuthenticationException from RestApiClient.
Flow:
Directly invoke a REST method in RestApiClient, example: get_conan.
if receives AuthenticationException (not open method) will ask user for login and password
and will invoke RestApiClient.get_token() (with LOGIN_RETRIES retries) and retry to call
get_conan with the new token.
"""
import hashlib
from uuid import getnode as get_mac
from conans.client.cmd.user import update_localdb
from conans.errors import AuthenticationException, ConanException, ForbiddenException
from conans.util.log import logger
def input_credentials_if_unauthorized(func):
"""Decorator. Handles AuthenticationException and request user
to input a user and a password"""
LOGIN_RETRIES = 3
def wrapper(self, *args, **kwargs):
try:
# Set custom headers of mac_digest and username
self.set_custom_headers(self.user)
ret = func(self, *args, **kwargs)
return ret
except ForbiddenException:
raise ForbiddenException("Permission denied for user: '%s'" % self.user)
except AuthenticationException:
# User valid but not enough permissions
if self.user is None or self._rest_client.token is None:
# token is None when you change user with user command
# Anonymous is not enough, ask for a user
remote = self.remote
self._user_io.out.info('Please log in to "%s" to perform this action. '
'Execute "conan user" command.' % remote.name)
if "bintray" in remote.url:
self._user_io.out.info('If you don\'t have an account sign up here: '
'https://bintray.com/signup/oss')
return retry_with_new_token(self, *args, **kwargs)
else:
# Token expired or not valid, so clean the token and repeat the call
# (will be anonymous call but exporting who is calling)
logger.info("Token expired or not valid, cleaning the saved token and retrying")
self._store_login((self.user, None))
self._rest_client.token = None
# Set custom headers of mac_digest and username
self.set_custom_headers(self.user)
return wrapper(self, *args, **kwargs)
def retry_with_new_token(self, *args, **kwargs):
"""Try LOGIN_RETRIES to obtain a password from user input for which
we can get a valid token from api_client. If a token is returned,
credentials are stored in localdb and rest method is called"""
for _ in range(LOGIN_RETRIES):
user, password = self._user_io.request_login(self._remote.name, self.user)
try:
token, _, _, _ = self.authenticate(user, password)
except AuthenticationException:
if self.user is None:
self._user_io.out.error('Wrong user or password')
else:
self._user_io.out.error(
'Wrong password for user "%s"' % self.user)
self._user_io.out.info(
'You can change username with "conan user <username>"')
else:
logger.debug("Got token: %s" % str(token))
self._rest_client.token = token
self.user = user
# Set custom headers of mac_digest and username
self.set_custom_headers(user)
return wrapper(self, *args, **kwargs)
raise AuthenticationException("Too many failed login attempts, bye!")
return wrapper
class ConanApiAuthManager(object):
def __init__(self, rest_client, user_io, localdb):
self._user_io = user_io
self._rest_client = rest_client
self._localdb = localdb
self._remote = None
@property
def remote(self):
return self._remote
@remote.setter
def remote(self, remote):
self._remote = remote
self._rest_client.remote_url = remote.url
self._rest_client.verify_ssl = remote.verify_ssl
self.user, self._rest_client.token = self._localdb.get_login(remote.url)
def _store_login(self, login):
try:
self._localdb.set_login(login, self._remote.url) | 'Your credentials could not be stored in local cache\n')
self._user_io.out.debug(str(e) + '\n')
@staticmethod
def get_mac_digest():
sha1 = hashlib.sha1()
sha1.update(str(get_mac()).encode())
return str(sha1.hexdigest())
def set_custom_headers(self, username):
# First identifies our machine, second the username even if it was not
# authenticated
custom_headers = self._rest_client.custom_headers
custom_headers['X-Client-Anonymous-Id'] = self.get_mac_digest()
custom_headers['X-Client-Id'] = str(username or "")
# ######### CONAN API METHODS ##########
@input_credentials_if_unauthorized
def upload_recipe(self, conan_reference, the_files, retry, retry_wait, policy, remote_manifest):
return self._rest_client.upload_recipe(conan_reference, the_files, retry, retry_wait,
policy, remote_manifest)
@input_credentials_if_unauthorized
def upload_package(self, package_reference, the_files, retry, retry_wait, policy):
return self._rest_client.upload_package(package_reference, the_files, retry, retry_wait,
policy)
@input_credentials_if_unauthorized
def get_conan_manifest(self, conan_reference):
return self._rest_client.get_conan_manifest(conan_reference)
@input_credentials_if_unauthorized
def get_package_manifest(self, package_reference):
return self._rest_client.get_package_manifest(package_reference)
@input_credentials_if_unauthorized
def get_package(self, package_reference, dest_folder):
return self._rest_client.get_package(package_reference, dest_folder)
@input_credentials_if_unauthorized
def get_recipe(self, reference, dest_folder):
return self._rest_client.get_recipe(reference, dest_folder)
@input_credentials_if_unauthorized
def get_recipe_sources(self, reference, dest_folder):
return self._rest_client.get_recipe_sources(reference, dest_folder)
@input_credentials_if_unauthorized
def download_files_to_folder(self, urls, dest_folder):
return self._rest_client.download_files_to_folder(urls, dest_folder)
@input_credentials_if_unauthorized
def get_package_info(self, package_reference):
return self._rest_client.get_package_info(package_reference)
@input_credentials_if_unauthorized
def search(self, pattern, ignorecase):
return self._rest_client.search(pattern, ignorecase)
@input_credentials_if_unauthorized
def search_packages(self, reference, query):
return self._rest_client.search_packages(reference, query)
@input_credentials_if_unauthorized
def remove(self, conan_refernce):
return self._rest_client.remove_conanfile(conan_refernce)
@input_credentials_if_unauthorized
def remove_packages(self, conan_reference, package_ids):
return self._rest_client.remove_packages(conan_reference, package_ids)
@input_credentials_if_unauthorized
def get_path(self, conan_reference, path, package_id):
return self._rest_client.get_path(conan_reference, path, package_id)
def authenticate(self, user, password):
if user is None: # The user is already in DB, just need the passwd
prev_user = self._localdb.get_username(self._remote.url)
if prev_user is None:
raise ConanException("User for remote '%s' is not defined" % self._remote.name)
else:
user = prev_user
try:
token = self._rest_client.authenticate(user, password)
except UnicodeDecodeError:
raise ConanException("Password contains not allowed symbols")
# Store result in DB
remote_name, prev_user, user = update_localdb(self._localdb, user, token, self._remote)
return token, remote_name, prev_user, user | except Exception as e:
self._user_io.out.error( |
watchr.js | // Generated by CoffeeScript 1.6.2
var EventEmitter, TaskGroup, Watcher, balUtil, createWatcher, eachr, extendr, fsUtil, pathUtil, safefs, typeChecker, watch, watchers, watchersTotal,
__bind = function(fn, me){ return function(){ return fn.apply(me, arguments); }; },
__hasProp = {}.hasOwnProperty,
__extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function | () { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
__slice = [].slice,
__indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; };
pathUtil = require('path');
fsUtil = require('fs');
balUtil = require('bal-util');
safefs = require('safefs');
extendr = require('extendr');
eachr = require('eachr');
typeChecker = require('typechecker');
TaskGroup = require('taskgroup').TaskGroup;
EventEmitter = require('events').EventEmitter;
/*
Now to make watching files more convient and managed, we'll create a class which we can use to attach to each file.
It'll provide us with the API and abstraction we need to accomplish difficult things like recursion.
We'll also store a global store of all the watchers and their paths so we don't have multiple watchers going at the same time
for the same file - as that would be quite ineffecient.
Events:
- `log` for debugging, receives the arguments `logLevel ,args...`
- `error` for gracefully listening to error events, receives the arguments `err`
- `watching` for when watching of the path has completed, receives the arguments `err, watcherInstance, isWatching`
- `change` for listening to change events, receives the arguments `changeType, fullPath, currentStat, previousStat`
*/
watchersTotal = 0;
watchers = {};
Watcher = (function(_super) {
__extends(_Class, _super);
_Class.prototype.path = null;
_Class.prototype.isDirectory = null;
_Class.prototype.stat = null;
_Class.prototype.fswatcher = null;
_Class.prototype.children = null;
_Class.prototype.state = 'pending';
_Class.prototype.method = null;
_Class.prototype.config = {
path: null,
listener: null,
listeners: null,
stat: null,
outputLog: false,
interval: 5007,
persistent: true,
duplicateDelay: 1 * 1000,
preferredMethods: null,
ignorePaths: false,
ignoreHiddenFiles: false,
ignoreCommonPatterns: true,
ignoreCustomPatterns: null
};
function _Class(config, next) {
this.listener = __bind(this.listener, this);
this.bubbler = __bind(this.bubbler, this);
this.bubble = __bind(this.bubble, this);
this.isIgnoredPath = __bind(this.isIgnoredPath, this);
this.log = __bind(this.log, this); this.children = {};
this.config = extendr.extend({}, this.config);
this.config.preferredMethods = ['watch', 'watchFile'];
if (config.next != null) {
if (next == null) {
next = config.next;
}
delete config.next;
}
if (config) {
this.setup(config);
}
if (next) {
this.watch(next);
}
this;
}
_Class.prototype.log = function() {
var args;
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
if (this.config.outputLog) {
console.log.apply(console, args);
}
this.emit.apply(this, ['log'].concat(__slice.call(args)));
return this;
};
_Class.prototype.isIgnoredPath = function(path, opts) {
var ignore, _ref, _ref1, _ref2, _ref3;
if (opts == null) {
opts = {};
}
ignore = balUtil.isIgnoredPath(path, {
ignorePaths: (_ref = opts.ignorePaths) != null ? _ref : this.config.ignorePaths,
ignoreHiddenFiles: (_ref1 = opts.ignoreHiddenFiles) != null ? _ref1 : this.config.ignoreHiddenFiles,
ignoreCommonPatterns: (_ref2 = opts.ignoreCommonPatterns) != null ? _ref2 : this.config.ignoreCommonPatterns,
ignoreCustomPatterns: (_ref3 = opts.ignoreCustomPatterns) != null ? _ref3 : this.config.ignoreCustomPatterns
});
this.log('debug', "ignore: " + path + " " + (ignore ? 'yes' : 'no'));
return ignore;
};
/*
Setup our Instance
*/
_Class.prototype.setup = function(config) {
extendr.extend(this.config, config);
this.path = this.config.path;
if (this.config.stat) {
this.stat = this.config.stat;
this.isDirectory = this.stat.isDirectory();
delete this.config.stat;
}
if (this.config.listener || this.config.listeners) {
this.removeAllListeners();
if (this.config.listener) {
this.listen(this.config.listener);
delete this.config.listener;
}
if (this.config.listeners) {
this.listen(this.config.listeners);
delete this.config.listeners;
}
}
return this;
};
_Class.prototype.bubble = function() {
var args;
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
this.emit.apply(this, args);
return this;
};
_Class.prototype.bubbler = function(eventName) {
var _this = this;
return function() {
var args;
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
return _this.bubble.apply(_this, [eventName].concat(__slice.call(args)));
};
};
/*
Listen
Add listeners to our watcher instance.
Overloaded to also accept the following:
- `changeListener` a single change listener
- `[changeListener]` an array of change listeners
- `{eventName:eventListener}` an object keyed with the event names and valued with a single event listener
- `{eventName:[eventListener]}` an object keyed with the event names and valued with an array of event listeners
*/
_Class.prototype.listen = function(eventName, listener) {
var listenerArray, listeners, _i, _j, _len, _len1;
if (listener == null) {
listeners = eventName;
if (typeChecker.isArray(listeners)) {
for (_i = 0, _len = listeners.length; _i < _len; _i++) {
listener = listeners[_i];
this.listen('change', listener);
}
} else if (typeChecker.isPlainObject(listeners)) {
for (eventName in listeners) {
if (!__hasProp.call(listeners, eventName)) continue;
listenerArray = listeners[eventName];
if (typeChecker.isArray(listenerArray)) {
for (_j = 0, _len1 = listenerArray.length; _j < _len1; _j++) {
listener = listenerArray[_j];
this.listen(eventName, listener);
}
} else {
this.listen(eventName, listenerArray);
}
}
} else {
this.listen('change', listeners);
}
} else {
this.removeListener(eventName, listener);
this.on(eventName, listener);
this.log('debug', "added a listener: on " + this.path + " for event " + eventName);
}
return this;
};
/*
Emit Safe
Sometimes events can fire incredibly quickly in which case we'll determine multiple events
This alias for emit('change',...) will check to see if the event has already been fired recently
and if it has, then ignore it
*/
_Class.prototype.cacheTimeout = null;
_Class.prototype.cachedEvents = null;
_Class.prototype.emitSafe = function() {
var args, config, me, thisEvent, _ref;
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
me = this;
config = this.config;
if (config.duplicateDelay) {
if (this.cacheTimeout != null) {
clearTimeout(this.cacheTimeout);
}
this.cacheTimeout = setTimeout(function() {
me.cachedEvents = [];
return me.cacheTimeout = null;
}, config.duplicateDelay);
if ((_ref = this.cachedEvents) == null) {
this.cachedEvents = [];
}
thisEvent = JSON.stringify(args);
if (__indexOf.call(this.cachedEvents, thisEvent) >= 0) {
this.log('debug', "event ignored on " + this.path + " due to duplicate:", args);
return this;
}
this.cachedEvents.push(thisEvent);
}
this.emit.apply(this, args);
return this;
};
/*
Listener
A change event has fired
Things to note:
- watchFile:
- currentStat still exists even for deleted/renamed files
- for deleted and updated files, it will fire on the file
- for created files, it will fire on the directory
- fsWatcher:
- eventName is either 'change' or 'rename', this value cannot be trusted
- currentStat still exists even for deleted/renamed files
- previousStat is accurate, however we already have this
- for deleted and changed files, it will fire on the file
- for new files, it will fire on the directory
Arguments for our change listener will be:
- for updated files the arguments will be: `'update', fullPath, currentStat, previousStat`
- for created files the arguments will be: `'create', fullPath, currentStat, null`
- for deleted files the arguments will be: `'delete', fullPath, null, previousStat`
In the future we will add:
- for renamed files: 'rename', fullPath, currentStat, previousStat, newFullPath
- rename is possible as the stat.ino is the same for the delete and create
*/
_Class.prototype.listener = function() {
var args, currentStat, determineTheChange, fileExists, fileFullPath, isTheSame, me, previousStat,
_this = this;
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
me = this;
fileFullPath = this.path;
currentStat = null;
previousStat = this.stat;
fileExists = null;
this.log('debug', "watch event triggered on " + this.path + ":", args);
if (args[0] === 'change' && this.children[args[1]]) {
return (function() {
var childFileRelativePath, childFileWatcher;
childFileRelativePath = args[1];
childFileWatcher = _this.children[args[1]];
_this.log('debug', 'forwarding initial change detection to child:', childFileRelativePath, 'via:', fileFullPath);
return childFileWatcher.listener('change', '.');
})();
}
isTheSame = function() {
if ((currentStat != null) && (previousStat != null)) {
if (currentStat.size === previousStat.size && currentStat.mtime.toString() === previousStat.mtime.toString()) {
return true;
}
}
return false;
};
determineTheChange = function() {
if (!fileExists) {
_this.log('debug', 'determined delete:', fileFullPath);
return _this.close('deleted');
} else {
if (isTheSame()) {
return _this.log('debug', "determined same:", fileFullPath);
} else {
if (_this.isDirectory) {
if (isTheSame() === false) {
return safefs.readdir(fileFullPath, function(err, newFileRelativePaths) {
if (err) {
return _this.emit('error', err);
}
if (typeChecker.isString(args[0]) && args[1] === null) {
eachr(_this.children, function(childFileWatcher, childFileRelativePath) {
if (__indexOf.call(newFileRelativePaths, childFileRelativePath) < 0) {
return;
}
if (!childFileWatcher) {
return;
}
_this.log('debug', 'forwarding extensive change detection to child:', childFileRelativePath, 'via:', fileFullPath);
childFileWatcher.listener('change', '.');
});
}
eachr(_this.children, function(childFileWatcher, childFileRelativePath) {
var childFileFullPath;
if (__indexOf.call(newFileRelativePaths, childFileRelativePath) >= 0) {
return;
}
childFileFullPath = pathUtil.join(fileFullPath, childFileRelativePath);
if (_this.isIgnoredPath(childFileFullPath)) {
_this.log('debug', 'ignored delete:', childFileFullPath, 'via:', fileFullPath);
return;
}
_this.log('debug', 'determined delete:', childFileFullPath, 'via:', fileFullPath);
_this.closeChild(childFileRelativePath, 'deleted');
});
return eachr(newFileRelativePaths, function(childFileRelativePath) {
var childFileFullPath;
if (_this.children[childFileRelativePath] != null) {
return;
}
_this.children[childFileRelativePath] = false;
childFileFullPath = pathUtil.join(fileFullPath, childFileRelativePath);
if (_this.isIgnoredPath(childFileFullPath)) {
_this.log('debug', 'ignored create:', childFileFullPath, 'via:', fileFullPath);
return;
}
return safefs.stat(childFileFullPath, function(err, childFileStat) {
if (err) {
return;
}
_this.log('debug', 'determined create:', childFileFullPath, 'via:', fileFullPath);
_this.emitSafe('change', 'create', childFileFullPath, childFileStat, null);
_this.watchChild({
fullPath: childFileFullPath,
relativePath: childFileRelativePath,
stat: childFileStat
});
});
});
});
}
} else {
_this.log('debug', 'determined update:', fileFullPath);
return _this.emitSafe('change', 'update', fileFullPath, currentStat, previousStat);
}
}
}
};
safefs.exists(fileFullPath, function(exists) {
fileExists = exists;
if (fileExists) {
return safefs.stat(fileFullPath, function(err, stat) {
if (err) {
return _this.emit('error', err);
}
currentStat = stat;
_this.stat = currentStat;
return determineTheChange();
});
} else {
return determineTheChange();
}
});
return this;
};
/*
Close
We will need something to close our listener for removed or renamed files
As renamed files are a bit difficult we will want to close and delete all the watchers for all our children too
Essentially it is a self-destruct
*/
_Class.prototype.close = function(reason) {
var childRelativePath, _ref;
if (this.state !== 'active') {
return this;
}
this.log('debug', "close: " + this.path);
_ref = this.children;
for (childRelativePath in _ref) {
if (!__hasProp.call(_ref, childRelativePath)) continue;
this.closeChild(childRelativePath, reason);
}
if (this.method === 'watchFile') {
fsUtil.unwatchFile(this.path);
}
if (this.fswatcher != null) {
this.fswatcher.close();
this.fswatcher = null;
}
if (reason === 'deleted') {
this.state = 'deleted';
this.emitSafe('change', 'delete', this.path, null, this.stat);
} else if (reason === 'failure') {
this.state = 'closed';
this.log('warn', "Failed to watch the path " + this.path);
} else {
this.state = 'closed';
}
if (watchers[this.path] != null) {
delete watchers[this.path];
watchersTotal--;
}
return this;
};
_Class.prototype.closeChild = function(fileRelativePath, reason) {
var watcher;
if (this.children[fileRelativePath] != null) {
watcher = this.children[fileRelativePath];
if (watcher) {
watcher.close(reason);
}
delete this.children[fileRelativePath];
}
return this;
};
/*
Watch Child
Setup watching for a child
Bubble events of the child into our instance
Also instantiate the child with our instance's configuration where applicable
next(err,watcher)
*/
_Class.prototype.watchChild = function(opts, next) {
var config, me, _base, _name;
me = this;
config = this.config;
(_base = me.children)[_name = opts.relativePath] || (_base[_name] = watch({
path: opts.fullPath,
stat: opts.stat,
listeners: {
'log': me.bubbler('log'),
'change': function() {
var args, changeType, path;
args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
changeType = args[0], path = args[1];
if (changeType === 'delete' && path === opts.fullPath) {
me.closeChild(opts.relativePath, 'deleted');
}
return me.bubble.apply(me, ['change'].concat(__slice.call(args)));
},
'error': me.bubbler('error')
},
next: next,
outputLog: config.outputLog,
interval: config.interval,
persistent: config.persistent,
duplicateDelay: config.duplicateDelay,
preferredMethods: config.preferredMethods,
ignorePaths: config.ignorePaths,
ignoreHiddenFiles: config.ignoreHiddenFiles,
ignoreCommonPatterns: config.ignoreCommonPatterns,
ignoreCustomPatterns: config.ignoreCustomPatterns
}));
return me.children[opts.relativePath];
};
/*
Watch Children
next(err,watching)
*/
_Class.prototype.watchChildren = function(next) {
var config, me;
me = this;
config = this.config;
if (this.isDirectory) {
balUtil.scandir({
path: this.path,
ignorePaths: config.ignorePaths,
ignoreHiddenFiles: config.ignoreHiddenFiles,
ignoreCommonPatterns: config.ignoreCommonPatterns,
ignoreCustomPatterns: config.ignoreCustomPatterns,
recurse: false,
next: function(err) {
var watching;
watching = !err;
return next(err, watching);
},
action: function(fullPath, relativePath, nextFile, stat) {
if (me.state !== 'active') {
return nextFile(null, true);
}
return me.watchChild({
fullPath: fullPath,
relativePath: relativePath,
stat: stat
}, function(err, watcher) {
return nextFile(err);
});
}
});
} else {
next(null, true);
}
return this;
};
/*
Watch Self
*/
_Class.prototype.watchSelf = function(next) {
var complete, config, me, methodOne, methodTwo, methods;
me = this;
config = this.config;
this.method = null;
methods = {
watch: function(next) {
var err;
if (fsUtil.watch == null) {
return next(null, false);
}
try {
me.fswatcher = fsUtil.watch(me.path, me.listener);
} catch (_error) {
err = _error;
return next(err, false);
}
me.method = 'watch';
return next(null, true);
},
watchFile: function(next) {
var err, watchFileOpts;
if (fsUtil.watchFile == null) {
return next(null, false);
}
watchFileOpts = {
persistent: config.persistent,
interval: config.interval
};
try {
fsUtil.watchFile(me.path, watchFileOpts, me.listener);
} catch (_error) {
err = _error;
return next(err, false);
}
me.method = 'watchFile';
return next(null, true);
}
};
complete = function(watching) {
if (!watching) {
me.close('failure');
return next(null, false);
}
me.state = 'active';
return next(null, true);
};
methodOne = me.config.preferredMethods[0];
methodTwo = me.config.preferredMethods[1];
methods[methodOne](function(err1, watching) {
if (watching) {
return complete(watching);
}
return methods[methodTwo](function(err2, watching) {
if (watching) {
return complete(watching);
}
if (err1) {
me.emit('error', err1);
}
if (err2) {
me.emit('error', err2);
}
return complete(false);
});
});
return this;
};
/*
Watch
Setup the native watching handlers for our path so we can receive updates on when things happen
If the next argument has been received, then add it is a once listener for the watching event
If we are already watching this path then let's start again (call close)
If we are a directory, let's recurse
If we are deleted, then don't error but return the isWatching argument of our completion callback as false
Once watching has completed for this directory and all children, then emit the watching event
next(err,watcherInstance,success)
*/
_Class.prototype.watch = function(next) {
var complete, config, me,
_this = this;
me = this;
config = this.config;
if ((this.stat != null) === false) {
safefs.stat(config.path, function(err, stat) {
if (err) {
return _this.emit('error', err);
}
_this.stat = stat;
_this.isDirectory = stat.isDirectory();
return _this.watch(next);
});
return this;
}
if (next != null) {
this.listen('watching', next);
}
this.close();
this.log('debug', "watch: " + this.path);
complete = function(err, result) {
if (err == null) {
err = null;
}
if (result == null) {
result = true;
}
if (err || !result) {
me.close();
return me.emit('watching', err, me, false);
} else {
return me.emit('watching', null, me, true);
}
};
safefs.exists(this.path, function(exists) {
if (!exists) {
return complete(null, false);
}
return me.watchSelf(function(err, watching) {
if (err || !watching) {
return complete(err, watching);
}
return me.watchChildren(function(err, watching) {
return complete(err, watching);
});
});
});
return this;
};
return _Class;
})(EventEmitter);
/*
Create Watcher
Checks to see if the path actually exists, if it doesn't then exit gracefully
If it does exist, then lets check our cache for an already existing watcher instance
If we have an already existing watching instance, then just add our listeners to that
If we don't, then create a watching instance
Fire the next callback once done
next(err,watcherInstance)
*/
createWatcher = function(opts, next) {
var attempt, listener, listeners, path, watcher;
path = opts.path, listener = opts.listener, listeners = opts.listeners;
if (opts.next != null) {
if (next == null) {
next = opts.next;
}
delete opts.next;
}
if (!safefs.existsSync(path)) {
if (typeof next === "function") {
next(null, null);
}
return;
}
if (watchers[path] != null) {
watcher = watchers[path];
if (listener) {
watcher.listen(listener);
}
if (listeners) {
watcher.listen(listeners);
}
if (typeof next === "function") {
next(null, watcher);
}
} else {
attempt = 0;
watcher = new Watcher(opts, function(err) {
if (!err || attempt !== 0) {
return typeof next === "function" ? next(err, watcher) : void 0;
}
++attempt;
watcher.log('debug', "Preferred method failed, trying methods in reverse order", err);
return watcher.setup({
preferredMethods: watcher.config.preferredMethods.reverse()
}).watch();
});
watchers[path] = watcher;
++watchersTotal;
}
return watcher;
};
/*
Watch
Provides an abstracted API that supports multiple paths
If you are passing in multiple paths then do not rely on the return result containing all of the watchers
you must rely on the result inside the completion callback instead
If you used the paths option, then your results will be an array of watcher instances, otherwise they will be a single watcher instance
next(err,results)
*/
watch = function(opts, next) {
var paths, result, tasks;
result = [];
if (opts.next != null) {
if (next == null) {
next = opts.next;
}
delete opts.next;
}
if (opts.paths) {
paths = opts.paths;
delete opts.paths;
if (typeChecker.isArray(paths)) {
tasks = new TaskGroup().setConfig({
concurrency: 0
}).on('complete', function(err) {
return typeof next === "function" ? next(err, result) : void 0;
});
paths.forEach(function(path) {
return tasks.addTask(function(complete) {
var localOpts, watcher;
localOpts = extendr.extend({}, opts);
localOpts.path = path;
watcher = createWatcher(localOpts, complete);
if (watcher) {
return result.push(watcher);
}
});
});
tasks.run();
} else {
opts.path = paths;
result.push(createWatcher(opts, function(err) {
return typeof next === "function" ? next(err, result) : void 0;
}));
}
} else {
result = createWatcher(opts, next);
}
return result;
};
module.exports = {
watch: watch,
Watcher: Watcher
};
| ctor |
path.rs | use std;
pub type Path = std::path::Path;
pub type PathBuf = std::path::PathBuf; | pub type StripPrefixError = std::path::StripPrefixError;
pub type PathMsg = Option<std::path::PathBuf>; | |
mode.py | from OpenTCLFile import *
def modeNumber(TCLFile):
global numModes
modeNumbers = OpenSeesTclRead(TCLFile, 'set numModes', 3)
| if str(modeNumbers):
numModes = modeNumbers[:, 2]#.astype(int)
return numModes |
|
SideNav.js | import React from "react";
import { connect } from "react-redux";
import { withRouter } from "react-router-dom";
import {
SideBarWrapper,
StyledClose,
StyledDashboard,
StyledPeople,
StyledBuild,
StyledAssignment,
StyledPermIdentity,
StyledMessage,
ImgNameWrapper
} from "./SideNavStyles";
function | (props) {
const { main_color, side_bar_color } = props.ui.colors;
return (
<SideBarWrapper
bgColor={main_color}
fontColor={side_bar_color}
sideNavWidth={props.sideNavWidth}
>
<StyledClose
onClick={e => {
props.setSideNavWidth("0px");
}}
/>
<div
style={
props.sideNavWidth === "0px"
? { display: "none" }
: { display: "block" }
}
>
<ImgNameWrapper>
<img
style={{
width: "90px",
height: "90px",
borderRadius: "50%",
margin: "auto",
marginBottom: "16px",
marginTop: "36px",
background: side_bar_color
}}
alt="user avatar"
src={
props.user.photoUrl ||
"https://www.legaseeds.com/assets/user_placeholder.svg"
}
/>
<h5>{props.user.full_name}</h5>
</ImgNameWrapper>
</div>
{/* ***** */}
{/* ***** */}
{/* ***** */}
{/* nav links */}
{/* ***** */}
{/* ***** */}
{/* ***** */}
<ul
style={
props.sideNavWidth === "0px"
? { display: "none" }
: { display: "block" }
}
>
<li
style={
props.ui.curTab === "Dashboard"
? { background: "rgba(255,255,255,0.1)" }
: {}
}
onClick={e => {
props.history.push("/app");
}}
>
<StyledDashboard />
Dashboard
</li>
<li
style={
props.ui.curTab === "Employees"
? { background: "rgba(255,255,255,0.1)" }
: {}
}
onClick={e => {
props.history.push("/app/employees");
}}
>
<StyledPeople />
Employees
</li>
<li
style={
props.ui.curTab === "Projects"
? { background: "rgba(255,255,255,0.1)" }
: {}
}
onClick={e => {
props.history.push("/app/projects");
}}
>
<StyledBuild />
Projects
</li>
<li
style={
props.ui.curTab === "Customers"
? { background: "rgba(255,255,255,0.1)" }
: {}
}
onClick={e => {
props.history.push("/app/customers");
}}
>
<StyledPermIdentity />
Customers
</li>
<li
style={
props.ui.curTab === "Materials"
? { background: "rgba(255,255,255,0.1)" }
: {}
}
onClick={e => {
props.history.push("/app/materials");
}}
>
<StyledAssignment />
Materials
</li>
<li
style={
props.ui.curTab === "Messages"
? { background: "rgba(255,255,255,0.1)" }
: {}
}
onClick={e => {
props.history.push("/app/messages");
}}
>
<StyledMessage />
Messages
</li>
</ul>
</SideBarWrapper>
);
}
const mapStateToProps = state => {
return {
user: { ...state.auth.user },
company: { ...state.auth.company },
ui: { ...state.ui }
};
};
export default connect(
mapStateToProps,
{}
)(withRouter(SideNav));
| SideNav |
actionPlanEditConfirmationView.ts | import ActionPlanEditConfirmationPresenter from './actionPlanEditConfirmationPresenter'
export default class ActionPlanEditConfirmationView {
constructor(private readonly presenter: ActionPlanEditConfirmationPresenter) {}
get renderArgs(): [string, Record<string, unknown>] {
return [
'serviceProviderReferrals/actionPlan/actionPlanEditConfirmation',
{
presenter: this.presenter,
backLinkArgs: {
text: 'Back',
href: this.presenter.viewActionPlanUrl,
},
}, | ]
}
} | |
saveload.rs | extern crate ron;
#[macro_use]
extern crate serde;
extern crate specs;
use std::fmt;
use specs::{
error::NoError,
prelude::*,
saveload::{
DeserializeComponents, MarkedBuilder, SerializeComponents, SimpleMarker,
SimpleMarkerAllocator,
},
};
// This is an example of how the serialized data of two entities might look on
// disk.
//
// When serializing entities, they are written in an array of tuples, each tuple
// representing one entity. The entity's marker and components are written as
// fields into these tuples, knowing nothing about the original entity's id.
const ENTITIES: &str = "
[
(
marker: (0),
components: (
Some((
x: 10,
y: 20,
)),
Some((30.5)),
),
),
(
marker: (1),
components: (
Some(Pos(
x: 5,
y: 2,
)),
None,
),
),
]
";
// A dummy component that can be serialized and deserialized.
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
struct Pos {
x: f32,
y: f32,
}
impl Component for Pos {
type Storage = VecStorage<Self>;
}
// A dummy component that can be serialized and deserialized.
#[derive(Clone, Copy, Debug, Deserialize, Serialize)]
struct Mass(f32);
impl Component for Mass {
type Storage = VecStorage<Self>;
}
// It is necessary to supply the `(De)SerializeComponents`-trait with an error
// type that implements the `Display`-trait. In this case we want to be able to
// return different errors, and we are going to use a `.ron`-file to store our
// data. Therefore we use a custom enum, which can display both the `NoError`and
// `ron::ser::Error` type. This enum could be extended to incorporate for
// example `std::io::Error` and more.
#[derive(Debug)]
enum Combined {
Ron(ron::ser::Error),
}
// Implementing the required `Display`-trait, by matching the `Combined` enum,
// allowing different error types to be displayed.
impl fmt::Display for Combined {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Combined::Ron(ref e) => write!(f, "{}", e),
}
}
}
// This returns the `ron::ser:Error` in form of the `Combined` enum, which can
// then be matched and displayed accordingly.
impl From<ron::ser::Error> for Combined {
fn from(x: ron::ser::Error) -> Self {
Combined::Ron(x)
}
}
// This cannot be called.
impl From<NoError> for Combined {
fn from(e: NoError) -> Self {
match e {}
}
}
struct NetworkSync;
fn main() {
let mut world = World::new();
// Since in this example no system uses these resources, they have to be
// registered manually. This is typically not required.
world.register::<Pos>();
world.register::<Mass>();
world.register::<SimpleMarker<NetworkSync>>();
// Adds a predefined marker allocator to the world, as a resource.
// This predifined marker uses a `HashMap<u64, Entity>` to keep track of all
// entities that should be (de)serializable, as well as which ids are
// already in use.
world.insert(SimpleMarkerAllocator::<NetworkSync>::new());
world
.create_entity()
.with(Pos { x: 1.0, y: 2.0 })
.with(Mass(0.5))
// The `.marked` function belongs to the [`MarkedBuilder`](struct.MarkedBuilder.html) trait,
// which is implemented for example for the [`EntityBuilder`](struct.EntityBuilder.html).
// It yields the next higher id, that is not yet in use.
//
// Since the `Marker` is passed as a generic type parameter, it is possible to use several different `MarkerAllocators`,
// e.g. to keep track of different types of entities, with different ids.
// **Careful when deserializing, it is not always clear for every fileforamt whether a number is supposed to be i.e. a `u32` or `u64`!**
.marked::<SimpleMarker<NetworkSync>>()
.build();
world
.create_entity()
.with(Pos { x: 7.0, y: 2.0 })
.with(Mass(4.5))
.marked::<SimpleMarker<NetworkSync>>()
.build();
// Here we create a system that lets us access the entities to serialize.
struct Serialize;
impl<'a> System<'a> for Serialize {
// This SystemData contains the entity-resource, as well as all components that
// shall be serialized, plus the marker component storage.
type SystemData = (
Entities<'a>,
ReadStorage<'a, Pos>,
ReadStorage<'a, Mass>,
ReadStorage<'a, SimpleMarker<NetworkSync>>,
);
fn run(&mut self, (ents, pos, mass, markers): Self::SystemData) {
// First we need a serializer for the format of choice, in this case the
// `.ron`-format.
let mut ser = ron::ser::Serializer::new(Some(Default::default()), true);
// For serialization we use the
// [`SerializeComponents`](struct.SerializeComponents.html)-trait's `serialize`
// function. It takes two generic parameters:
// * An unbound type -> `NoError` (However, the serialize function expects it to
// be bound by the `Display`-trait)
// * A type implementing the `Marker`-trait ->
// [SimpleMarker](struct.SimpleMarker.html) (a convenient, predefined marker)
//
// The first parameter resembles the `.join()` syntax from other specs-systems,
// every component that should be serialized has to be put inside a tuple.
//
// The second and third parameters are just the entity-storage and
// marker-storage, which get `.join()`ed internally.
//
// Lastly, we provide a mutable reference to the serializer of choice, which has
// to have the `serde::ser::Serializer`-trait implemented.
SerializeComponents::<NoError, SimpleMarker<NetworkSync>>::serialize(
&(&pos, &mass),
&ents,
&markers,
&mut ser,
)
.unwrap_or_else(|e| eprintln!("Error: {}", e));
// TODO: Specs should return an error which combines serialization
// and component errors.
// At this point, `ser` could be used to write its contents to a file, which is
// not done here. Instead we print the content of this pseudo-file.
println!("{}", ser.into_output_string());
}
}
// Running the system results in a print to the standard output channel, in
// `.ron`-format, showing how the serialized dummy entities look like.
Serialize.run_now(&world);
// -----------------
// Just like the previous Serialize-system, we write a Deserialize-system.
struct Deserialize;
impl<'a> System<'a> for Deserialize {
// This requires all the component storages our serialized entities have,
// mutably, plus a `MarkerAllocator` resource to write the deserialized
// ids into, so that we can later serialize again.
type SystemData = (
Entities<'a>,
Write<'a, SimpleMarkerAllocator<NetworkSync>>,
WriteStorage<'a, Pos>,
WriteStorage<'a, Mass>,
WriteStorage<'a, SimpleMarker<NetworkSync>>,
);
fn | (&mut self, (ent, mut alloc, pos, mass, mut markers): Self::SystemData) {
// The `const ENTITIES: &str` at the top of this file was formatted according to
// the `.ron`-specs, therefore we need a `.ron`-deserializer.
// Others can be used, as long as they implement the
// `serde::de::Deserializer`-trait.
use ron::de::Deserializer;
// Typical file operations are omitted in this example, since we do not have a
// seperate file, but a `const &str`. We use a convencience function
// of the `ron`-crate: `from_str`, to convert our data form the top of the file.
if let Ok(mut de) = Deserializer::from_str(ENTITIES) {
// Again, we need to pass in a type implementing the `Display`-trait,
// as well as a type implementing the `Marker`-trait.
// However, from the function parameter `&mut markers`, which refers to the
// `SimpleMarker`-storage, the necessary type of marker can be
// inferred, hence the `, _>´.
DeserializeComponents::<Combined, _>::deserialize(
&mut (pos, mass),
&ent,
&mut markers,
&mut alloc,
&mut de,
)
.unwrap_or_else(|e| eprintln!("Error: {}", e));
}
}
}
// If we run this system now, the `ENTITIES: &str` is going to be deserialized,
// and two entities are created.
Deserialize.run_now(&world);
// Printing the `Pos`-component storage entries to show the result of
// deserializing.
println!(
"{:#?}",
(&world.read_storage::<Pos>()).join().collect::<Vec<_>>()
);
}
| run |
mod.rs | #[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::AGC_CTRL_1 {
#[doc = r" Modifies the contents of the register"]
#[inline]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct BBA_ALT_CODER {
bits: u8,
}
impl BBA_ALT_CODER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct LNA_ALT_CODER {
bits: u8,
}
impl LNA_ALT_CODER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct LNA_USER_GAINR {
bits: u8,
}
impl LNA_USER_GAINR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct BBA_USER_GAINR {
bits: u8,
}
impl BBA_USER_GAINR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Value of the field"]
pub struct USER_LNA_GAIN_ENR {
bits: bool,
}
impl USER_LNA_GAIN_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = r" Value of the field"]
pub struct USER_BBA_GAIN_ENR {
bits: bool,
}
impl USER_BBA_GAIN_ENR {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
self.bits
}
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
}
#[doc = "Possible values of the field `PRESLOW_EN`"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PRESLOW_ENR {
#[doc = "Pre-slow is disabled."]
_0,
#[doc = "Pre-slow is enabled."]
_1,
}
impl PRESLOW_ENR {
#[doc = r" Returns `true` if the bit is clear (0)"]
#[inline]
pub fn bit_is_clear(&self) -> bool {
!self.bit()
}
#[doc = r" Returns `true` if the bit is set (1)"]
#[inline]
pub fn bit_is_set(&self) -> bool {
self.bit()
}
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bit(&self) -> bool {
match *self {
PRESLOW_ENR::_0 => false,
PRESLOW_ENR::_1 => true,
}
}
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _from(value: bool) -> PRESLOW_ENR {
match value {
false => PRESLOW_ENR::_0,
true => PRESLOW_ENR::_1,
}
}
#[doc = "Checks if the value of the field is `_0`"]
#[inline]
pub fn is_0(&self) -> bool {
*self == PRESLOW_ENR::_0
}
#[doc = "Checks if the value of the field is `_1`"]
#[inline]
pub fn is_1(&self) -> bool {
*self == PRESLOW_ENR::_1
}
}
#[doc = r" Value of the field"]
pub struct LNA_GAIN_SETTLE_TIMER {
bits: u8,
}
impl LNA_GAIN_SETTLE_TIMER {
#[doc = r" Value of the field as raw bits"]
#[inline]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _BBA_ALT_CODEW<'a> {
w: &'a mut W,
}
impl<'a> _BBA_ALT_CODEW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 15;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _LNA_ALT_CODEW<'a> {
w: &'a mut W,
}
impl<'a> _LNA_ALT_CODEW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 255;
const OFFSET: u8 = 4;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _LNA_USER_GAINW<'a> {
w: &'a mut W,
}
impl<'a> _LNA_USER_GAINW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 15;
const OFFSET: u8 = 12;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _BBA_USER_GAINW<'a> {
w: &'a mut W,
}
impl<'a> _BBA_USER_GAINW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 15;
const OFFSET: u8 = 16;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _USER_LNA_GAIN_ENW<'a> {
w: &'a mut W,
}
impl<'a> _USER_LNA_GAIN_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W |
}
#[doc = r" Proxy"]
pub struct _USER_BBA_GAIN_ENW<'a> {
w: &'a mut W,
}
impl<'a> _USER_BBA_GAIN_ENW<'a> {
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 21;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = "Values that can be written to the field `PRESLOW_EN`"]
pub enum PRESLOW_ENW {
#[doc = "Pre-slow is disabled."]
_0,
#[doc = "Pre-slow is enabled."]
_1,
}
impl PRESLOW_ENW {
#[allow(missing_docs)]
#[doc(hidden)]
#[inline]
pub fn _bits(&self) -> bool {
match *self {
PRESLOW_ENW::_0 => false,
PRESLOW_ENW::_1 => true,
}
}
}
#[doc = r" Proxy"]
pub struct _PRESLOW_ENW<'a> {
w: &'a mut W,
}
impl<'a> _PRESLOW_ENW<'a> {
#[doc = r" Writes `variant` to the field"]
#[inline]
pub fn variant(self, variant: PRESLOW_ENW) -> &'a mut W {
{
self.bit(variant._bits())
}
}
#[doc = "Pre-slow is disabled."]
#[inline]
pub fn _0(self) -> &'a mut W {
self.variant(PRESLOW_ENW::_0)
}
#[doc = "Pre-slow is enabled."]
#[inline]
pub fn _1(self) -> &'a mut W {
self.variant(PRESLOW_ENW::_1)
}
#[doc = r" Sets the field bit"]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r" Clears the field bit"]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r" Writes raw bits to the field"]
#[inline]
pub fn bit(self, value: bool) -> &'a mut W {
const MASK: bool = true;
const OFFSET: u8 = 22;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
#[doc = r" Proxy"]
pub struct _LNA_GAIN_SETTLE_TIMEW<'a> {
w: &'a mut W,
}
impl<'a> _LNA_GAIN_SETTLE_TIMEW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 255;
const OFFSET: u8 = 24;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline]
pub fn bits(&self) -> u32 {
self.bits
}
#[doc = "Bits 0:3 - BBA_ALT_CODE"]
#[inline]
pub fn bba_alt_code(&self) -> BBA_ALT_CODER {
let bits = {
const MASK: u8 = 15;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
};
BBA_ALT_CODER { bits }
}
#[doc = "Bits 4:11 - LNA_ALT_CODE"]
#[inline]
pub fn lna_alt_code(&self) -> LNA_ALT_CODER {
let bits = {
const MASK: u8 = 255;
const OFFSET: u8 = 4;
((self.bits >> OFFSET) & MASK as u32) as u8
};
LNA_ALT_CODER { bits }
}
#[doc = "Bits 12:15 - LNA_USER_GAIN"]
#[inline]
pub fn lna_user_gain(&self) -> LNA_USER_GAINR {
let bits = {
const MASK: u8 = 15;
const OFFSET: u8 = 12;
((self.bits >> OFFSET) & MASK as u32) as u8
};
LNA_USER_GAINR { bits }
}
#[doc = "Bits 16:19 - BBA_USER_GAIN"]
#[inline]
pub fn bba_user_gain(&self) -> BBA_USER_GAINR {
let bits = {
const MASK: u8 = 15;
const OFFSET: u8 = 16;
((self.bits >> OFFSET) & MASK as u32) as u8
};
BBA_USER_GAINR { bits }
}
#[doc = "Bit 20 - User LNA Gain Enable"]
#[inline]
pub fn user_lna_gain_en(&self) -> USER_LNA_GAIN_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 20;
((self.bits >> OFFSET) & MASK as u32) != 0
};
USER_LNA_GAIN_ENR { bits }
}
#[doc = "Bit 21 - User BBA Gain Enable"]
#[inline]
pub fn user_bba_gain_en(&self) -> USER_BBA_GAIN_ENR {
let bits = {
const MASK: bool = true;
const OFFSET: u8 = 21;
((self.bits >> OFFSET) & MASK as u32) != 0
};
USER_BBA_GAIN_ENR { bits }
}
#[doc = "Bit 22 - Pre-slow Enable"]
#[inline]
pub fn preslow_en(&self) -> PRESLOW_ENR {
PRESLOW_ENR::_from({
const MASK: bool = true;
const OFFSET: u8 = 22;
((self.bits >> OFFSET) & MASK as u32) != 0
})
}
#[doc = "Bits 24:31 - LNA_GAIN_SETTLE_TIME"]
#[inline]
pub fn lna_gain_settle_time(&self) -> LNA_GAIN_SETTLE_TIMER {
let bits = {
const MASK: u8 = 255;
const OFFSET: u8 = 24;
((self.bits >> OFFSET) & MASK as u32) as u8
};
LNA_GAIN_SETTLE_TIMER { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:3 - BBA_ALT_CODE"]
#[inline]
pub fn bba_alt_code(&mut self) -> _BBA_ALT_CODEW {
_BBA_ALT_CODEW { w: self }
}
#[doc = "Bits 4:11 - LNA_ALT_CODE"]
#[inline]
pub fn lna_alt_code(&mut self) -> _LNA_ALT_CODEW {
_LNA_ALT_CODEW { w: self }
}
#[doc = "Bits 12:15 - LNA_USER_GAIN"]
#[inline]
pub fn lna_user_gain(&mut self) -> _LNA_USER_GAINW {
_LNA_USER_GAINW { w: self }
}
#[doc = "Bits 16:19 - BBA_USER_GAIN"]
#[inline]
pub fn bba_user_gain(&mut self) -> _BBA_USER_GAINW {
_BBA_USER_GAINW { w: self }
}
#[doc = "Bit 20 - User LNA Gain Enable"]
#[inline]
pub fn user_lna_gain_en(&mut self) -> _USER_LNA_GAIN_ENW {
_USER_LNA_GAIN_ENW { w: self }
}
#[doc = "Bit 21 - User BBA Gain Enable"]
#[inline]
pub fn user_bba_gain_en(&mut self) -> _USER_BBA_GAIN_ENW {
_USER_BBA_GAIN_ENW { w: self }
}
#[doc = "Bit 22 - Pre-slow Enable"]
#[inline]
pub fn preslow_en(&mut self) -> _PRESLOW_ENW {
_PRESLOW_ENW { w: self }
}
#[doc = "Bits 24:31 - LNA_GAIN_SETTLE_TIME"]
#[inline]
pub fn lna_gain_settle_time(&mut self) -> _LNA_GAIN_SETTLE_TIMEW {
_LNA_GAIN_SETTLE_TIMEW { w: self }
}
}
| {
const MASK: bool = true;
const OFFSET: u8 = 20;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
} |
helpers.go | package reference
import "path"
// IsNameOnly returns true if reference only contains a repo name.
func IsNameOnly(ref Named) bool {
if _, ok := ref.(NamedTagged); ok {
return false
}
if _, ok := ref.(Canonical); ok {
return false
}
return true
}
// FamiliarName returns the familiar name string
// for the given named, familiarizing if needed.
func | (ref Named) string {
if nn, ok := ref.(normalizedNamed); ok {
return nn.Familiar().Name()
}
return ref.Name()
}
// FamiliarString returns the familiar string representation
// for the given reference, familiarizing if needed.
func FamiliarString(ref Reference) string {
if nn, ok := ref.(normalizedNamed); ok {
return nn.Familiar().String()
}
return ref.String()
}
// FamiliarMatch reports whether ref matches the specified pattern.
// See http://godoc.org/path#Match for supported patterns.
func FamiliarMatch(pattern string, ref Reference) (bool, error) {
matched, err := path.Match(pattern, FamiliarString(ref))
if namedRef, isNamed := ref.(Named); isNamed && !matched {
matched, _ = path.Match(pattern, FamiliarName(namedRef))
}
return matched, err
}
| FamiliarName |
registry_test.go | package lint |
"github.com/googleapis/api-linter/testutil"
"google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/descriptorpb"
)
func TestMakeRegistryFromAllFiles(t *testing.T) {
barProto := testutil.MustCreateFileDescriptorProto(t, testutil.FileDescriptorSpec{
Filename: "bar.proto",
Template: `syntax = "proto3";
message Bar {
string baz = 1;
}`,
})
fooProto := testutil.MustCreateFileDescriptorProto(t, testutil.FileDescriptorSpec{
Filename: "foo.proto",
Template: `syntax = "proto3";
import "bar.proto";
message Foo {
Bar bar = 1;
}`,
Deps: []*descriptorpb.FileDescriptorProto{barProto},
})
reg, err := makeRegistryFromAllFiles([]*descriptorpb.FileDescriptorProto{fooProto, barProto})
if err != nil {
t.Fatalf("makeRegistryFromAllFiles() returned error %q; want nil", err)
}
fooDesc, err := reg.FindDescriptorByName("Foo")
if err != nil {
t.Fatalf("reg.FindDescriptorByName(%q) returned error %q; want nil", fooProto.GetName(), err)
}
foo, ok := fooDesc.(protoreflect.MessageDescriptor)
if !ok {
t.Fatalf("reg.FindDescriptorByname(%q) returned a non-message descriptor: %+v", fooProto.GetName(), fooDesc)
}
if foo.Fields().Len() != 1 {
t.Fatalf("foo.Fields().Len()=%d; want 1", foo.Fields().Len())
}
if foo.Fields().Get(0).Message() == nil {
t.Fatalf("foo.Fields().Get(0).Message() was nil")
}
if foo.Fields().Get(0).Message().Name() != "Bar" {
t.Fatalf("foo.Fields().Get(0).Message().Name() = %q; want %q", foo.Fields().Get(0).Message().Name(), "Bar")
}
if foo.Fields().Get(0).Message().IsPlaceholder() {
t.Fatalf("foo.Fields().Get(0).IsPlaceholder()=true; want false")
}
}
// TestMakeRegistryFromAllFiles_DirectAndIndirectDependencies verifies
// resolution when a file imports another file both directly and through an
// intermediate import (baz -> {bar, foo}, foo -> bar): every cross-file
// message reference must resolve to a real, non-placeholder descriptor.
func TestMakeRegistryFromAllFiles_DirectAndIndirectDependencies(t *testing.T) {
	barProto := testutil.MustCreateFileDescriptorProto(t, testutil.FileDescriptorSpec{
		Filename: "bar.proto",
		Template: `syntax = "proto3";
message Bar {
  string baz = 1;
}`,
	})
	fooProto := testutil.MustCreateFileDescriptorProto(t, testutil.FileDescriptorSpec{
		Filename: "foo.proto",
		Template: `syntax = "proto3";
import "bar.proto";
message Foo {
  Bar bar = 1;
}`,
		Deps: []*descriptorpb.FileDescriptorProto{barProto},
	})
	bazProto := testutil.MustCreateFileDescriptorProto(t, testutil.FileDescriptorSpec{
		Filename: "baz.proto",
		Template: `syntax = "proto3";
import "bar.proto";
import "foo.proto";
message Baz {
  Foo foo = 1;
  Bar bar = 2;
}
`,
		Deps: []*descriptorpb.FileDescriptorProto{barProto, fooProto},
	})
	reg, err := makeRegistryFromAllFiles([]*descriptorpb.FileDescriptorProto{fooProto, barProto, bazProto})
	if err != nil {
		t.Fatalf("makeRegistryFromAllFiles() returned error %q; want nil", err)
	}
	fooDesc, err := reg.FindDescriptorByName("Foo")
	if err != nil {
		// Message names the method actually called (FindDescriptorByName).
		t.Fatalf("reg.FindDescriptorByName(%q) returned error %q; want nil", "Foo", err)
	}
	foo, ok := fooDesc.(protoreflect.MessageDescriptor)
	if !ok {
		t.Fatalf("reg.FindDescriptorByName(%q) returned a non-message descriptor: %+v", "Foo", fooDesc)
	}
	if foo.Fields().Len() != 1 {
		t.Fatalf("foo.Fields().Len()=%d; want 1", foo.Fields().Len())
	}
	if foo.Fields().Get(0).Message() == nil {
		t.Fatalf("foo.Fields().Get(0).Message() was nil")
	}
	if foo.Fields().Get(0).Message().Name() != "Bar" {
		t.Fatalf("foo.Fields().Get(0).Message().Name() = %q; want %q", foo.Fields().Get(0).Message().Name(), "Bar")
	}
	if foo.Fields().Get(0).Message().IsPlaceholder() {
		t.Fatalf("foo.Fields().Get(0).Message().IsPlaceholder()=true; want false")
	}
	bazDesc, err := reg.FindDescriptorByName("Baz")
	if err != nil {
		t.Fatalf("reg.FindDescriptorByName(%q) returned error %q; want nil", "Baz", err)
	}
	baz, ok := bazDesc.(protoreflect.MessageDescriptor)
	if !ok {
		t.Fatalf("reg.FindDescriptorByName(%q) returned a non-message descriptor: %+v", "Baz", bazDesc)
	}
	if baz.Fields().Len() != 2 {
		t.Fatalf("baz.Fields().Len()=%d; want 2", baz.Fields().Len())
	}
	if baz.Fields().Get(0).Message() == nil {
		t.Fatalf("baz.Fields().Get(0).Message() was nil")
	}
	if baz.Fields().Get(0).Message().Name() != "Foo" {
		// Was formatting Get(1) while checking Get(0); report the value tested.
		t.Fatalf("baz.Fields().Get(0).Message().Name() = %q; want %q", baz.Fields().Get(0).Message().Name(), "Foo")
	}
	if baz.Fields().Get(0).Message().IsPlaceholder() {
		t.Fatalf("baz.Fields().Get(0).Message().IsPlaceholder()=true; want false")
	}
	if baz.Fields().Get(1).Message() == nil {
		t.Fatalf("baz.Fields().Get(1).Message() was nil")
	}
	if baz.Fields().Get(1).Message().Name() != "Bar" {
		t.Fatalf("baz.Fields().Get(1).Message().Name() = %q; want %q", baz.Fields().Get(1).Message().Name(), "Bar")
	}
	if baz.Fields().Get(1).Message().IsPlaceholder() {
		t.Fatalf("baz.Fields().Get(1).Message().IsPlaceholder()=true; want false")
	}
}
func TestMakeRegistryFromAllFiles_MissingImports(t *testing.T) {
barProto := testutil.MustCreateFileDescriptorProto(t, testutil.FileDescriptorSpec{
Filename: "bar.proto",
Template: `syntax = "proto3";
message Bar {
string baz = 1;
}`,
})
fooProto := testutil.MustCreateFileDescriptorProto(t, testutil.FileDescriptorSpec{
Filename: "foo.proto",
Template: `syntax = "proto3";
import "bar.proto";
message Foo {
Bar bar = 1;
}`,
Deps: []*descriptorpb.FileDescriptorProto{barProto},
})
reg, err := makeRegistryFromAllFiles([]*descriptorpb.FileDescriptorProto{fooProto})
if err != nil {
t.Fatalf("makeRegistryFromAllFiles() returned error %q; want nil", err)
}
fooDesc, err := reg.FindDescriptorByName("Foo")
if err != nil {
t.Fatalf("Failed to find Foo message: %s.", err)
}
foo, ok := fooDesc.(protoreflect.MessageDescriptor)
if !ok {
t.Fatalf("reg.FindDescriptorByname(%q) returned a non-message descriptor: %+v", fooProto.GetName(), fooDesc)
}
if foo.Fields().Len() != 1 {
t.Fatalf("foo.Fields().Len()=%d; want 1", foo.Fields().Len())
}
if foo.Fields().Get(0).Message() == nil {
t.Fatalf("foo.Fields().Get(0).Message() was nil")
}
if foo.Fields().Get(0).Message().Name() != "Bar" {
t.Fatalf("foo.Fields().Get(0).Message().Name() = %q; want %q", foo.Fields().Get(0).Message().Name(), "Bar")
}
if !foo.Fields().Get(0).Message().IsPlaceholder() {
t.Fatalf("foo.Fields().Get(0).IsPlaceholder()=true; want false")
}
} |
import (
"testing" |
insert.rs | // Copyright 2020 Alex Dukhno
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use super::*;
use sql_types::SqlType;
#[rstest::rstest]
fn insert_into_non_existent_schema(mut storage: PersistentStorage) {
    // Inserting into a schema that was never created must be rejected
    // with `SchemaDoesNotExist` rather than a system error.
    let outcome = storage
        .insert_into("non_existent", "not_existed", vec![], vec![vec!["123".to_owned()]])
        .expect("no system errors");
    assert_eq!(outcome, Err(OperationOnTableError::SchemaDoesNotExist));
}
#[rstest::rstest]
fn insert_into_non_existent_table(mut storage: PersistentStorage) {
    // The schema exists but the table does not: `TableDoesNotExist`.
    create_schema(&mut storage, "schema_name");
    let outcome = storage
        .insert_into("schema_name", "not_existed", vec![], vec![vec!["123".to_owned()]])
        .expect("no system errors");
    assert_eq!(outcome, Err(OperationOnTableError::TableDoesNotExist));
}
#[rstest::rstest]
fn insert_many_rows_into_table(mut storage: PersistentStorage) {
    // Two separate single-row inserts must both be visible in a full scan,
    // in insertion order.
    create_schema_with_table(
        &mut storage,
        "schema_name",
        "table_name",
        vec![("column_test", SqlType::SmallInt)],
    );
    for value in vec!["123", "456"] {
        insert_into(&mut storage, "schema_name", "table_name", vec![], vec![value]);
    }
    let column_names = storage
        .table_columns("schema_name", "table_name")
        .expect("no system errors")
        .into_iter()
        .map(|(name, _sql_type)| name)
        .collect();
    assert_eq!(
        storage
            .select_all_from("schema_name", "table_name", column_names)
            .expect("no system errors"),
        Ok((
            vec![("column_test".to_owned(), SqlType::SmallInt)],
            vec![vec!["123".to_owned()], vec!["456".to_owned()]]
        ))
    );
}
#[rstest::rstest]
fn insert_multiple_values_rows(mut storage: PersistentStorage) {
    // Three smallint columns; insert three multi-value rows and read
    // them all back in insertion order.
    create_schema_with_table(
        &mut storage,
        "schema_name",
        "table_name",
        vec![
            ("column_1", SqlType::SmallInt),
            ("column_2", SqlType::SmallInt),
            ("column_3", SqlType::SmallInt),
        ],
    );
    for row in vec![vec!["1", "2", "3"], vec!["4", "5", "6"], vec!["7", "8", "9"]] {
        insert_into(&mut storage, "schema_name", "table_name", vec![], row);
    }
    let column_names = storage
        .table_columns("schema_name", "table_name")
        .expect("no system errors")
        .into_iter()
        .map(|(name, _sql_type)| name)
        .collect();
    let expected_definition = vec![
        ("column_1".to_owned(), SqlType::SmallInt),
        ("column_2".to_owned(), SqlType::SmallInt),
        ("column_3".to_owned(), SqlType::SmallInt),
    ];
    let expected_rows = vec![
        vec!["1".to_owned(), "2".to_owned(), "3".to_owned()],
        vec!["4".to_owned(), "5".to_owned(), "6".to_owned()],
        vec!["7".to_owned(), "8".to_owned(), "9".to_owned()],
    ];
    assert_eq!(
        storage
            .select_all_from("schema_name", "table_name", column_names)
            .expect("no system errors"),
        Ok((expected_definition, expected_rows))
    );
}
#[rstest::rstest]
fn insert_named_columns(mut storage: PersistentStorage) {
    // The column list names columns in reverse order; values must land in
    // the named columns, so a full scan (table definition order) returns
    // each row reversed relative to its insert literal.
    create_schema_with_table(
        &mut storage,
        "schema_name",
        "table_name",
        vec![
            ("column_1", SqlType::SmallInt),
            ("column_2", SqlType::Char(10)),
            ("column_3", SqlType::BigInt),
        ],
    );
    let named_columns = vec!["column_3", "column_2", "column_1"];
    for row in vec![vec!["1", "2", "3"], vec!["4", "5", "6"], vec!["7", "8", "9"]] {
        insert_into(
            &mut storage,
            "schema_name",
            "table_name",
            named_columns.clone(),
            row,
        );
    }
    let column_names = storage
        .table_columns("schema_name", "table_name")
        .expect("no system errors")
        .into_iter()
        .map(|(name, _sql_type)| name)
        .collect();
    assert_eq!(
        storage
            .select_all_from("schema_name", "table_name", column_names)
            .expect("no system errors"),
        Ok((
            vec![
                ("column_1".to_owned(), SqlType::SmallInt),
                ("column_2".to_owned(), SqlType::Char(10)),
                ("column_3".to_owned(), SqlType::BigInt)
            ],
            vec![
                vec!["3".to_owned(), "2".to_owned(), "1".to_owned()],
                vec!["6".to_owned(), "5".to_owned(), "4".to_owned()],
                vec!["9".to_owned(), "8".to_owned(), "7".to_owned()],
            ],
        ))
    );
}
#[rstest::rstest]
fn insert_named_not_existed_column(mut storage: PersistentStorage) {
create_schema_with_table(
&mut storage,
"schema_name",
"table_name",
vec![
("column_1", SqlType::SmallInt),
("column_2", SqlType::Char(10)),
("column_3", SqlType::BigInt),
],
);
let columns = vec![
"column_3".to_owned(),
"column_2".to_owned(),
"column_1".to_owned(),
"not_existed".to_owned(),
];
assert_eq!(
storage
.insert_into(
"schema_name",
"table_name",
columns,
vec![vec!["1".to_owned(), "2".to_owned(), "3".to_owned(), "4".to_owned()]],
)
.expect("no system errors"),
Err(OperationOnTableError::ColumnDoesNotExist( | ))
)
}
#[rstest::rstest]
fn insert_row_into_table(mut storage: PersistentStorage) {
    // A single-row insert succeeds and the row comes back from a full scan.
    create_schema_with_table(
        &mut storage,
        "schema_name",
        "table_name",
        vec![("column_test", SqlType::SmallInt)],
    );
    let inserted = storage
        .insert_into("schema_name", "table_name", vec![], vec![vec!["123".to_owned()]])
        .expect("no system errors");
    assert_eq!(inserted, Ok(()));
    let column_names = storage
        .table_columns("schema_name", "table_name")
        .expect("no system errors")
        .into_iter()
        .map(|(name, _sql_type)| name)
        .collect();
    assert_eq!(
        storage
            .select_all_from("schema_name", "table_name", column_names)
            .expect("no system errors"),
        Ok((
            vec![("column_test".to_owned(), SqlType::SmallInt)],
            vec![vec!["123".to_owned()]]
        ))
    );
}
#[rstest::rstest]
fn insert_too_many_expressions(mut storage: PersistentStorage) {
    // With no explicit column list, four values against a three-column
    // table is rejected, and the table must remain empty afterwards.
    create_schema_with_table(
        &mut storage,
        "schema_name",
        "table_name",
        vec![
            ("column_1", SqlType::SmallInt),
            ("column_2", SqlType::Char(10)),
            ("column_3", SqlType::BigInt),
        ],
    );
    let oversized_row = vec![vec!["1".to_owned(), "2".to_owned(), "3".to_owned(), "4".to_owned()]];
    assert_eq!(
        storage
            .insert_into("schema_name", "table_name", vec![], oversized_row)
            .expect("no system errors"),
        Err(OperationOnTableError::InsertTooManyExpressions)
    );
    let column_names = storage
        .table_columns("schema_name", "table_name")
        .expect("no system errors")
        .into_iter()
        .map(|(name, _sql_type)| name)
        .collect();
    assert_eq!(
        storage
            .select_all_from("schema_name", "table_name", column_names)
            .expect("no system errors"),
        Ok((
            vec![
                ("column_1".to_owned(), SqlType::SmallInt),
                ("column_2".to_owned(), SqlType::Char(10)),
                ("column_3".to_owned(), SqlType::BigInt),
            ],
            vec![]
        ))
    );
}
#[rstest::rstest]
fn insert_too_many_expressions_labeled(mut storage: PersistentStorage) {
    // Same as the unlabeled case, but with an explicit column list: four
    // values against three named columns is rejected and nothing is stored.
    create_schema_with_table(
        &mut storage,
        "schema_name",
        "table_name",
        vec![
            ("column_1", SqlType::SmallInt),
            ("column_2", SqlType::Char(10)),
            ("column_3", SqlType::BigInt),
        ],
    );
    let named_columns = vec!["column_3".to_owned(), "column_2".to_owned(), "column_1".to_owned()];
    let oversized_row = vec![vec!["1".to_owned(), "2".to_owned(), "3".to_owned(), "4".to_owned()]];
    assert_eq!(
        storage
            .insert_into("schema_name", "table_name", named_columns, oversized_row)
            .expect("no system errors"),
        Err(OperationOnTableError::InsertTooManyExpressions)
    );
    let column_names = storage
        .table_columns("schema_name", "table_name")
        .expect("no system errors")
        .into_iter()
        .map(|(name, _sql_type)| name)
        .collect();
    assert_eq!(
        storage
            .select_all_from("schema_name", "table_name", column_names)
            .expect("no system errors"),
        Ok((
            vec![
                ("column_1".to_owned(), SqlType::SmallInt),
                ("column_2".to_owned(), SqlType::Char(10)),
                ("column_3".to_owned(), SqlType::BigInt),
            ],
            vec![]
        ))
    );
}
#[cfg(test)]
mod constraints {
    //! Constraint checks performed by `insert_into`: values must fit the
    //! declared SQL type (numeric range, integer-ness, character length).
    //! Violations are reported per constraint kind, grouped per row.

    use super::*;

    // Fixture: `schema_name.table_name` with one column per integer width
    // (smallint / integer / bigint).
    #[rstest::fixture]
    fn storage_with_ints_table(mut storage: PersistentStorage) -> PersistentStorage {
        create_schema_with_table(
            &mut storage,
            "schema_name",
            "table_name",
            vec![
                ("column_si", SqlType::SmallInt),
                ("column_i", SqlType::Integer),
                ("column_bi", SqlType::BigInt),
            ],
        );
        storage
    }

    // Fixture: table with fixed-length (CHAR) and variable-length
    // (VARCHAR) character columns, both capped at 10 characters.
    #[rstest::fixture]
    fn storage_with_chars_table(mut storage: PersistentStorage) -> PersistentStorage {
        create_schema_with_table(
            &mut storage,
            "schema_name",
            "table_name",
            vec![("column_c", SqlType::Char(10)), ("column_vc", SqlType::VarChar(10))],
        );
        storage
    }

    // -32769 is one below SMALLINT's minimum, so only `column_si` is
    // reported out of range; the other values fit their columns.
    #[rstest::rstest]
    fn out_of_range_violation(mut storage_with_ints_table: PersistentStorage) {
        assert_eq!(
            storage_with_ints_table
                .insert_into(
                    "schema_name",
                    "table_name",
                    vec![],
                    vec![vec!["-32769".to_owned(), "100".to_owned(), "100".to_owned()]],
                )
                .expect("no system errors"),
            Err(constraint_violations(
                ConstraintError::OutOfRange,
                vec![vec![("column_si".to_owned(), SqlType::SmallInt)]]
            ))
        );
    }

    // Non-numeric text supplied for an integer column.
    #[rstest::rstest]
    fn not_an_int_violation(mut storage_with_ints_table: PersistentStorage) {
        assert_eq!(
            storage_with_ints_table
                .insert_into(
                    "schema_name",
                    "table_name",
                    vec![],
                    vec![vec!["abc".to_owned(), "100".to_owned(), "100".to_owned()]],
                )
                .expect("no system errors"),
            Err(constraint_violations(
                ConstraintError::NotAnInt,
                vec![vec![("column_si".to_owned(), SqlType::SmallInt)]]
            ))
        );
    }

    // Eleven characters into CHAR(10) is too long for `column_c`.
    #[rstest::rstest]
    fn value_too_long_violation(mut storage_with_chars_table: PersistentStorage) {
        assert_eq!(
            storage_with_chars_table
                .insert_into(
                    "schema_name",
                    "table_name",
                    vec![],
                    vec![vec!["12345678901".to_owned(), "100".to_owned()]],
                )
                .expect("no system errors"),
            Err(constraint_violations(
                ConstraintError::ValueTooLong,
                vec![vec![("column_c".to_owned(), SqlType::Char(10))]]
            ))
        );
    }

    // Two columns of the same row violate the same constraint: both are
    // collected into a single per-row group.
    #[rstest::rstest]
    fn multiple_columns_single_row_violation(mut storage_with_ints_table: PersistentStorage) {
        assert_eq!(
            storage_with_ints_table
                .insert_into(
                    "schema_name",
                    "table_name",
                    vec![],
                    vec![vec!["-32769".to_owned(), "-2147483649".to_owned(), "100".to_owned()]],
                )
                .expect("no system errors"),
            Err(constraint_violations(
                ConstraintError::OutOfRange,
                vec![vec![
                    ("column_si".to_owned(), SqlType::SmallInt),
                    ("column_i".to_owned(), SqlType::Integer)
                ]]
            ))
        )
    }

    // Violations spread across several rows: one (column, type) group is
    // produced per offending row, in row order.
    #[rstest::rstest]
    fn multiple_columns_multiple_row_violation(mut storage_with_ints_table: PersistentStorage) {
        assert_eq!(
            storage_with_ints_table
                .insert_into(
                    "schema_name",
                    "table_name",
                    vec![],
                    vec![
                        vec!["-32769".to_owned(), "-2147483649".to_owned(), "100".to_owned()],
                        vec![
                            "100".to_owned(),
                            "-2147483649".to_owned(),
                            "-9223372036854775809".to_owned()
                        ],
                    ],
                )
                .expect("no system errors"),
            Err(constraint_violations(
                ConstraintError::OutOfRange,
                vec![
                    vec![
                        ("column_si".to_owned(), SqlType::SmallInt),
                        ("column_i".to_owned(), SqlType::Integer)
                    ],
                    vec![
                        ("column_i".to_owned(), SqlType::Integer),
                        ("column_bi".to_owned(), SqlType::BigInt)
                    ]
                ]
            ))
        )
    }

    // Builds the expected error: a single constraint kind mapped to the
    // per-row lists of offending (column name, declared type) pairs.
    fn constraint_violations(error: ConstraintError, columns: Vec<Vec<(String, SqlType)>>) -> OperationOnTableError {
        let mut map = HashMap::new();
        map.insert(error, columns);
        OperationOnTableError::ConstraintViolation(map)
    }
}
record_episode_statistics.py | import time
from collections import deque
import gym
class RecordEpisodeStatistics(gym.Wrapper):
    """Tracks the running return and horizon (step count) of each episode.

    On episode end, the totals are published under ``info['episode']`` as
    ``return``, ``horizon`` and ``time`` (seconds since wrapper creation,
    rounded to 4 decimals), and appended to bounded rolling queues.
    """

    def __init__(self, env, deque_size=100):
        super().__init__(env)
        self.t0 = time.perf_counter()
        self.episode_return = 0.0
        self.episode_horizon = 0
        # Rolling windows of the most recent completed episodes.
        self.return_queue = deque(maxlen=deque_size)
        self.horizon_queue = deque(maxlen=deque_size)

    def reset(self, **kwargs):
        observation = super().reset(**kwargs)
        # A reset abandons any partially-accumulated episode statistics.
        self.episode_return = 0.0
        self.episode_horizon = 0
        return observation

    def step(self, action):
        observation, reward, done, info = super().step(action)
        self.episode_return += reward
        self.episode_horizon += 1
        if done:
            info['episode'] = {
                'return': self.episode_return,
                'horizon': self.episode_horizon,
                'time': round(time.perf_counter() - self.t0, 4),
            }
            self.return_queue.append(self.episode_return)
            self.horizon_queue.append(self.episode_horizon)
            self.episode_return = 0.0
            self.episode_horizon = 0
        return observation, reward, done, info
Subsets and Splits