file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
treeItemSelectableComponent.tsx
|
import { Nullable } from "babylonjs/types";
import { IExplorerExtensibilityGroup } from "babylonjs/Debug/debugLayer";
import { TreeItemSpecializedComponent } from "./treeItemSpecializedComponent";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import { faMinus, faPlus } from "@fortawesome/free-solid-svg-icons";
import { Tools } from "../../tools";
import * as ReactDOM from "react-dom";
import * as React from "react";
import { GlobalState } from "../globalState";
export interface ITreeItemSelectableComponentProps {
entity: any,
selectedEntity?: any,
mustExpand?: boolean,
offset: number,
globalState: GlobalState,
extensibilityGroups?: IExplorerExtensibilityGroup[],
filter: Nullable<string>
}
export class TreeItemSelectableComponent extends React.Component<ITreeItemSelectableComponentProps, { isExpanded: boolean, isSelected: boolean }> {
private _wasSelected = false;
|
this.state = { isSelected: this.props.entity === this.props.selectedEntity, isExpanded: this.props.mustExpand || Tools.LookForItem(this.props.entity, this.props.selectedEntity) };
}
/**
 * Toggles the expanded/collapsed state of this tree item.
 * Uses the functional form of setState because the next value depends on
 * the previous state; the object form can read a stale value when React
 * batches several updates together.
 */
switchExpandedState(): void {
    this.setState((prevState) => ({ isExpanded: !prevState.isExpanded }));
}
shouldComponentUpdate(nextProps: ITreeItemSelectableComponentProps, nextState: { isExpanded: boolean, isSelected: boolean }) {
    // Collapsing always re-renders so the children are unmounted.
    if (!nextState.isExpanded && this.state.isExpanded) {
        return true;
    }
    // NOTE(review): every path through this method returns true, so it never
    // actually suppresses a render; its real job is to sync isSelected /
    // isExpanded with the incoming selectedEntity by mutating nextState.
    // Mutating state inside shouldComponentUpdate is a React anti-pattern —
    // consider getDerivedStateFromProps instead.
    if (nextProps.selectedEntity) {
        if (nextProps.entity === nextProps.selectedEntity) {
            // This item just became the selected one.
            nextState.isSelected = true;
            return true;
        } else {
            nextState.isSelected = false;
        }
        // Auto-expand when the selected entity lives somewhere below this node.
        if (Tools.LookForItem(nextProps.entity, nextProps.selectedEntity)) {
            nextState.isExpanded = true;
            return true;
        }
    }
    return true;
}
/**
 * Scrolls this tree item's DOM node into view inside the explorer panel.
 * No-op when the component has no rendered DOM node.
 */
scrollIntoView() {
    const node = ReactDOM.findDOMNode(this) as Element;
    if (!node) {
        return;
    }
    node.scrollIntoView(false);
}
/** After the initial mount, make sure an already-selected item is visible. */
componentDidMount() {
    if (!this.state.isSelected) {
        return;
    }
    this.scrollIntoView();
}
/**
 * Scrolls the item into view when it became selected from outside this
 * component (i.e. not via a click handled by onSelect, which sets
 * _wasSelected first). The flag is always cleared afterwards.
 */
componentDidUpdate() {
    const selectedExternally = this.state.isSelected && !this._wasSelected;
    if (selectedExternally) {
        this.scrollIntoView();
    }
    this._wasSelected = false;
}
/**
 * Notifies the global selection observable that this entity was clicked.
 * Marks the selection as user-initiated so componentDidUpdate does not
 * scroll the item into view a second time.
 */
onSelect() {
    const observable = this.props.globalState.onSelectionChangedObservable;
    if (!observable) {
        return;
    }
    this._wasSelected = true;
    observable.notifyObservers(this.props.entity);
}
/**
 * Renders the child tree items, or nothing when the node is collapsed or
 * exposes no children (neither a getChildren() method nor a children array).
 */
renderChildren() {
    const entity = this.props.entity;
    const hasChildSource = entity.getChildren || entity.children;
    if (!hasChildSource || !this.state.isExpanded) {
        return null;
    }
    const rawChildren = entity.getChildren ? entity.getChildren() : entity.children;
    return Tools.SortAndFilter(entity, rawChildren).map((item) => (
        <TreeItemSelectableComponent globalState={this.props.globalState} mustExpand={this.props.mustExpand} extensibilityGroups={this.props.extensibilityGroups} selectedEntity={this.props.selectedEntity} key={item.uniqueId} offset={this.props.offset + 2} entity={item} filter={this.props.filter} />
    ));
}
render() {
    // Indent each nesting level by 10px (plus a half-step base offset).
    const marginStyle = {
        paddingLeft: (10 * (this.props.offset + 0.5)) + "px"
    };
    const entity = this.props.entity;
    const chevron = this.state.isExpanded ? <FontAwesomeIcon icon={faMinus} /> : <FontAwesomeIcon icon={faPlus} />
    // MultiMaterial entities never list their children in the tree.
    const children = entity.getClassName() === "MultiMaterial" ? [] : Tools.SortAndFilter(entity, entity.getChildren ? entity.getChildren() : entity.children);
    const hasChildren = children.length > 0;
    // Expose the expansion state on the entity itself so code outside React
    // can read it and drive it via setExpandedState.
    if (!entity.reservedDataStore) {
        entity.reservedDataStore = {};
    }
    entity.reservedDataStore.setExpandedState = (value: boolean) => {
        this.setState({ isExpanded: value });
    }
    entity.reservedDataStore.isExpanded = this.state.isExpanded;
    // Filtering: hide this item when its name does not match, unless some
    // descendant's name matches (case-insensitive substring match).
    if (this.props.filter) {
        const lowerCaseFilter = this.props.filter.toLowerCase();
        if (!entity.name || entity.name.toLowerCase().indexOf(lowerCaseFilter) === -1) {
            if (!hasChildren) {
                return null;
            }
            if (entity.getDescendants) {
                if (entity.getDescendants(false, (n: any) => {
                    return n.name && n.name.toLowerCase().indexOf(lowerCaseFilter) !== -1
                }).length === 0) {
                    return null;
                }
            }
        }
    }
    return (
        <div>
            <div className={this.state.isSelected ? "itemContainer selected" : "itemContainer"} style={marginStyle} >
                {
                    hasChildren &&
                    <div className="arrow icon" onClick={() => this.switchExpandedState()}>
                        {chevron}
                    </div>
                }
                <TreeItemSpecializedComponent globalState={this.props.globalState} extensibilityGroups={this.props.extensibilityGroups} label={entity.name} entity={entity} onClick={() => this.onSelect()} />
            </div>
            {
                this.renderChildren()
            }
        </div >
    );
}
}
|
constructor(props: ITreeItemSelectableComponentProps) {
super(props);
|
Navigation.tsx
|
import React from 'react';
import {Scrollable} from '../Scrollable';
import {classNames} from '../../utilities/css';
import {useTheme} from '../../utilities/theme';
|
import {Image} from '../Image';
import {UnstyledLink} from '../UnstyledLink';
import {getWidth} from '../../utilities/get-width';
import {NavigationContext} from './context';
import {Section, Item} from './components';
import {SectionType} from './types';
import styles from './Navigation.scss';
export interface NavigationProps {
location: string;
sections?: SectionType[];
children?: React.ReactNode;
contextControl?: React.ReactNode;
onDismiss?(): void;
}
export const Navigation: React.FunctionComponent<NavigationProps> & {
Item: typeof Item;
Section: typeof Section;
} = function Navigation({
children,
contextControl,
location,
onDismiss,
}: NavigationProps) {
const {logo} = useTheme();
const {newDesignLanguage} = useFeatures();
const width = getWidth(logo, 104);
const logoMarkup =
logo && newDesignLanguage ? (
<div className={styles.LogoContainer}>
<UnstyledLink
url={logo.url || ''}
className={styles.LogoLink}
style={{width}}
>
<Image
source={logo.topBarSource || ''}
alt={logo.accessibilityLabel || ''}
className={styles.Logo}
style={{width}}
/>
</UnstyledLink>
</div>
) : null;
const mediaMarkup = contextControl ? (
<div className={styles.ContextControl}>{contextControl}</div>
) : (
logoMarkup
);
const className = classNames(
styles.Navigation,
!mediaMarkup && newDesignLanguage && styles['Navigation-noMedia'],
newDesignLanguage && styles['Navigation-newDesignLanguage'],
);
const context = {
location,
onNavigationDismiss: onDismiss,
};
return (
<NavigationContext.Provider value={context}>
<WithinContentContext.Provider value>
<nav className={className}>
{mediaMarkup}
<Scrollable className={styles.PrimaryNavigation}>
{children}
</Scrollable>
</nav>
</WithinContentContext.Provider>
</NavigationContext.Provider>
);
};
Navigation.Item = Item;
Navigation.Section = Section;
|
import {useFeatures} from '../../utilities/features';
import {WithinContentContext} from '../../utilities/within-content-context';
|
get_http_response_rule.go
|
// Code generated by go-swagger; DO NOT EDIT.
// Copyright 2019 HAProxy Technologies
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package http_response_rule
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the generate command
import (
"net/http"
errors "github.com/go-openapi/errors"
middleware "github.com/go-openapi/runtime/middleware"
strfmt "github.com/go-openapi/strfmt"
swag "github.com/go-openapi/swag"
"github.com/haproxytech/models"
)
// GetHTTPResponseRuleHandlerFunc turns a function with the right signature into a get HTTP response rule handler
type GetHTTPResponseRuleHandlerFunc func(GetHTTPResponseRuleParams, interface{}) middleware.Responder

// Handle executing the request and returning a response
func (fn GetHTTPResponseRuleHandlerFunc) Handle(params GetHTTPResponseRuleParams, principal interface{}) middleware.Responder {
	// Adapter: the function value itself satisfies GetHTTPResponseRuleHandler.
	return fn(params, principal)
}

// GetHTTPResponseRuleHandler interface for that can handle valid get HTTP response rule params
type GetHTTPResponseRuleHandler interface {
	Handle(GetHTTPResponseRuleParams, interface{}) middleware.Responder
}
// NewGetHTTPResponseRule creates a new http.Handler for the get HTTP response rule operation
func NewGetHTTPResponseRule(ctx *middleware.Context, handler GetHTTPResponseRuleHandler) *GetHTTPResponseRule
|
/*GetHTTPResponseRule swagger:route GET /services/haproxy/configuration/http_response_rules/{id} HTTPResponseRule getHttpResponseRule
Return one HTTP Response Rule
Returns one HTTP Response Rule configuration by it's ID in the specified parent.
*/
type GetHTTPResponseRule struct {
	Context *middleware.Context        // runtime context used for routing, authorization and responding
	Handler GetHTTPResponseRuleHandler // application handler invoked once the request is bound
}
// ServeHTTP implements http.Handler: it resolves the route, authorizes the
// request, binds and validates the parameters, then delegates to the handler.
func (o *GetHTTPResponseRule) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
	route, rCtx, _ := o.Context.RouteInfo(r)
	if rCtx != nil {
		r = rCtx
	}
	var Params = NewGetHTTPResponseRuleParams()

	// Authorize before binding; failures are rendered immediately.
	uprinc, aCtx, err := o.Context.Authorize(r, route)
	if err != nil {
		o.Context.Respond(rw, r, route.Produces, route, err)
		return
	}
	if aCtx != nil {
		r = aCtx
	}
	// principal stays nil when no authenticated principal was produced.
	var principal interface{}
	if uprinc != nil {
		principal = uprinc
	}

	if err := o.Context.BindValidRequest(r, route, &Params); err != nil { // bind params
		o.Context.Respond(rw, r, route.Produces, route, err)
		return
	}

	res := o.Handler.Handle(Params, principal) // actually handle the request

	o.Context.Respond(rw, r, route.Produces, route, res)
}
// GetHTTPResponseRuleOKBody get HTTP response rule o k body
// Wire format of the 200 response: the rule payload plus a _version field.
// swagger:model GetHTTPResponseRuleOKBody
type GetHTTPResponseRuleOKBody struct {

	// version
	Version int64 `json:"_version,omitempty"`

	// data
	Data *models.HTTPResponseRule `json:"data,omitempty"`
}
// Validate validates this get HTTP response rule o k body
// Validate validates this get HTTP response rule o k body
func (o *GetHTTPResponseRuleOKBody) Validate(formats strfmt.Registry) error {
	// Collect field-level failures and report them together.
	var validationErrors []error

	if err := o.validateData(formats); err != nil {
		validationErrors = append(validationErrors, err)
	}

	if len(validationErrors) == 0 {
		return nil
	}
	return errors.CompositeValidationError(validationErrors...)
}
// validateData runs the nested model validation on the optional Data field.
func (o *GetHTTPResponseRuleOKBody) validateData(formats strfmt.Registry) error {
	// Data is optional: a zero value passes validation.
	if swag.IsZero(o.Data) {
		return nil
	}
	if o.Data == nil {
		return nil
	}
	if err := o.Data.Validate(formats); err != nil {
		// Re-scope nested validation errors under this body's field path.
		if ve, ok := err.(*errors.Validation); ok {
			return ve.ValidateName("getHttpResponseRuleOK" + "." + "data")
		}
		return err
	}
	return nil
}
// MarshalBinary interface implementation
func (o *GetHTTPResponseRuleOKBody) MarshalBinary() ([]byte, error) {
	if o == nil {
		// A nil receiver serializes to an empty payload instead of panicking.
		return nil, nil
	}
	return swag.WriteJSON(o)
}
// UnmarshalBinary interface implementation
func (o *GetHTTPResponseRuleOKBody) UnmarshalBinary(b []byte) error {
	// Decode into a temporary so the receiver is untouched on error.
	var decoded GetHTTPResponseRuleOKBody
	if err := swag.ReadJSON(b, &decoded); err != nil {
		return err
	}
	*o = decoded
	return nil
}
|
{
return &GetHTTPResponseRule{Context: ctx, Handler: handler}
}
|
admin.py
|
from django.contrib import admin
from catalog.models import Author, Genre, Book, BookInstance, Language
# admin.site.register(Book)
|
# Language needs no customisation, so register it with the default ModelAdmin.
admin.site.register(Language)
# admin.site.register(BookInstance)
@admin.register(Author)
class AuthorAdmin(admin.ModelAdmin):
    # Columns shown on the author change-list page.
    list_display = ('last_name','first_name', 'date_of_birth', 'date_of_death')
    # Detail-form layout: birth and death dates share one row (tuple grouping).
    fields = ['first_name','last_name', ('date_of_birth','date_of_death')]
class BooksInstanceInline(admin.TabularInline):
    # Lets BookInstance copies be edited inline on the Book admin page.
    model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
    # display_genre is a model method used because ManyToMany fields cannot
    # appear directly in list_display.
    list_display = ('title', 'author', 'display_genre')
    inlines = [BooksInstanceInline]
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
    list_display = ('book', 'status', 'borrower', 'due_back', 'id')
    # Sidebar filters for loan status and due date.
    list_filter = ('status', 'due_back')
    # Split the detail form into a general section and an availability section.
    fieldsets = (
        (None, {
            'fields': ('book','imprint', 'id')
        }),
        ('Availability', {
            'fields': ('status', 'due_back','borrower')
        }),
    )
|
# Author is registered via its @admin.register decorator above instead.
# admin.site.register(Author)
# Genre needs no customisation; use the default ModelAdmin.
admin.site.register(Genre)
|
target.py
|
import gzip
import natsort
from Bio import SeqIO
def
|
(infile: str, outfile) -> None:
contig_hsh = {}
SEX_CHROMOSOME = ("W", "X", "Y", "Z")
sequences = SeqIO.parse(infile, "fasta") if infile.endswith(".fasta") else SeqIO.parse(gzip.open(infile, "rt"), "fasta")
for i in sequences:
contig = i.id
contig_length = len(i.seq)
if (contig.startswith("Super") or contig.startswith("SUPER")) and not "unloc" in contig and not contig.endswith(SEX_CHROMOSOME) and contig_length > 1000000:
contig_hsh[contig] = contig_length
contig_lst = natsort.natsorted(list(contig_hsh.keys()))
for contig in contig_lst:
outfile.write("{}\n".format(contig))
|
get_target
|
Slide.js
|
// @flow
import React, { PureComponent } from "react";
import classnames from "classnames";
type Props = {
children: any,
className: string
};
class Slide extends PureComponent<Props> {
render() {
const { children, className } = this.props;
return <section className={classnames(className)}>{children}</section>;
}
}
|
export default Slide;
|
|
httpserver.go
|
// Copyright 2021 by LunaSec (owned by Refinery Labs, Inc)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package tokenizer
import (
"fmt"
"log"
"net/http"
apigateway "github.com/apex/gateway"
"github.com/lunasec-io/lunasec-monorepo/constants"
"github.com/lunasec-io/lunasec-monorepo/gateway"
"github.com/lunasec-io/lunasec-monorepo/service"
"github.com/lunasec-io/lunasec-monorepo/util"
"github.com/rs/cors"
)
func newServer(configPath string, authType constants.AuthType) http.Handler
|
// newHttpServer wraps the given handler in an *http.Server bound to the
// address from TOKENIZER_HTTP_ADDR (default 0.0.0.0:37767).
func newHttpServer(sm http.Handler) *http.Server {
	listenAddr := util.GetEnvWithFallback("TOKENIZER_HTTP_ADDR", "0.0.0.0:37767")
	httpServer := &http.Server{
		Addr:           listenAddr,
		Handler:        sm,
		MaxHeaderBytes: 2 << 20, // 2 MB
	}
	log.Printf("HTTP server listening at %s\n", listenAddr)
	return httpServer
}
// NewLocalDevServer returns a JWT-authenticated tokenizer HTTP server for local development.
func NewLocalDevServer() *http.Server {
	sm := newServer(constants.TokenizerConfigPath, constants.JwtAuthType)
	return newHttpServer(sm)
}
// NewApiGatewayServer wraps the JWT-authenticated tokenizer handler in the
// apex/gateway adapter so it can serve requests behind AWS API Gateway.
func NewApiGatewayServer() *apigateway.Gateway {
	sm := newServer(constants.TokenizerConfigPath, constants.JwtAuthType)
	return apigateway.NewGateway(sm)
}
// NewHttpServerSidecar creates a new server with no authentication, and is meant to run as a sidecar in a container.
// NOTE: auth is assumed to have already been performed when invoking this service.
func NewHttpServerSidecar() *http.Server {
	// Same config path as the other servers, but with NoAuthType instead of JWT auth.
	sm := newServer(constants.TokenizerConfigPath, constants.NoAuthType)
	return newHttpServer(sm)
}
|
{
sm := http.NewServeMux()
logger, err := util.GetLogger()
if err != nil {
fmt.Println(err)
panic(err)
}
util.ApplyHealthCheck(sm, logger)
provider := util.GetConfigProviderFromDir(configPath)
logger.Debug("loading AWS gateways")
gateways := gateway.GetAwsGateways(logger, provider)
authProviderJwtVerifier := service.NewJwtVerifier(constants.AuthJwtVerifier, logger, provider)
GetTokenizerRoutes(authType, sm, logger, provider, gateways, authProviderJwtVerifier)
c := cors.New(cors.Options{})
return c.Handler(sm)
}
|
WatchExpressionsSidebarPane.js
|
/*
* Copyright (C) IBM Corp. 2009 All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of IBM Corp. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
 * Sidebar pane listing the user's watch expressions. Expressions persist in a
 * local setting and are re-evaluated (throttled via UI.ThrottledWidget)
 * whenever the selected execution context or call frame changes.
 *
 * @implements {UI.ActionDelegate}
 * @implements {UI.ToolbarItem.ItemsProvider}
 * @implements {UI.ContextMenu.Provider}
 * @unrestricted
 */
Sources.WatchExpressionsSidebarPane = class extends UI.ThrottledWidget {
  constructor() {
    super(true);
    this.registerRequiredCSS('object_ui/objectValue.css');
    this.registerRequiredCSS('sources/watchExpressionsSidebarPane.css');
    /** @type {!Array.<!Sources.WatchExpression>} */
    this._watchExpressions = [];
    // Expressions survive reloads via this local setting.
    this._watchExpressionsSetting = Common.settings.createLocalSetting('watchExpressions', []);
    this._addButton = new UI.ToolbarButton(Common.UIString('Add expression'), 'largeicon-add');
    this._addButton.addEventListener(UI.ToolbarButton.Events.Click, this._addButtonClicked.bind(this));
    this._refreshButton = new UI.ToolbarButton(Common.UIString('Refresh'), 'largeicon-refresh');
    this._refreshButton.addEventListener(UI.ToolbarButton.Events.Click, this.update, this);
    this.contentElement.classList.add('watch-expressions');
    this.contentElement.addEventListener('contextmenu', this._contextMenu.bind(this), false);
    this._expandController = new ObjectUI.ObjectPropertiesSectionExpandController();
    // Re-evaluate all expressions when the debugging context changes.
    UI.context.addFlavorChangeListener(SDK.ExecutionContext, this.update, this);
    UI.context.addFlavorChangeListener(SDK.DebuggerModel.CallFrame, this.update, this);
    this._linkifier = new Components.Linkifier();
    this.update();
  }

  /**
   * @override
   * @return {!Array<!UI.ToolbarItem>}
   */
  toolbarItems() {
    return [this._addButton, this._refreshButton];
  }

  /**
   * @return {boolean}
   */
  hasExpressions() {
    return !!this._watchExpressionsSetting.get().length;
  }

  // Persists the current non-empty expressions to the setting.
  _saveExpressions() {
    const toSave = [];
    for (let i = 0; i < this._watchExpressions.length; i++) {
      if (this._watchExpressions[i].expression())
        toSave.push(this._watchExpressions[i].expression());
    }
    this._watchExpressionsSetting.set(toSave);
  }

  async _addButtonClicked() {
    await UI.viewManager.showView('sources.watch');
    // Start a new, empty expression in inline-edit mode.
    this._createWatchExpression(null).startEditing();
  }

  /**
   * Rebuilds the whole pane from the persisted expression strings.
   * @override
   * @return {!Promise.<?>}
   */
  doUpdate() {
    this._linkifier.reset();
    this.contentElement.removeChildren();
    this._watchExpressions = [];
    this._emptyElement = this.contentElement.createChild('div', 'gray-info-message');
    this._emptyElement.textContent = Common.UIString('No watch expressions');
    const watchExpressionStrings = this._watchExpressionsSetting.get();
    for (let i = 0; i < watchExpressionStrings.length; ++i) {
      const expression = watchExpressionStrings[i];
      if (!expression)
        continue;
      this._createWatchExpression(expression);
    }
    return Promise.resolve();
  }

  /**
   * @param {?string} expression
   * @return {!Sources.WatchExpression}
   */
  _createWatchExpression(expression) {
    this._emptyElement.classList.add('hidden');
    const watchExpression = new Sources.WatchExpression(expression, this._expandController, this._linkifier);
    watchExpression.addEventListener(
        Sources.WatchExpression.Events.ExpressionUpdated, this._watchExpressionUpdated, this);
    this.contentElement.appendChild(watchExpression.element());
    this._watchExpressions.push(watchExpression);
    return watchExpression;
  }

  /**
   * Removes an expression that was edited down to the empty string, then
   * persists the remaining ones.
   * @param {!Common.Event} event
   */
  _watchExpressionUpdated(event) {
    const watchExpression = /** @type {!Sources.WatchExpression} */ (event.data);
    if (!watchExpression.expression()) {
      this._watchExpressions.remove(watchExpression);
      this.contentElement.removeChild(watchExpression.element());
      this._emptyElement.classList.toggle('hidden', !!this._watchExpressions.length);
    }
    this._saveExpressions();
  }

  /**
   * @param {!Event} event
   */
  _contextMenu(event) {
    const contextMenu = new UI.ContextMenu(event);
    this._populateContextMenu(contextMenu, event);
    contextMenu.show();
  }

  /**
   * @param {!UI.ContextMenu} contextMenu
   * @param {!Event} event
   */
  _populateContextMenu(contextMenu, event) {
    let isEditing = false;
    for (const watchExpression of this._watchExpressions)
      isEditing |= watchExpression.isEditing();
    if (!isEditing)
      contextMenu.debugSection().appendItem(Common.UIString('Add watch expression'), this._addButtonClicked.bind(this));
    if (this._watchExpressions.length > 1) {
      contextMenu.debugSection().appendItem(
          Common.UIString('Delete all watch expressions'), this._deleteAllButtonClicked.bind(this));
    }
    // Let the expression under the cursor contribute its own menu items.
    const target = event.deepElementFromPoint();
    if (!target)
      return;
    for (const watchExpression of this._watchExpressions) {
      if (watchExpression.element().isSelfOrAncestor(target))
        watchExpression._populateContextMenu(contextMenu, event);
    }
  }

  _deleteAllButtonClicked() {
    this._watchExpressions = [];
    this._saveExpressions();
    this.update();
  }

  /**
   * @param {string} expression
   */
  _focusAndAddExpressionToWatch(expression) {
    UI.viewManager.showView('sources.watch');
    this.doUpdate();
    this._addExpressionToWatch(expression);
  }

  /**
   * @param {string} expression
   */
  _addExpressionToWatch(expression) {
    this._createWatchExpression(expression);
    this._saveExpressions();
  }

  /**
   * Handles the add-to-watch action: watches the text currently selected in
   * the active source frame.
   * @override
   * @param {!UI.Context} context
   * @param {string} actionId
   * @return {boolean}
   */
  handleAction(context, actionId) {
    const frame = UI.context.flavor(Sources.UISourceCodeFrame);
    if (!frame)
      return false;
    const text = frame.textEditor.text(frame.textEditor.selection());
    this._focusAndAddExpressionToWatch(text);
    return true;
  }

  /**
   * @param {!ObjectUI.ObjectPropertyTreeElement} target
   */
  _addPropertyPathToWatch(target) {
    this._addExpressionToWatch(target.path());
  }

  /**
   * @override
   * @param {!Event} event
   * @param {!UI.ContextMenu} contextMenu
   * @param {!Object} target
   */
  appendApplicableItems(event, contextMenu, target) {
    if (target instanceof ObjectUI.ObjectPropertyTreeElement && !target.property.synthetic) {
      contextMenu.debugSection().appendItem(
          ls`Add property path to watch`, this._addPropertyPathToWatch.bind(this, target));
    }
    const frame = UI.context.flavor(Sources.UISourceCodeFrame);
    if (!frame || frame.textEditor.selection().isEmpty())
      return;
    contextMenu.debugSection().appendAction('sources.add-to-watch');
  }
};
/**
* @unrestricted
*/
Sources.WatchExpression = class extends Common.Object {
/**
 * @param {?string} expression
 * @param {!ObjectUI.ObjectPropertiesSectionExpandController} expandController
 * @param {!Components.Linkifier} linkifier
 */
constructor(expression, expandController, linkifier) {
  super();
  this._expression = expression;
  this._expandController = expandController;
  this._element = createElementWithClass('div', 'watch-expression monospace');
  this._editing = false;
  this._linkifier = linkifier;
  // Build the row DOM first, then evaluate the expression to fill it in.
  this._createWatchExpression();
  this.update();
}
/**
 * Root DOM node of this watch-expression row.
 * @return {!Element}
 */
element() {
  return this._element;
}
/**
 * The watched expression text (null for a new, not-yet-edited entry).
 * @return {?string}
 */
expression() {
  return this._expression;
}
// Re-evaluates the expression in the current execution context and rebuilds
// the row from the result; no-op when there is no context or no expression.
update() {
  const currentExecutionContext = UI.context.flavor(SDK.ExecutionContext);
  if (currentExecutionContext && this._expression) {
    currentExecutionContext
        .evaluate(
            {
              expression: this._expression,
              objectGroup: Sources.WatchExpression._watchObjectGroupId,
              includeCommandLineAPI: false,
              // Evaluated silently; failures surface via exceptionDetails below.
              silent: true,
              returnByValue: false,
              generatePreview: false
            },
            /* userGesture */ false,
            /* awaitPromise */ false)
        .then(result => this._createWatchExpression(result.object, result.exceptionDetails));
  }
}
// Swaps the rendered row for an inline text prompt seeded with the current
// expression text, and selects that text for immediate replacement.
startEditing() {
  this._editing = true;
  this._element.removeChild(this._objectPresentationElement);
  const newDiv = this._element.createChild('div');
  newDiv.textContent = this._nameElement.textContent;
  this._textPrompt = new ObjectUI.ObjectPropertyPrompt();
  this._textPrompt.renderAsBlock();
  const proxyElement = this._textPrompt.attachAndStartEditing(newDiv, this._finishEditing.bind(this));
  proxyElement.classList.add('watch-expression-text-prompt-proxy');
  proxyElement.addEventListener('keydown', this._promptKeyDown.bind(this), false);
  this._element.getComponentSelection().selectAllChildren(newDiv);
}
/**
 * True while the inline edit prompt is active.
 * @return {boolean}
 */
isEditing() {
  return !!this._editing;
}
/**
 * Tears down the inline prompt and commits the edited text (or restores the
 * previous expression when canceled).
 * @param {!Event} event
 * @param {boolean=} canceled
 */
_finishEditing(event, canceled) {
  if (event)
    event.consume(canceled);
  this._editing = false;
  this._textPrompt.detach();
  const newExpression = canceled ? this._expression : this._textPrompt.text();
  delete this._textPrompt;
  this._element.removeChildren();
  this._element.appendChild(this._objectPresentationElement);
  this._updateExpression(newExpression);
}
/**
* @param {!Event} event
*/
_dblClickOnWatchExpression(event) {
event.consume();
if (!this.isEditing())
this.startEditing();
}
/**
 * Replaces the watched text, re-evaluates it, and notifies listeners (the
 * pane persists or removes the expression in response).
 * @param {?string} newExpression
 */
_updateExpression(newExpression) {
  if (this._expression)
    this._expandController.stopWatchSectionsWithId(this._expression);
  this._expression = newExpression;
  this.update();
  this.dispatchEventToListeners(Sources.WatchExpression.Events.ExpressionUpdated, this);
}
/**
 * Deletes this expression by updating it to null; the pane removes empty
 * expressions when it receives the resulting ExpressionUpdated event.
 * @param {!Event} event
 */
_deleteWatchExpression(event) {
  event.consume(true);
  this._updateExpression(null);
}
/**
 * (Re)builds the row DOM: a header with delete button, name and value, plus
 * an expandable property section when the result is an object with children.
 * @param {!SDK.RemoteObject=} result
 * @param {!Protocol.Runtime.ExceptionDetails=} exceptionDetails
 */
_createWatchExpression(result, exceptionDetails) {
  this._result = result || null;

  const headerElement = createElementWithClass('div', 'watch-expression-header');
  const deleteButton = UI.Icon.create('smallicon-cross', 'watch-expression-delete-button');
  deleteButton.title = ls`Delete watch expression`;
  deleteButton.addEventListener('click', this._deleteWatchExpression.bind(this), false);
  headerElement.appendChild(deleteButton);

  const titleElement = headerElement.createChild('div', 'watch-expression-title');
  this._nameElement = ObjectUI.ObjectPropertiesSection.createNameElement(this._expression);
  if (!!exceptionDetails || !result) {
    // Evaluation failed (or never ran): show a dimmed placeholder value.
    this._valueElement = createElementWithClass('span', 'watch-expression-error value');
    titleElement.classList.add('dimmed');
    this._valueElement.textContent = Common.UIString('<not available>');
  } else {
    this._valueElement = ObjectUI.ObjectPropertiesSection.createValueElementWithCustomSupport(
        result, !!exceptionDetails, false /* showPreview */, titleElement, this._linkifier);
  }
  const separatorElement = createElementWithClass('span', 'watch-expressions-separator');
  separatorElement.textContent = ': ';
  titleElement.appendChildren(this._nameElement, separatorElement, this._valueElement);

  this._element.removeChildren();
  this._objectPropertiesSection = null;
  if (!exceptionDetails && result && result.hasChildren && !result.customPreview()) {
    // Expandable object: the header doubles as the property-section title.
    headerElement.classList.add('watch-expression-object-header');
    this._objectPropertiesSection = new ObjectUI.ObjectPropertiesSection(result, headerElement, this._linkifier);
    this._objectPresentationElement = this._objectPropertiesSection.element;
    this._objectPresentationElement.classList.add('watch-expression-object');
    this._expandController.watchSection(/** @type {string} */ (this._expression), this._objectPropertiesSection);
    const objectTreeElement = this._objectPropertiesSection.objectTreeElement();
    objectTreeElement.toggleOnClick = false;
    objectTreeElement.listItemElement.addEventListener('click', this._onSectionClick.bind(this), false);
    objectTreeElement.listItemElement.addEventListener('dblclick', this._dblClickOnWatchExpression.bind(this));
  } else {
    this._objectPresentationElement = headerElement;
    this._objectPresentationElement.addEventListener('dblclick', this._dblClickOnWatchExpression.bind(this));
  }
  this._element.appendChild(this._objectPresentationElement);
}
/**
* @param {!Event} event
*/
_onSectionClick(event) {
event.consume(true);
if (event.detail === 1) {
this._preventClickTimeout = setTimeout(handleClick.bind(this), 333);
} else {
clearTimeout(this._preventClickTimeout);
delete this._preventClickTimeout;
}
/**
* @this {Sources.WatchExpression}
*/
function
|
() {
if (!this._objectPropertiesSection)
return;
const objectTreeElement = this._objectPropertiesSection.objectTreeElement();
if (objectTreeElement.expanded)
objectTreeElement.collapse();
else
objectTreeElement.expand();
}
}
/**
* @param {!Event} event
*/
_promptKeyDown(event) {
if (isEnterKey(event) || isEscKey(event))
this._finishEditing(event, isEscKey(event));
}
/**
 * Adds per-expression menu items: delete, copy value (primitive results
 * only), and the applicable items for the rendered value element.
 * @param {!UI.ContextMenu} contextMenu
 * @param {!Event} event
 */
_populateContextMenu(contextMenu, event) {
  if (!this.isEditing()) {
    contextMenu.editSection().appendItem(
        Common.UIString('Delete watch expression'), this._updateExpression.bind(this, null));
  }
  if (!this.isEditing() && this._result && (this._result.type === 'number' || this._result.type === 'string'))
    contextMenu.clipboardSection().appendItem(Common.UIString('Copy value'), this._copyValueButtonClicked.bind(this));
  const target = event.deepElementFromPoint();
  if (target && this._valueElement.isSelfOrAncestor(target))
    contextMenu.appendApplicableItems(this._result);
}
// Copies the rendered value text to the clipboard.
_copyValueButtonClicked() {
  InspectorFrontendHost.copyText(this._valueElement.textContent);
}
};
// Object group id used for all watch evaluations (see update()).
Sources.WatchExpression._watchObjectGroupId = 'watch-group';

/** @enum {symbol} */
Sources.WatchExpression.Events = {
  ExpressionUpdated: Symbol('ExpressionUpdated')
};
|
handleClick
|
groups_settings_scanner.py
|
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Scanner for the GroupsSettings rules engine."""
import json
from google.cloud.forseti.common.gcp_type import groups_settings
from google.cloud.forseti.common.util import logger
from google.cloud.forseti.scanner.audit import groups_settings_rules_engine
from google.cloud.forseti.scanner.scanners import base_scanner
LOGGER = logger.get_logger(__name__)
class GroupsSettingsScanner(base_scanner.BaseScanner):
"""Scanner for GroupsSettings data."""
    def __init__(self, global_configs, scanner_configs, service_config,
                 model_name, snapshot_timestamp, rules):
        """Initialization.

        Args:
            global_configs (dict): Global configurations.
            scanner_configs (dict): Scanner configurations.
            service_config (ServiceConfig): Forseti 2.0 service configs
            model_name (str): name of the data model
            snapshot_timestamp (str): Timestamp, formatted as YYYYMMDDTHHMMSSZ.
            rules (str): Fully-qualified path and filename of the rules file.
        """
        super(GroupsSettingsScanner, self).__init__(
            global_configs,
            scanner_configs,
            service_config,
            model_name,
            snapshot_timestamp,
            rules)
        # Build the rules engine from the rules file and prime its rule book
        # so find_violations() can be called immediately.
        self.rules_engine = (groups_settings_rules_engine.
                             GroupsSettingsRulesEngine(
                                 rules_file_path=self.rules,
                                 snapshot_timestamp=self.snapshot_timestamp))
        self.rules_engine.build_rule_book(self.global_configs)
@staticmethod
def _flatten_violations(violations):
"""Flatten RuleViolations into a dict for each RuleViolation member.
Args:
violations (list): The RuleViolations to flatten.
Yields:
dict: Iterator of RuleViolations as a dict per member.
"""
for violation in violations:
resource_data = {
'whoCanAdd': violation.whoCanAdd,
'whoCanJoin': violation.whoCanJoin,
'whoCanViewMembership': violation.whoCanViewMembership,
'whoCanViewGroup': violation.whoCanViewGroup,
'whoCanInvite': violation.whoCanInvite,
'allowExternalMembers': violation.allowExternalMembers,
'whoCanLeaveGroup': violation.whoCanLeaveGroup,
}
yield {
'resource_id': violation.group_email,
'full_name': violation.group_email,
'resource_name': violation.group_email,
'resource_data': json.dumps(resource_data, sort_keys=True),
'violation_data': violation.violation_reason,
'resource_type': violation.resource_type,
'rule_index': violation.rule_index,
'rule_name': violation.rule_name,
'violation_type': violation.violation_type,
}
def _output_results(self, all_violations):
"""Output results.
Args:
all_violations (list): All violations.
"""
all_violations = list(self._flatten_violations(all_violations))
self._output_results_to_db(all_violations)
def _find_violations(self, all_groups_settings, iam_groups_settings):
"""Find violations in the settings.
Args:
all_groups_settings (list): GroupsSettings list to find violations
in.
iam_groups_settings (list): GroupsSettings list for only those
groups settings that have at least 1 iam policy, to find violations
in.
Returns:
list: All violations.
"""
all_violations = []
LOGGER.info('Finding groups settings violations...')
for settings in all_groups_settings:
violations = self.rules_engine.find_violations(settings,
iam_only=False)
LOGGER.debug(violations)
all_violations.extend(violations)
for settings in iam_groups_settings:
violations = self.rules_engine.find_violations(settings,
iam_only=True)
LOGGER.debug(violations)
all_violations.extend(violations)
return all_violations
def _retrieve(self):
|
    def run(self):
        """Run, the entry point for this scanner."""
        # Fetch both settings lists (all groups, plus the IAM-bearing
        # subset), scan them, then persist any violations found.
        all_groups_settings, iam_groups_settings = self._retrieve()
        all_violations = self._find_violations(all_groups_settings,
                                               iam_groups_settings)
        self._output_results(all_violations)
|
"""Runs the data collection.
Returns:
tupl: 2 lists of GroupsSettings objects, 1 only for settings that
have iam policies and 1 with all groups settings.
Raises:
ValueError: if resources have an unexpected type.
"""
all_groups_settings = []
iam_groups_settings = []
model_manager = self.service_config.model_manager
scoped_session, data_access = model_manager.get(self.model_name)
with scoped_session as session:
for settings in data_access.scanner_fetch_groups_settings(session,
True):
email = settings[0].split('group/')[1]
iam_groups_settings.append(groups_settings.GroupsSettings
.from_json(email, settings[1]))
for settings in data_access.scanner_fetch_groups_settings(session,
False):
email = settings[0].split('group/')[1]
all_groups_settings.append(groups_settings.GroupsSettings
.from_json(email, settings[1]))
return (all_groups_settings, iam_groups_settings)
|
TimeAgo.contracts.ts
|
import { TIME_PERIODS } from "./TimeAgo.constants";
export interface DateDifferenceReturnValue {
count?: number;
key: Uppercase<typeof TIME_PERIODS[number]> | "FEW_SECONDS";
|
}
|
|
Headset.py
|
# -*- coding: utf-8 -*-
"""
Object representation for Headset device.
"""
import serial
import logging
from pymongo import MongoClient
from InputDeviceInterface import InputDeviceInterface
from HeadsetThreadReader import HeadsetThreadReader
from TimeBuffer import TimeBuffer
import helpers
import constants
class Headset(InputDeviceInterface):
db = None
port = None
baudrate = 0
is_reading = False
device_reader = None
device_handler = None
device_buffer = None
logger = None
    def __init__(self, logging_level=logging.ERROR):
        """
        Creates an instance of Headset class and initializes the mandatory
        variables required by the reading process. Optionally, can receive
        the level for logging. This could be useful for debugging process.
        """
        logging.basicConfig(level=logging_level)
        self.logger = logging.getLogger(__name__)
        # Rolling buffer of recent samples, bounded by the configured
        # time window.
        self.device_buffer = TimeBuffer(constants.HEADSET_TIME_WINDOW_SIZE)
        # Set bounds for good signal deviation standard.
        self.__set_signal_quality_variance_range()
def connect(self, port, baudrate):
"""
Attempts to establish a connection to the port received, with
a baud rate of baudrate.
Check PySerial's docs for possible values for baudrate.
"""
try:
self.logger.info("Connecting to port \'{}\'...".format(port))
self.device_handler = serial.Serial(port, baudrate, timeout=1)
self.logger.info(
"Connection to port \'{}\' established.".format(port)
)
self.port = port
self.baudrate = baudrate
except Exception, e:
raise e
    def isConnected(self):
        """
        Check if headset device is connected.

        Returns whether the underlying serial port is open.
        """
        # NOTE(review): raises AttributeError if connect() was never
        # called (device_handler is still None) — confirm callers always
        # connect first.
        return self.device_handler.isOpen()
    def startReading(self, persist_data=False):
        """
        Starts the serial port reading. If device isn't connected, throws
        an exception.

        *persist_data*: boolean to set if data must be stored in mongodb.
        """
        if (not self.isConnected()):
            raise Exception("Device is not conected.")
        if (not self.device_buffer):
            raise Exception("Buffer not initialized.")
        if (persist_data):
            # Can raise an pymongo.errors.ServerSelectionTimeoutError
            self.__start_database()
        # Background thread drains the serial port into the buffer (and,
        # when persist_data is set, into MongoDB).
        self.device_reader = HeadsetThreadReader(
            self.device_handler,
            self.device_buffer,
            self.db,
            persist_data
        )
        self.device_reader.start()
        self.is_reading = True
def stopReading(self):
"""
Stops current reading process.
Warning: this method doesn't close the port.
"""
if (not self.is_reading):
self.logger.info('Headset is not reading.')
return
self.device_reader.stopReading()
self.device_reader.join()
self.is_reading = False
def
|
(self):
"""
Gets the quality of signal of each sensor. It returns a python
dictionary like this: {"s1": 0, "s2": 3, ...}
Possible values for each sensor are:
- 0: No signal
- 1: Bad signal
- 3: Good signal
"""
currentData = self.device_buffer.getAll()
# If the buffer is not full, the signal is bad.
if (len(currentData) < constants.HEADSET_MIN_BUFFER_SIZE):
status = {}
for i in range(0, constants.HEADSET_NUMBER_OF_SENSORS):
status["s" + str(i + 1)] = 0
self.logger.info(
"Not enough data to check signal quality. {} found.".format(
len(currentData)
)
)
return status
sensorsData = [[] for i in range(constants.HEADSET_NUMBER_OF_SENSORS)]
for sample in currentData:
sample.pop('readed_at')
index = 0
for sensor in sample:
sensorsData[index].append(sample[sensor])
index += 1
status = {}
for i in range(0, constants.HEADSET_NUMBER_OF_SENSORS):
# No signal
if (self.__is_no_signal(sensorsData[i])):
status["s" + str(i + 1)] = 0
# Good signal
elif (self.__is_good_signal(sensorsData[i])):
status["s" + str(i + 1)] = 3
# Bad signal
else:
status["s" + str(i + 1)] = 1
self.logger.info(
"Status calculated on {} samples.".format(len(currentData))
)
return status
    def getCurrentData(self):
        """
        Retrieve data acquired in the last second.

        Returns the full contents of the rolling sample buffer
        (presumably a list of sample dicts — see TimeBuffer.getAll).
        """
        return self.device_buffer.getAll()
def closePort(self):
"""
Close current connection. If device isn't connected or program
is still reading, it does nothing.
"""
# Si no esta conectado, no puede cerrar.
if (not self.isConnected()):
self.logger.info("Device is not conected.")
return False
# Si todavia esta leyendo, no puede cerrar.
if (self.is_reading):
self.logger.info("Can't close port because is still reading.")
return False
self.logger.info("Closing port " + str(self.port) + "...")
self.device_handler.close()
self.logger.info("Port " + str(self.port) + " closed successfully.")
return True
def __start_database(self):
self.logger.info("Starting mongo client...")
try:
client = MongoClient("localhost", serverSelectionTimeoutMS=1)
# Force connection on this request. This will raise an
# exception if can't establish connection with server.
client.server_info()
self.db = client.emotrix_db
except Exception, e:
raise Exception("Unable to connect to MongoDB server. \n" + str(e))
self.logger.info("MongoDB server connection established.")
    def __set_signal_quality_variance_range(self):
        # Precompute the [min, max] variance interval that characterizes a
        # good signal and cache it on the constants module so
        # __is_good_signal can reuse it on every check.
        variance_info = helpers.get_variance_range(
            constants.HEADSET_MIN_BUFFER_SIZE,
            1000,
            constants.HEADSET_GOOD_SIGNAL_MAX_AMPLITUDE
        )
        constants.HEADSET_GOOD_SIGNAL_MIN_VAR = variance_info[0]
        constants.HEADSET_GOOD_SIGNAL_MAX_VAR = variance_info[1]
def __is_no_signal(self, samples):
min_value = constants.HEADSET_CENTER - (
constants.HEADSET_NO_SIGNAL_MAX_AMPLITUDE / 2
)
max_value = constants.HEADSET_CENTER + (
constants.HEADSET_NO_SIGNAL_MAX_AMPLITUDE / 2
)
if ((min(samples) >= min_value) and (max(samples) <= max_value)):
return True
return False
def __is_good_signal(self, samples):
data_variance = helpers.variance(samples)
if (
(data_variance >= constants.HEADSET_GOOD_SIGNAL_MIN_VAR) and
(data_variance <= constants.HEADSET_GOOD_SIGNAL_MAX_VAR)
):
return True
return False
|
getStatus
|
parameters.ts
|
/**
* Prefix the parameter.
*
* @param param
*/
function
|
(param: string): string {
return `swb_${param}`;
}
/**
 * Query-string parameter names used by the app (each `swb_`-prefixed via
 * `prefixed`), plus `get`/`has` helpers to read them from a raw query
 * string.
 */
export default {
  DEBUG: prefixed('debug'),
  LOG_LEVEL: prefixed('log'),
  ENV: prefixed('env'),
  REGION: prefixed('region'),
  DEPLOYMENT: prefixed('d'),
  POLICY_SCOPE: prefixed('p'),
  LANGUAGE: prefixed('l'),
  SHOW: prefixed('show'),
  // Value of `key` in the query string `input`, or '' when absent.
  get: (key: string, input: string): string => (new URLSearchParams(input)).get(key) || '',
  // Whether `key` appears in the query string `input`.
  has: (key: string, input: string): boolean => (new URLSearchParams(input)).has(key),
};
|
prefixed
|
mongo.go
|
package connections
import (
"crypto/tls"
"github.com/globalsign/mgo"
"github.com/go-bongo/bongo"
"github.com/mohemohe/butimili-api/configs"
"github.com/mohemohe/butimili-api/util"
"github.com/sirupsen/logrus"
"net"
"time"
)
var mongoConn *bongo.Connection
// Mongo returns the shared bongo connection, creating it on first use.
// NOTE(review): the nil check is not synchronized, so concurrent first
// calls could each create a connection — confirm callers are
// single-goroutine at startup.
func Mongo() *bongo.Connection {
	if mongoConn == nil {
		mongoConn = NewMongo()
	}
	return mongoConn
}
func
|
() *bongo.Connection {
util.Logger().WithFields(logrus.Fields{
"address": configs.GetEnv().Mongo.Address,
"database": configs.GetEnv().Mongo.Database,
"ssl": configs.GetEnv().Mongo.SSL,
}).Info("create mongo connection")
config := &bongo.Config{
ConnectionString: configs.GetEnv().Mongo.Address,
Database: configs.GetEnv().Mongo.Database,
}
if configs.GetEnv().Mongo.SSL {
// REF: https://github.com/go-bongo/bongo/pull/11
if dialInfo, err := mgo.ParseURL(config.ConnectionString); err != nil {
util.Logger().Fatal(err)
} else {
config.DialInfo = dialInfo
}
tlsConfig := &tls.Config{}
config.DialInfo.DialServer = func(addr *mgo.ServerAddr) (net.Conn, error) {
conn, err := tls.Dial("tcp", addr.String(), tlsConfig)
return conn, err
}
config.DialInfo.Timeout = time.Second * 3
}
conn, err := bongo.Connect(config)
if err != nil {
panic(err)
}
util.Logger().Info("mongo connection created")
return conn
}
|
NewMongo
|
get.go
|
/*
Copyright 2018 the Velero contributors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package config
import (
"fmt"
"sort"
"github.com/spf13/cobra"
"github.com/heptio/velero/pkg/client"
"github.com/heptio/velero/pkg/cmd"
)
func NewGetCommand() *cobra.Command
|
{
c := &cobra.Command{
Use: "get [KEY 1] [KEY 2] [...]",
Short: "Get client configuration file values",
Run: func(c *cobra.Command, args []string) {
config, err := client.LoadConfig()
cmd.CheckError(err)
if len(args) == 0 {
keys := make([]string, 0, len(config))
for key := range config {
keys = append(keys, key)
}
sort.Strings(keys)
for _, key := range keys {
fmt.Printf("%s: %s\n", key, config[key])
}
} else {
for _, key := range args {
value, found := config[key]
if !found {
value = "<NOT SET>"
}
fmt.Printf("%s: %s\n", key, value)
}
}
},
}
return c
}
|
|
list_groups.go
|
package schedulerx2
//Licensed under the Apache License, Version 2.0 (the "License");
//you may not use this file except in compliance with the License.
//You may obtain a copy of the License at
//
//http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing, software
//distributed under the License is distributed on an "AS IS" BASIS,
//WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//See the License for the specific language governing permissions and
//limitations under the License.
//
// Code generated by Alibaba Cloud SDK Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/requests"
"github.com/aliyun/alibaba-cloud-sdk-go/sdk/responses"
)
// ListGroups invokes the schedulerx2.ListGroups API synchronously.
// It sends the request through the SDK client and unmarshals the reply
// into a fresh ListGroupsResponse.
func (client *Client) ListGroups(request *ListGroupsRequest) (response *ListGroupsResponse, err error) {
	response = CreateListGroupsResponse()
	err = client.DoAction(request, response)
	return
}
// ListGroupsWithChan invokes the schedulerx2.ListGroups API asynchronously.
// The response and any error are delivered on buffered (capacity 1)
// channels; both channels are closed once the call completes. If the task
// cannot even be scheduled, the scheduling error is sent and both channels
// are closed immediately.
func (client *Client) ListGroupsWithChan(request *ListGroupsRequest) (<-chan *ListGroupsResponse, <-chan error) {
	responseChan := make(chan *ListGroupsResponse, 1)
	errChan := make(chan error, 1)
	err := client.AddAsyncTask(func() {
		defer close(responseChan)
		defer close(errChan)
		response, err := client.ListGroups(request)
		if err != nil {
			errChan <- err
		} else {
			responseChan <- response
		}
	})
	if err != nil {
		errChan <- err
		close(responseChan)
		close(errChan)
	}
	return responseChan, errChan
}
// ListGroupsWithCallback invokes the schedulerx2.ListGroups API
// asynchronously and hands the result (or error) to callback. The returned
// channel receives 1 when the scheduled task ran, or 0 when the task could
// not be scheduled (callback then gets the scheduling error), and is
// closed afterwards.
func (client *Client) ListGroupsWithCallback(request *ListGroupsRequest, callback func(response *ListGroupsResponse, err error)) <-chan int {
	result := make(chan int, 1)
	err := client.AddAsyncTask(func() {
		var response *ListGroupsResponse
		var err error
		defer close(result)
		response, err = client.ListGroups(request)
		callback(response, err)
		result <- 1
	})
	if err != nil {
		defer close(result)
		callback(nil, err)
		result <- 0
	}
	return result
}
// ListGroupsRequest is the request struct for api ListGroups
type ListGroupsRequest struct {
*requests.RpcRequest
NamespaceSource string `position:"Query" name:"NamespaceSource"`
Namespace string `position:"Query" name:"Namespace"`
}
// ListGroupsResponse is the response struct for api ListGroups
type ListGroupsResponse struct {
*responses.BaseResponse
RequestId string `json:"RequestId" xml:"RequestId"`
Code int `json:"Code" xml:"Code"`
Message string `json:"Message" xml:"Message"`
Success bool `json:"Success" xml:"Success"`
Data Data `json:"Data" xml:"Data"`
}
// CreateListGroupsRequest creates a request to invoke ListGroups API
func CreateListGroupsRequest() (request *ListGroupsRequest)
|
// CreateListGroupsResponse creates a response to parse from ListGroups response
func CreateListGroupsResponse() (response *ListGroupsResponse) {
response = &ListGroupsResponse{
BaseResponse: &responses.BaseResponse{},
}
return
}
|
{
request = &ListGroupsRequest{
RpcRequest: &requests.RpcRequest{},
}
request.InitWithApiInfo("schedulerx2", "2019-04-30", "ListGroups", "", "")
request.Method = requests.GET
return
}
|
index.js
|
"use strict";
const stringify = require("../stringify");
const chalk = require("chalk");
const assert = require("assert");
const Path = require("path");
const xsh = require("xsh");
const replaceCwd = p => xsh.pathCwdNm.replace(p, null, "g");
// Stringify a task value for display: on Windows, collapse the doubled
// backslashes the stringifier produces, then replace the current working
// directory with a short placeholder.
const stringifyTask = x => {
  x = stringify(x);
  /* istanbul ignore next */
  if (Path.sep === "\\") {
    /* istanbul ignore next */
    x = x.replace(/\\\\/g, "\\");
  }
  return replaceCwd(x);
};
// Pretty-print one group of tasks under `title`. Each task name is padded
// to a common width and joined to its description by a dotted guide line;
// the guide character and name color alternate per row for readability.
const printNSTasks = (title, names, tasks) => {
  let guideChar = ".";
  let nameColor = "cyan";
  // A task's "type" is its constructor name (String/Array/Object/
  // Function/XTaskSpec); null/undefined yield false.
  const taskType = task => task !== null && task !== undefined && task.constructor.name;
  // Widest printable name (+2 padding). Object tasks without a desc are
  // hidden below, so they don't influence the width.
  const maxNameLen =
    2 +
    names.reduce((l, x) => {
      const task = tasks[x];
      if (taskType(task) === "Object" && !task.desc) {
        return l;
      }
      return x.length > l ? x.length : l;
    }, 0);
  // Print a single task line (or lines, for Object tasks with task/dep).
  const printTask = name => {
    const task = tasks[name];
    const tof = taskType(task);
    const paddedArr = new Array(Math.max(1, maxNameLen - name.length));
    const paddedName = chalk[nameColor](`  ${name} ${paddedArr.join(guideChar)}`);
    if (tof === "String") {
      console.log(paddedName, chalk.magenta(replaceCwd(task)));
    } else if (tof === "Array") {
      console.log(paddedName, chalk.green(`${stringifyTask(task)}`));
    } else if (tof === "Object") {
      // Object tasks without a description are intentionally hidden.
      if (!task.desc) {
        return;
      }
      console.log(paddedName, chalk.yellow(task.desc));
      const spacePad = new Array(maxNameLen + 4).join(" ");
      if (task.task) {
        console.log(chalk.dim.green(`${spacePad}  tasks: ${stringifyTask(task.task)}`));
      }
      if (task.dep) {
        console.log(chalk.dim.cyan(`${spacePad}   deps: ${stringifyTask(task.dep)}`));
      }
    } else if (tof === "Function") {
      console.log(paddedName, "function", task.name);
    } else if (tof === "XTaskSpec") {
      console.log(paddedName, chalk.magenta(replaceCwd(task.toString())));
    } else {
      console.log(paddedName, chalk.red(`Unknown task type ${tof}`));
    }
    // Alternate the guide character and color for the next row.
    guideChar = guideChar === "." ? "-" : ".";
    nameColor = nameColor === "cyan" ? "blue" : "cyan";
  };
  console.log(chalk.underline(title));
  console.log("");
  names.sort().forEach(printTask);
  console.log("");
};
function
|
(xtasks) {
const namespaces = xtasks._namespaces;
const tasks = xtasks._tasks;
namespaces.forEach(n => {
const nTasks = tasks[n];
assert(nTasks, `Task namespace ${n} is falsy`);
if (Object.keys(nTasks).length === 0) {
return;
}
console.log(chalk.inverse.bold.red(`Namespace '${n}'`));
const taskNames = Object.keys(nTasks).reduce((an, tn) => {
let lbl;
if (tn.match(/(^\.)|([$~])/)) {
lbl = "Hidden";
} else if (tn.match(/^[a-zA-Z_0-9]+$/)) {
lbl = "Primary";
} else {
lbl = "Other";
}
an[lbl] = an[lbl] || [];
an[lbl].push(tn);
return an;
}, {});
const isEmpty = x => !x || x.length === 0;
["Primary", "Other"]
.filter(tt => !isEmpty(taskNames[tt]))
.forEach(tt => printNSTasks(chalk.bold(`${tt} Tasks`), taskNames[tt], nTasks));
});
}
module.exports = printTasks;
|
printTasks
|
option_datefmt.rs
|
use chrono::{DateTime, TimeZone, Utc};
use serde::{Deserialize, Deserializer};
|
/// Deserialize a `DateTime<Utc>` from a string using the module-level
/// `FORMAT` pattern, converting chrono parse failures into serde errors.
fn datefmt<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
where
    D: Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    Utc.datetime_from_str(&s, FORMAT)
        .map_err(serde::de::Error::custom)
}
/// Deserialize an `Option<DateTime<Utc>>`, applying `datefmt` to the inner
/// value when present. The local `Wrapper` newtype is the standard serde
/// trick for combining `Option` with a custom `deserialize_with` function.
pub fn option_datefmt<'de, D>(deserializer: D) -> Result<Option<DateTime<Utc>>, D::Error>
where
    D: Deserializer<'de>,
{
    #[derive(Deserialize)]
    struct Wrapper(#[serde(deserialize_with = "datefmt")] DateTime<Utc>);

    // None stays None; Some runs through Wrapper (and thus datefmt),
    // then the newtype is unwrapped.
    let v = Option::deserialize(deserializer)?;
    Ok(v.map(|Wrapper(a)| a))
}
// EXAMPLE:
// #[derive(Deserialize, Debug)]
// struct MyStruct {
// #[serde(default, deserialize_with = "option_datefmt")]
// expiration_date: Option<DateTime<Utc>>,
// }
|
// From https://github.com/serde-rs/serde/issues/1444
// TODO - make it go to millis
const FORMAT: &str = "%Y-%m-%d %H:%M:%S";
|
cursor.go
|
// Copyright (C) MongoDB, Inc. 2017-present.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
package mongo
import (
"context"
"errors"
"fmt"
"io"
"reflect"
"github.com/stlimtat/mongo-go-driver/bson"
"github.com/stlimtat/mongo-go-driver/bson/bsoncodec"
"github.com/stlimtat/mongo-go-driver/x/bsonx/bsoncore"
"github.com/stlimtat/mongo-go-driver/x/mongo/driver"
"github.com/stlimtat/mongo-go-driver/x/mongo/driver/session"
)
// Cursor is used to iterate over a stream of documents. Each document can be decoded into a Go type via the Decode
// method or accessed as raw BSON via the Current field.
type Cursor struct {
// Current contains the BSON bytes of the current change document. This property is only valid until the next call
// to Next or TryNext. If continued access is required, a copy must be made.
Current bson.Raw
bc batchCursor
batch *bsoncore.DocumentSequence
batchLength int
registry *bsoncodec.Registry
clientSession *session.Client
err error
}
func newCursor(bc batchCursor, registry *bsoncodec.Registry) (*Cursor, error) {
return newCursorWithSession(bc, registry, nil)
}
// newCursorWithSession wraps a driver batchCursor (plus an optional
// implicit client session) in a Cursor. A nil registry falls back to
// bson.DefaultRegistry; a nil batch cursor is an error.
func newCursorWithSession(bc batchCursor, registry *bsoncodec.Registry, clientSession *session.Client) (*Cursor, error) {
	if registry == nil {
		registry = bson.DefaultRegistry
	}
	if bc == nil {
		return nil, errors.New("batch cursor must not be nil")
	}
	c := &Cursor{
		bc:            bc,
		registry:      registry,
		clientSession: clientSession,
	}
	// A cursor ID of 0 means the server already exhausted the cursor, so
	// the implicit session (if any) can be released immediately.
	if bc.ID() == 0 {
		c.closeImplicitSession()
	}
	// Initialize just the batchLength here so RemainingBatchLength will return an accurate result. The actual batch
	// will be pulled up by the first Next/TryNext call.
	c.batchLength = c.bc.Batch().DocumentCount()
	return c, nil
}
func newEmptyCursor() *Cursor {
return &Cursor{bc: driver.NewEmptyBatchCursor()}
}
// ID returns the ID of this cursor, or 0 if the cursor has been closed or exhausted.
func (c *Cursor) ID() int64 { return c.bc.ID() }
// Next gets the next document for this cursor. It returns true if there were no errors and the cursor has not been
// exhausted.
//
// Next blocks until a document is available, an error occurs, or ctx expires. If ctx expires, the
// error will be set to ctx.Err(). In an error case, Next will return false.
//
// If Next returns false, subsequent calls will also return false.
func (c *Cursor) Next(ctx context.Context) bool {
return c.next(ctx, false)
}
// TryNext attempts to get the next document for this cursor. It returns true if there were no errors and the next
// document is available. This is only recommended for use with tailable cursors as a non-blocking alternative to
// Next. See https://docs.mongodb.com/manual/core/tailable-cursors/ for more information about tailable cursors.
//
// TryNext returns false if the cursor is exhausted, an error occurs when getting results from the server, the next
// document is not yet available, or ctx expires. If ctx expires, the error will be set to ctx.Err().
//
// If TryNext returns false and an error occurred or the cursor has been exhausted (i.e. c.Err() != nil || c.ID() == 0),
// subsequent attempts will also return false. Otherwise, it is safe to call TryNext again until a document is
// available.
//
// This method requires driver version >= 1.2.0.
func (c *Cursor) TryNext(ctx context.Context) bool {
return c.next(ctx, true)
}
func (c *Cursor) next(ctx context.Context, nonBlocking bool) bool {
// return false right away if the cursor has already errored.
if c.err != nil {
return false
}
if ctx == nil {
ctx = context.Background()
}
doc, err := c.batch.Next()
switch err {
case nil:
// Consume the next document in the current batch.
c.batchLength--
c.Current = bson.Raw(doc)
return true
case io.EOF: // Need to do a getMore
default:
c.err = err
return false
}
// call the Next method in a loop until at least one document is returned in the next batch or
// the context times out.
for {
// If we don't have a next batch
if !c.bc.Next(ctx) {
// Do we have an error? If so we return false.
c.err = replaceErrors(c.bc.Err())
if c.err != nil {
return false
}
// Is the cursor ID zero?
if c.bc.ID() == 0 {
c.closeImplicitSession()
return false
}
// empty batch, but cursor is still valid.
// use nonBlocking to determine if we should continue or return control to the caller.
if nonBlocking {
return false
}
continue
}
// close the implicit session if this was the last getMore
if c.bc.ID() == 0 {
c.closeImplicitSession()
}
// Use the new batch to update the batch and batchLength fields. Consume the first document in the batch.
c.batch = c.bc.Batch()
c.batchLength = c.batch.DocumentCount()
doc, err = c.batch.Next()
switch err {
case nil:
c.batchLength--
c.Current = bson.Raw(doc)
return true
case io.EOF: // Empty batch so we continue
default:
c.err = err
return false
}
}
}
// Decode will unmarshal the current document into val and return any errors from the unmarshalling process without any
// modification. If val is nil or is a typed nil, an error will be returned.
func (c *Cursor) Decode(val interface{}) error {
return bson.UnmarshalWithRegistry(c.registry, c.Current, val)
}
// Err returns the last error seen by the Cursor, or nil if no error has occurred.
func (c *Cursor) Err() error { return c.err }
// Close closes this cursor. Next and TryNext must not be called after Close has been called. Close is idempotent. After
// the first call, any subsequent calls will not change the state.
func (c *Cursor) Close(ctx context.Context) error {
defer c.closeImplicitSession()
return replaceErrors(c.bc.Close(ctx))
}
// All iterates the cursor and decodes each document into results. The results parameter must be a pointer to a slice.
// The slice pointed to by results will be completely overwritten. This method will close the cursor after retrieving
// all documents. If the cursor has been iterated, any previously iterated documents will not be included in results.
//
// This method requires driver version >= 1.1.0.
func (c *Cursor) All(ctx context.Context, results interface{}) error {
resultsVal := reflect.ValueOf(results)
if resultsVal.Kind() != reflect.Ptr {
return fmt.Errorf("results argument must be a pointer to a slice, but was a %s", resultsVal.Kind())
}
sliceVal := resultsVal.Elem()
if sliceVal.Kind() == reflect.Interface {
sliceVal = sliceVal.Elem()
}
if sliceVal.Kind() != reflect.Slice {
return fmt.Errorf("results argument must be a pointer to a slice, but was a pointer to %s", sliceVal.Kind())
}
elementType := sliceVal.Type().Elem()
var index int
var err error
defer c.Close(ctx)
batch := c.batch // exhaust the current batch before iterating the batch cursor
for {
sliceVal, index, err = c.addFromBatch(sliceVal, elementType, batch, index)
if err != nil {
return err
}
if !c.bc.Next(ctx) {
break
}
batch = c.bc.Batch()
}
if err = replaceErrors(c.bc.Err()); err != nil {
return err
}
resultsVal.Elem().Set(sliceVal.Slice(0, index))
return nil
}
// RemainingBatchLength returns the number of documents left in the current batch. If this returns zero, the subsequent
// call to Next or TryNext will do a network request to fetch the next batch.
func (c *Cursor) RemainingBatchLength() int {
return c.batchLength
}
// addFromBatch adds all documents from batch to sliceVal starting at the given index. It returns the new slice value,
// the next empty index in the slice, and an error if one occurs.
func (c *Cursor) addFromBatch(sliceVal reflect.Value, elemType reflect.Type, batch *bsoncore.DocumentSequence,
	index int) (reflect.Value, int, error) {

	docs, err := batch.Documents()
	if err != nil {
		return sliceVal, index, err
	}

	for _, doc := range docs {
		if sliceVal.Len() == index {
			// slice is full: append a zero value to grow it, then re-slice
			// to capacity so Index(index) below is addressable.
			newElem := reflect.New(elemType)
			sliceVal = reflect.Append(sliceVal, newElem.Elem())
			sliceVal = sliceVal.Slice(0, sliceVal.Cap())
		}
		// Decode in place through a pointer to the slice element.
		currElem := sliceVal.Index(index).Addr().Interface()
		if err = bson.UnmarshalWithRegistry(c.registry, doc, currElem); err != nil {
			return sliceVal, index, err
		}
		index++
	}

	return sliceVal, index, nil
}
// closeImplicitSession ends the cursor's session, but only when the driver
// created it implicitly; explicit user-owned sessions are left untouched.
func (c *Cursor) closeImplicitSession() {
	if c.clientSession != nil && c.clientSession.SessionType == session.Implicit {
		c.clientSession.EndSession()
	}
}
// BatchCursorFromCursor returns a driver.BatchCursor for the given Cursor. If there is no underlying
// driver.BatchCursor, nil is returned.
//
// Deprecated: This is an unstable function because the driver.BatchCursor type exists in the "x" package. Neither this
// function nor the driver.BatchCursor type should be used by applications and may be changed or removed in any release.
func BatchCursorFromCursor(c *Cursor) *driver.BatchCursor
|
{
bc, _ := c.bc.(*driver.BatchCursor)
return bc
}
|
|
updatecomponents.py
|
from django.core.management import BaseCommand
import urllib2, json, urllib, base64
from main.models import *
try:
from biojs.settings import GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET
except:
print('ERROR: Could not load config!')
GITHUB_CLIENT_ID = ''
GITHUB_CLIENT_SECRET = ''
from datetime import datetime
import pytz
import ast
import re
# Get sniper data
'''
https://rawgit.com/cytoscape/cytoscape.js/master/package.json
"sniper" key, has "js" and "css". Search for "first"
'''
def get_npm_data():
    """Query the npm registry for packages tagged biojs or bionode.

    Returns:
        dict: parsed JSON search result (up to 500 packages with the
        'biojs' or 'bionode' keyword).
    """
    # response = urllib2.urlopen()
    # Browser-like User-Agent header — presumably to avoid the registry
    # rejecting default urllib2 agents; NOTE(review): confirm.
    hdr = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'}
    req = urllib2.Request("http://registry.npmjs.com/-/v1/search?text=keywords:biojs,bionode&size=500", headers=hdr)
    # req = urllib2.Request("http://registry.npmjs.com/-/v1/search?text=biojs-vis-msa&size=1", headers=hdr)
    response = urllib2.urlopen(req)
    data = json.load(response)
    return data
def get_npm_downloads(url):
|
def send_GET_request(url, user=None, password=None):
    """Open url and return the response object, adding an HTTP Basic auth
    header when both user and password are given.

    NOTE(review): base64.encodestring is the legacy Python 2 spelling
    (this module is Python 2 throughout — urllib2, print statements).
    """
    request = urllib2.Request(url)
    if (user is not None and password is not None):
        # Basic auth credentials must be base64-encoded without newlines.
        base64string = base64.encodestring('%s:%s' % (user, password)).replace('\n', '')
        request.add_header('Authorization', 'Basic %s' % base64string)
    return urllib2.urlopen(request)
def create_jsdelivr_link(owner, repo, file_path, commit=None):
    """Build a jsDelivr CDN URL for a file in a GitHub repository,
    pinned to `commit` when one is given.
    """
    version = '@' + commit if commit else ''
    return str('https://cdn.jsdelivr.net/gh/' + owner + '/' + repo + version + file_path)
def is_absolute_dependency(dep):
    """True when dep is an absolute URL (scheme:// or protocol-relative //)."""
    absolute_url = re.compile('^(?:[a-z]+:)?//')
    return absolute_url.match(dep) is not None
def get_owner_and_repo_from_github_url(url):
    """Extract (owner, repo) from a GitHub API repo URL of the form
    https://api.github.com/repos/<owner>/<repo>[?query].
    """
    path_parts = url.split('?')[0].split('/')
    owner, repo = path_parts[4], path_parts[5]
    return owner, repo
### store the dependency urls and snippet urls
def update_visualizations(component, commit_hash, test=False):
    """Refresh a component's sniper metadata — JS/CSS dependency URLs,
    SniperData flags, and snippet URLs — from the repository's
    package.json at commit_hash.

    Args:
        component: Component model instance to update.
        commit_hash: Git commit to pin the jsDelivr links to.
        test: when True, suppress progress printing.
    """
    owner, repo_name = get_owner_and_repo_from_github_url(component.github_url)
    try:
        url = create_jsdelivr_link(owner, repo_name, '/package.json', commit_hash)
        response = send_GET_request(url)
        package = json.load(response)
        sniper_data = package["sniper"]
    except KeyError:
        # package.json has no "sniper" section; nothing to do.
        print('No sniper info in ', repo_name)
        return
    buildJS = sniper_data.get('buildJS', [])
    js = sniper_data.get('js', [])
    buildCSS = sniper_data.get('buildCSS', [])
    css = sniper_data.get('css', [])
    # Move absolute links from js to buildJS and same for css
    buildJS = buildJS + filter(lambda l: is_absolute_dependency(l), js)
    js = filter(lambda l: not is_absolute_dependency(l), js)
    buildCSS = buildCSS + filter(lambda l: is_absolute_dependency(l), css)
    css = filter(lambda l: not is_absolute_dependency(l), css)
    # Save to db
    for dep in buildJS:
        JSDependency.objects.create(component=component, js_url=dep, sniper_data_value=dep)
    for dep in buildCSS:
        CSSDependency.objects.create(component=component, css_url=dep, sniper_data_value=dep)
    sniperData, created = SniperData.objects.get_or_create(component=component)
    if 'noBrowserify' in sniper_data:
        sniperData.no_browserify = sniper_data['noBrowserify']
    elif len(js) == 0 and len(css) == 0:
        sniperData.no_browserify = True
    sniperData.wzrd_url = '#' if sniperData.no_browserify else 'https://wzrd.in/bundle/' + component.name
    if 'snippets' in sniper_data:
        sniperData.snippets_dir_name = sniper_data['snippets'][0]
    sniperData.save()
    ### For Snippets URLs
    # BUG FIX: filtered_snippets was previously assigned only inside the
    # try block below, so any failure fetching snippet data raised
    # NameError at the loop that follows. Default to an empty list so the
    # function degrades gracefully instead.
    filtered_snippets = []
    try:
        url = str('https://api.github.com/repos/' + owner + '/' + repo_name + '/contents/' + sniperData.snippets_dir_name + '?ref=master')
        if not test:
            print(url)
        snippets_data = send_GET_request(url, GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET)
        snippets = json.load(snippets_data)
        filtered_snippets = filter(lambda s: s['name'].endswith('.js'), snippets)
    except Exception as e:
        print('ERROR: Something went wrong getting snippets data!')
        print(e)
    for snip in filtered_snippets:
        try:
            url = create_jsdelivr_link(owner, repo_name, str('/' + sniperData.snippets_dir_name + '/' + snip['name']), commit_hash)
            name = snip.get('name', '').split('.')[0]
            Snippet.objects.update_or_create(name=name, url=url, sniperData=sniperData)
        except Exception as e:
            print('ERROR: Something went wrong creating a new Snippet')
            print(e)
class Command(BaseCommand):
    """Management command that refreshes every Component from the npm
    registry and GitHub: npm metadata, stars/forks/watchers, contributors,
    latest commit hash, visualizations and weekly download counts.

    All network/data lookups are best-effort: missing fields are skipped
    (bare ``except: pass``) so one bad package does not abort the run.
    """

    # shown during --help
    help = "Command to update the details of all the components from Github"

    def handle(self, *args, **options):
        all_components = get_npm_data()['objects']
        for component in all_components:
            component_data = component['package']
            # Fetch-or-create keeps re-runs idempotent.
            try:
                _component = Component.objects.get(name=component_data['name'])
                print('exists')
            except:
                _component = Component.objects.create(name=component_data['name'])
            print(_component.name)
            # --- npm metadata (each field may be absent) ---
            try:
                _component.version = component_data['version']
            except:
                pass
            try:
                _component.short_description = component_data['description']
            except:
                pass
            try:
                tags = component_data['keywords']
            except:
                tags = []
            for tag in tags:
                try:
                    _tag = Tag.objects.get(name=tag)
                except:
                    _tag = Tag.objects.create(name=tag)
                # BUGFIX: the original added the tag unconditionally AND again
                # behind the membership test; a single guarded add suffices.
                if _tag not in _component.tags.all():
                    _component.tags.add(_tag)
            try:
                str_date = component_data['date']
                req_date = datetime.strptime(str_date, "%Y-%m-%dT%H:%M:%S.%fZ")  # timezone unaware
                _component.modified_time = pytz.utc.localize(req_date)  # now timezone aware
            except:
                pass
            try:
                _component.npm_url = component_data['links']['npm']
            except:
                pass
            try:
                _component.homepage_url = component_data['links']['homepage']
            except:
                pass
            try:
                # repository link looks like https://github.com/<owner>/<repo>
                github_url = component_data['links']['repository']
                url_list = github_url.split('/')
                _component.github_url = 'https://api.github.com/repos/' + str(url_list[3]) + '/' + str(url_list[4])
            except:
                pass
            try:
                _component.author = component_data['author']['name']
            except:
                pass
            try:
                _component.author_email = component_data['author']['email']
            except:
                pass
            _component.save()
            # --- GitHub metadata (only when a repo link was found) ---
            if _component.github_url:
                print(_component.github_url)
                try:
                    response = send_GET_request(_component.github_url, GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET)
                    github_data = json.load(response)
                except urllib2.HTTPError as e:
                    print('Error getting github data!')
                    print(e)
                    # BUGFIX: was a bare py2 "print GITHUB_CLIENT_ID" statement,
                    # inconsistent with every other print in this file.
                    print(GITHUB_CLIENT_ID)
                    continue
                except Exception as e:
                    print('Unexpected error accessing Github!')
                    print(e)
                    continue
                _component.stars = github_data['stargazers_count']
                _component.forks = github_data['forks']
                _component.watchers = github_data['subscribers_count']  # subscriber_count
                _component.icon_url = github_data['owner']['avatar_url']
                _component.open_issues = github_data['open_issues']
                try:
                    _component.license = github_data['license']['name']
                except:
                    pass
                try:
                    str_date = github_data['created_at']
                    req_date = datetime.strptime(str_date, "%Y-%m-%dT%H:%M:%SZ")  # timezone unaware
                    _component.created_time = pytz.utc.localize(req_date)  # now timezone aware
                except:
                    pass
                str_date = github_data['updated_at']
                req_date = datetime.strptime(str_date, "%Y-%m-%dT%H:%M:%SZ")  # timezone unaware
                _component.github_update_time = pytz.utc.localize(req_date)  # now timezone aware
                # BUGFIX: latest_commit_hash was referenced below even when the
                # commit lookup failed, raising NameError inside the second try
                # and printing a misleading "Error updating visualisations!".
                latest_commit_hash = None
                try:
                    response = send_GET_request(github_data['commits_url'].split('{')[0], GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET)
                    latest_commit_hash = json.load(response)[0]['sha']
                    _component.latest_commit_hash = latest_commit_hash
                except:
                    print('Error getting commit hash!')
                if latest_commit_hash:
                    try:
                        update_visualizations(_component, latest_commit_hash)
                    except Exception as e:
                        print('Error updating visualisations!')
                        print(e)
                _component.save()
                # --- contributors ---
                print(str(github_data['contributors_url']))
                try:
                    response = send_GET_request(str(github_data['contributors_url']), GITHUB_CLIENT_ID, GITHUB_CLIENT_SECRET)
                    contributors_data = json.load(response)
                except:
                    continue
                commits = 0
                count = 0
                try:
                    for contributor in contributors_data:
                        try:
                            _contributor = Contributor.objects.get(username=contributor["login"])
                        except:
                            _contributor = Contributor.objects.create(username=contributor["login"], avatar_url=contributor["avatar_url"])
                        try:
                            _contribution = Contribution.objects.get(component=_component, contributor=_contributor)
                            _contribution.contributions = contributor["contributions"]
                            _contribution.save()
                        except:
                            _contribution = Contribution.objects.create(component=_component, contributor=_contributor, contributions=contributor["contributions"])
                        commits += _contribution.contributions
                        count += 1
                except:
                    print('Error')
                    continue
                _component.downloads = get_npm_downloads(_component.npm_url)
                _component.commits = commits
                _component.no_of_contributors = count
                _component.save()
|
# NOTE(review): the `def` header for this block appears to be missing from the
# file. Judging by the call site `get_npm_downloads(_component.npm_url)` above,
# this is the body of `def get_npm_downloads(url):` -- confirm against version
# control before relying on this listing.
# Returns the total npm download count for the package over the last week.
package=url.split('/')[-1]  # package name is the last path segment of the npm URL
# dateRange='1980-02-12:'+str(datetime.date(datetime.now()))
dateRange='last-week'  # only the trailing week is summed
url='https://api.npmjs.org/downloads/range/'+dateRange+'/'+package
# Browser-like User-Agent; presumably api.npmjs.org rejects the default
# urllib2 agent -- TODO confirm.
hdr = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11'}
req = urllib2.Request(url, headers=hdr)
response = urllib2.urlopen(req)
data = json.load(response)
# Sum the per-day counts returned by the range endpoint.
download_count=0
for i in data['downloads']:
    download_count+=i['downloads']
return download_count
|
itertools.rs
|
pub(crate) use decl::make_module;
#[pymodule(name = "itertools")]
mod decl {
use crossbeam_utils::atomic::AtomicCell;
use num_bigint::BigInt;
use num_traits::{One, Signed, ToPrimitive, Zero};
use std::fmt;
use std::iter;
use crate::common::cell::{PyMutex, PyRwLock, PyRwLockWriteGuard};
use crate::common::rc::PyRc;
use crate::function::{Args, OptionalArg, OptionalOption, PyFuncArgs};
use crate::obj::objbool;
use crate::obj::objint::{self, PyInt, PyIntRef};
use crate::obj::objiter::{call_next, get_all, get_iter, get_next_object, new_stop_iteration};
use crate::obj::objtuple::PyTupleRef;
use crate::obj::objtype::{self, PyTypeRef};
use crate::pyobject::{
BorrowValue, IdProtocol, IntoPyRef, PyCallable, PyObjectRc, PyObjectRef, PyObjectWeak,
PyRef, PyResult, PyValue, TypeProtocol,
};
use crate::vm::VirtualMachine;
/// itertools.chain -- yields the elements of each iterable in `iterables`, in order.
#[pyattr]
#[pyclass(name = "chain")]
#[derive(Debug)]
struct PyItertoolsChain {
    /// Source iterables, fixed at construction time.
    iterables: Vec<PyObjectRef>,
    /// Index of the iterable currently being drained.
    cur_idx: AtomicCell<usize>,
    /// Lazily-created iterator over `iterables[cur_idx]`; reset to None on exhaustion.
    cached_iter: PyRwLock<Option<PyObjectRef>>,
}
impl PyValue for PyItertoolsChain {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "chain")
    }
}
#[pyimpl]
impl PyItertoolsChain {
    /// chain(*iterables): every positional argument becomes a source iterable.
    #[pyslot]
    fn tp_new(cls: PyTypeRef, args: PyFuncArgs, vm: &VirtualMachine) -> PyResult<PyRef<Self>> {
        PyItertoolsChain {
            iterables: args.args,
            cur_idx: AtomicCell::new(0),
            cached_iter: PyRwLock::new(None),
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        loop {
            let pos = self.cur_idx.load();
            if pos >= self.iterables.len() {
                break; // every source iterable is exhausted
            }
            let cur_iter = if self.cached_iter.read().is_none() {
                // We need to call "get_iter" outside of the lock.
                let iter = get_iter(vm, &self.iterables[pos])?;
                *self.cached_iter.write() = Some(iter.clone());
                iter
            } else if let Some(cached_iter) = (*(self.cached_iter.read())).clone() {
                cached_iter
            } else {
                // Someone changed cached iter to None since we checked.
                continue;
            };
            // We need to call "call_next" outside of the lock.
            match call_next(vm, &cur_iter) {
                Ok(ok) => return Ok(ok),
                Err(err) => {
                    if objtype::isinstance(&err, &vm.ctx.exceptions.stop_iteration) {
                        // Current iterable finished: advance and drop the cached iterator.
                        self.cur_idx.fetch_add(1);
                        *self.cached_iter.write() = None;
                    } else {
                        return Err(err);
                    }
                }
            }
        }
        Err(new_stop_iteration(vm))
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
    /// chain.from_iterable(iterable): like chain(*iterable), but the outer
    /// iterable is consumed eagerly here.
    #[pyclassmethod(name = "from_iterable")]
    fn from_iterable(
        cls: PyTypeRef,
        iterable: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let it = get_iter(vm, &iterable)?;
        let iterables = get_all(vm, &it)?;
        PyItertoolsChain {
            iterables,
            cur_idx: AtomicCell::new(0),
            cached_iter: PyRwLock::new(None),
        }
        .into_ref_with_type(vm, cls)
    }
}
/// itertools.compress -- yields elements of `data` whose corresponding
/// element in `selector` is truthy.
#[pyattr]
#[pyclass(name = "compress")]
#[derive(Debug)]
struct PyItertoolsCompress {
    data: PyObjectRef,
    selector: PyObjectRef,
}
impl PyValue for PyItertoolsCompress {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "compress")
    }
}
#[pyimpl]
impl PyItertoolsCompress {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        data: PyObjectRef,
        selector: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        // Both arguments are converted to iterators up front.
        let data = get_iter(vm, &data)?;
        let selector = get_iter(vm, &selector)?;
        PyItertoolsCompress { data, selector }.into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        // Advance both streams in lock-step; the selector element is pulled
        // and truth-tested before the data stream is advanced.
        loop {
            let selection = call_next(vm, &self.selector)?;
            let keep = objbool::boolval(vm, selection.clone())?;
            let item = call_next(vm, &self.data)?;
            if keep {
                return Ok(item);
            }
        }
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.count(start=0, step=1) -- an endless arithmetic progression
/// over arbitrary-precision integers.
#[pyattr]
#[pyclass(name = "count")]
#[derive(Debug)]
struct PyItertoolsCount {
    cur: PyRwLock<BigInt>,
    step: BigInt,
}
impl PyValue for PyItertoolsCount {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "count")
    }
}
#[pyimpl]
impl PyItertoolsCount {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        start: OptionalArg<PyIntRef>,
        step: OptionalArg<PyIntRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        // Defaults: start=0, step=1.
        let start = start
            .into_option()
            .map_or_else(BigInt::zero, |i| i.borrow_value().clone());
        let step = step
            .into_option()
            .map_or_else(BigInt::one, |i| i.borrow_value().clone());
        PyItertoolsCount {
            cur: PyRwLock::new(start),
            step,
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult<PyIntRef> {
        // Return the current value, then advance it by `step`.
        let mut current = self.cur.write();
        let out = current.clone();
        *current += &self.step;
        Ok(out.into_pyref(vm))
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.cycle -- yields elements from the iterable while saving them;
/// once the source is exhausted, replays the saved elements indefinitely.
#[pyattr]
#[pyclass(name = "cycle")]
#[derive(Debug)]
struct PyItertoolsCycle {
    /// The underlying (single-pass) iterator.
    iter: PyObjectRef,
    /// Elements already produced, replayed after `iter` is exhausted.
    saved: PyRwLock<Vec<PyObjectRef>>,
    /// Replay cursor into `saved`.
    index: AtomicCell<usize>,
}
impl PyValue for PyItertoolsCycle {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "cycle")
    }
}
#[pyimpl]
impl PyItertoolsCycle {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        iterable: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let iter = get_iter(vm, &iterable)?;
        PyItertoolsCycle {
            iter,
            saved: PyRwLock::new(Vec::new()),
            index: AtomicCell::new(0),
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        let item = if let Some(item) = get_next_object(vm, &self.iter)? {
            // First pass: remember every element for later replay.
            self.saved.write().push(item.clone());
            item
        } else {
            // Replay phase: an empty save list means the source was empty.
            let saved = self.saved.read();
            if saved.len() == 0 {
                return Err(new_stop_iteration(vm));
            }
            // fetch_add returns the pre-increment cursor; wrap back to 0 when
            // the last saved element has been handed out.
            let last_index = self.index.fetch_add(1);
            if last_index >= saved.len() - 1 {
                self.index.store(0);
            }
            saved[last_index].clone()
        };
        Ok(item)
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.repeat(object[, times]) -- yields `object` forever, or exactly
/// `times` times when a count is supplied.
#[pyattr]
#[pyclass(name = "repeat")]
#[derive(Debug)]
struct PyItertoolsRepeat {
    object: PyObjectRef,
    times: Option<PyRwLock<BigInt>>,
}
impl PyValue for PyItertoolsRepeat {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "repeat")
    }
}
#[pyimpl]
impl PyItertoolsRepeat {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        object: PyObjectRef,
        times: OptionalArg<PyIntRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        // A missing `times` means "repeat forever".
        let times = times
            .into_option()
            .map(|n| PyRwLock::new(n.borrow_value().clone()));
        PyItertoolsRepeat { object, times }.into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        // Finite mode: decrement the remaining count, stopping at zero.
        if let Some(remaining) = &self.times {
            let mut remaining = remaining.write();
            if !remaining.is_positive() {
                return Err(new_stop_iteration(vm));
            }
            *remaining -= 1;
        }
        Ok(self.object.clone())
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
    #[pymethod(name = "__length_hint__")]
    fn length_hint(&self, vm: &VirtualMachine) -> PyObjectRef {
        // An infinite repeat reports 0, matching the original implementation.
        match &self.times {
            Some(remaining) => vm.ctx.new_int(remaining.read().clone()),
            None => vm.ctx.new_int(0),
        }
    }
}
/// itertools.starmap(function, iterable) -- calls `function(*item)` for each
/// item of the iterable.
#[pyattr]
#[pyclass(name = "starmap")]
#[derive(Debug)]
struct PyItertoolsStarmap {
    function: PyObjectRef,
    iter: PyObjectRef,
}
impl PyValue for PyItertoolsStarmap {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "starmap")
    }
}
#[pyimpl]
impl PyItertoolsStarmap {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        function: PyObjectRef,
        iterable: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let iter = get_iter(vm, &iterable)?;
        PyItertoolsStarmap { function, iter }.into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        // Unpack the next element into positional arguments of `function`.
        let args_obj = call_next(vm, &self.iter)?;
        let unpacked = vm.extract_elements(&args_obj)?;
        vm.invoke(&self.function, unpacked)
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.takewhile(predicate, iterable) -- yields items while the
/// predicate is truthy, then stops permanently at the first falsy result.
#[pyattr]
#[pyclass(name = "takewhile")]
#[derive(Debug)]
struct PyItertoolsTakewhile {
    predicate: PyObjectRef,
    iterable: PyObjectRef,
    stop_flag: AtomicCell<bool>,
}
impl PyValue for PyItertoolsTakewhile {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "takewhile")
    }
}
#[pyimpl]
impl PyItertoolsTakewhile {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        predicate: PyObjectRef,
        iterable: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let inner = get_iter(vm, &iterable)?;
        PyItertoolsTakewhile {
            predicate,
            iterable: inner,
            stop_flag: AtomicCell::new(false),
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        if self.stop_flag.load() {
            return Err(new_stop_iteration(vm));
        }
        // Any error here (including StopIteration) propagates unchanged.
        let item = call_next(vm, &self.iterable)?;
        let pred_result = vm.invoke(&self.predicate, vec![item.clone()])?;
        if objbool::boolval(vm, pred_result)? {
            Ok(item)
        } else {
            // First falsy predicate result: latch the stop flag for good.
            self.stop_flag.store(true);
            Err(new_stop_iteration(vm))
        }
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.dropwhile(predicate, iterable) -- skips items while the
/// predicate is truthy, then yields every remaining item unconditionally.
#[pyattr]
#[pyclass(name = "dropwhile")]
#[derive(Debug)]
struct PyItertoolsDropwhile {
    predicate: PyCallable,
    iterable: PyObjectRef,
    /// Set once the predicate has gone falsy; from then on items pass through.
    start_flag: AtomicCell<bool>,
}
impl PyValue for PyItertoolsDropwhile {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "dropwhile")
    }
}
#[pyimpl]
impl PyItertoolsDropwhile {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        predicate: PyCallable,
        iterable: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let iter = get_iter(vm, &iterable)?;
        PyItertoolsDropwhile {
            predicate,
            iterable: iter,
            start_flag: AtomicCell::new(false),
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        let predicate = &self.predicate;
        let iterable = &self.iterable;
        if !self.start_flag.load() {
            // Dropping phase: consume until the predicate turns falsy.
            loop {
                let obj = call_next(vm, iterable)?;
                let pred = predicate.clone();
                let pred_value = vm.invoke(&pred.into_object(), vec![obj.clone()])?;
                if !objbool::boolval(vm, pred_value)? {
                    self.start_flag.store(true);
                    return Ok(obj);
                }
            }
        }
        // Pass-through phase.
        call_next(vm, iterable)
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// Mutable state shared between a groupby iterator and its current _grouper.
struct GroupByState {
    /// Element pulled ahead of its group boundary, waiting to be emitted.
    current_value: Option<PyObjectRef>,
    /// Key of the group currently being produced.
    current_key: Option<PyObjectRef>,
    /// True when the next group boundary has already been observed.
    next_group: bool,
    /// Weak reference to the grouper currently allowed to consume items.
    grouper: Option<PyObjectWeak<PyItertoolsGrouper>>,
}
impl fmt::Debug for GroupByState {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // `grouper` is deliberately omitted (weak reference, no useful Debug).
        f.debug_struct("GroupByState")
            .field("current_value", &self.current_value)
            .field("current_key", &self.current_key)
            .field("next_group", &self.next_group)
            .finish()
    }
}
impl GroupByState {
    /// True when `grouper` is the grouper currently registered in this state.
    fn is_current(&self, grouper: &PyItertoolsGrouperRef) -> bool {
        self.grouper
            .as_ref()
            .and_then(|g| g.upgrade())
            .map_or(false, |ref current_grouper| grouper.is(current_grouper))
    }
}
/// itertools.groupby -- yields (key, group-iterator) pairs for consecutive
/// runs of equal keys.
#[pyattr]
#[pyclass(name = "groupby")]
struct PyItertoolsGroupBy {
    iterable: PyObjectRef,
    /// Optional key function; None means "use the element itself as the key".
    key_func: Option<PyObjectRef>,
    state: PyMutex<GroupByState>,
}
impl PyValue for PyItertoolsGroupBy {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "groupby")
    }
}
impl fmt::Debug for PyItertoolsGroupBy {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("PyItertoolsGroupBy")
            .field("iterable", &self.iterable)
            .field("key_func", &self.key_func)
            .field("state", &self.state.lock())
            .finish()
    }
}
/// Argument parser for groupby(iterable, key=None).
#[derive(FromArgs)]
struct GroupByArgs {
    iterable: PyObjectRef,
    #[pyarg(positional_or_keyword, optional = true)]
    key: OptionalOption<PyObjectRef>,
}
#[pyimpl]
impl PyItertoolsGroupBy {
    #[pyslot]
    fn tp_new(cls: PyTypeRef, args: GroupByArgs, vm: &VirtualMachine) -> PyResult<PyRef<Self>> {
        let iter = get_iter(vm, &args.iterable)?;
        PyItertoolsGroupBy {
            iterable: iter,
            key_func: args.key.flatten(),
            state: PyMutex::new(GroupByState {
                current_key: None,
                current_value: None,
                next_group: false,
                grouper: None,
            }),
        }
        .into_ref_with_type(vm, cls)
    }
    /// Returns (key, grouper). When the previous grouper was not fully
    /// consumed, the remainder of its group is drained here first.
    #[pymethod(name = "__next__")]
    fn next(
        zelf: PyRef<Self>,
        vm: &VirtualMachine,
    ) -> PyResult<(PyObjectRef, PyItertoolsGrouperRef)> {
        let mut state = zelf.state.lock();
        state.grouper = None;
        if !state.next_group {
            // FIXME: unnecessary clone. current_key always exist until assinging new
            let current_key = state.current_key.clone();
            // The lock must be released before `advance`, which re-enters Python code.
            drop(state);
            let (value, key) = if let Some(old_key) = current_key {
                // Skip the rest of the current group until the key changes.
                loop {
                    let (value, new_key) = zelf.advance(vm)?;
                    if !vm.bool_eq(&new_key, &old_key)? {
                        break (value, new_key);
                    }
                }
            } else {
                zelf.advance(vm)?
            };
            state = zelf.state.lock();
            state.current_value = Some(value);
            state.current_key = Some(key);
        }
        state.next_group = false;
        let grouper = PyItertoolsGrouper {
            groupby: zelf.clone(),
        }
        .into_ref(vm);
        // Register the new grouper weakly so stale groupers stop yielding.
        state.grouper = Some(PyObjectRc::downgrade(&grouper.clone().into_typed_pyobj()));
        Ok((state.current_key.as_ref().unwrap().clone(), grouper))
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
    /// Pulls one element and computes its key (the element itself when no
    /// key function was supplied).
    pub(super) fn advance(&self, vm: &VirtualMachine) -> PyResult<(PyObjectRef, PyObjectRef)> {
        let new_value = call_next(vm, &self.iterable)?;
        let new_key = if let Some(ref kf) = self.key_func {
            vm.invoke(kf, new_value.clone())?
        } else {
            new_value.clone()
        };
        Ok((new_value, new_key))
    }
}
/// The per-group iterator handed out by groupby.__next__.
#[pyattr]
#[pyclass(name = "_grouper")]
#[derive(Debug)]
struct PyItertoolsGrouper {
    groupby: PyRef<PyItertoolsGroupBy>,
}
type PyItertoolsGrouperRef = PyRef<PyItertoolsGrouper>;
impl PyValue for PyItertoolsGrouper {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "_grouper")
    }
}
#[pyimpl]
impl PyItertoolsGrouper {
    #[pymethod(name = "__next__")]
    fn next(zelf: PyRef<Self>, vm: &VirtualMachine) -> PyResult {
        let old_key = {
            let mut state = zelf.groupby.state.lock();
            // A stale grouper (a newer one has been handed out) is exhausted.
            if !state.is_current(&zelf) {
                return Err(new_stop_iteration(vm));
            }
            // check to see if the value has already been retrieved from the iterator
            if let Some(val) = state.current_value.take() {
                return Ok(val);
            }
            state.current_key.as_ref().unwrap().clone()
        };
        // Lock is released here: `advance` may run arbitrary Python code.
        let (value, key) = zelf.groupby.advance(vm)?;
        if vm.bool_eq(&key, &old_key)? {
            Ok(value)
        } else {
            // Key changed: stash the item for the next group and stop this one.
            let mut state = zelf.groupby.state.lock();
            state.current_value = Some(value);
            state.current_key = Some(key);
            state.next_group = true;
            state.grouper = None;
            Err(new_stop_iteration(vm))
        }
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.islice(iterable, [start,] stop[, step]) -- lazy slicing.
#[pyattr]
#[pyclass(name = "islice")]
#[derive(Debug)]
struct PyItertoolsIslice {
    iterable: PyObjectRef,
    /// Number of elements consumed from the source so far.
    cur: AtomicCell<usize>,
    /// Source position of the next element to emit.
    next: AtomicCell<usize>,
    /// Exclusive stop position; None means unbounded.
    stop: Option<usize>,
    step: usize,
}
impl PyValue for PyItertoolsIslice {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "islice")
    }
}
/// Converts `obj` to a usize when it is a Python int that fits; otherwise None.
fn pyobject_to_opt_usize(obj: PyObjectRef, vm: &VirtualMachine) -> Option<usize> {
    if objtype::isinstance(&obj, &vm.ctx.types.int_type) {
        objint::get_value(&obj).to_usize()
    } else {
        None
    }
}
#[pyimpl]
impl PyItertoolsIslice {
    /// Accepted forms: islice(iterable, stop) or
    /// islice(iterable, start, stop[, step]).
    #[pyslot]
    fn tp_new(cls: PyTypeRef, args: PyFuncArgs, vm: &VirtualMachine) -> PyResult<PyRef<Self>> {
        let (iter, start, stop, step) = match args.args.len() {
            0 | 1 => {
                return Err(vm.new_type_error(format!(
                    "islice expected at least 2 arguments, got {}",
                    args.args.len()
                )));
            }
            2 => {
                // Two-argument form: only a stop value; start=0, step=1.
                let (iter, stop): (PyObjectRef, PyObjectRef) = args.bind(vm)?;
                (iter, 0usize, stop, 1usize)
            }
            _ => {
                let (iter, start, stop, step): (
                    PyObjectRef,
                    PyObjectRef,
                    PyObjectRef,
                    PyObjectRef,
                ) = args.bind(vm)?;
                // None is accepted for start/step and maps to the defaults.
                let start = if !vm.is_none(&start) {
                    pyobject_to_opt_usize(start, &vm).ok_or_else(|| {
                        vm.new_value_error(
                            "Indices for islice() must be None or an integer: 0 <= x <= sys.maxsize.".to_owned(),
                        )
                    })?
                } else {
                    0usize
                };
                let step = if !vm.is_none(&step) {
                    pyobject_to_opt_usize(step, &vm).ok_or_else(|| {
                        vm.new_value_error(
                            "Step for islice() must be a positive integer or None.".to_owned(),
                        )
                    })?
                } else {
                    1usize
                };
                (iter, start, stop, step)
            }
        };
        let stop = if !vm.is_none(&stop) {
            Some(pyobject_to_opt_usize(stop, &vm).ok_or_else(|| {
                vm.new_value_error(
                    "Stop argument for islice() must be None or an integer: 0 <= x <= sys.maxsize."
                        .to_owned(),
                )
            })?)
        } else {
            None
        };
        let iter = get_iter(vm, &iter)?;
        PyItertoolsIslice {
            iterable: iter,
            cur: AtomicCell::new(0),
            next: AtomicCell::new(start),
            stop,
            step,
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        // Discard source elements until the next emit position is reached.
        while self.cur.load() < self.next.load() {
            call_next(vm, &self.iterable)?;
            self.cur.fetch_add(1);
        }
        if let Some(stop) = self.stop {
            if self.cur.load() >= stop {
                return Err(new_stop_iteration(vm));
            }
        }
        let obj = call_next(vm, &self.iterable)?;
        self.cur.fetch_add(1);
        // TODO is this overflow check required? attempts to copy CPython.
        // NOTE(review): on overflow this unwraps `stop`, which would panic when
        // stop is None -- practically unreachable, but worth confirming.
        let (next, ovf) = self.next.load().overflowing_add(self.step);
        self.next.store(if ovf { self.stop.unwrap() } else { next });
        Ok(obj)
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.filterfalse(predicate, iterable) -- yields items for which the
/// predicate is falsy; a predicate of None tests the item's own truthiness.
#[pyattr]
#[pyclass(name = "filterfalse")]
#[derive(Debug)]
struct PyItertoolsFilterFalse {
    predicate: PyObjectRef,
    iterable: PyObjectRef,
}
impl PyValue for PyItertoolsFilterFalse {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "filterfalse")
    }
}
#[pyimpl]
impl PyItertoolsFilterFalse {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        predicate: PyObjectRef,
        iterable: PyObjectRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let inner = get_iter(vm, &iterable)?;
        PyItertoolsFilterFalse {
            predicate,
            iterable: inner,
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        loop {
            let item = call_next(vm, &self.iterable)?;
            // With a None predicate the item itself is the truth value tested.
            let verdict = if vm.is_none(&self.predicate) {
                item.clone()
            } else {
                vm.invoke(&self.predicate, vec![item.clone()])?
            };
            if !objbool::boolval(vm, verdict)? {
                return Ok(item);
            }
        }
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.accumulate(iterable[, func]) -- running totals (or running
/// applications of `func`) over the iterable.
#[pyattr]
#[pyclass(name = "accumulate")]
#[derive(Debug)]
struct PyItertoolsAccumulate {
    iterable: PyObjectRef,
    binop: PyObjectRef,
    acc_value: PyRwLock<Option<PyObjectRef>>,
}
impl PyValue for PyItertoolsAccumulate {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "accumulate")
    }
}
#[pyimpl]
impl PyItertoolsAccumulate {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        iterable: PyObjectRef,
        binop: OptionalArg<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let inner = get_iter(vm, &iterable)?;
        PyItertoolsAccumulate {
            iterable: inner,
            // A missing func is stored as Python None and means "use +".
            binop: binop.unwrap_or_none(vm),
            acc_value: PyRwLock::new(None),
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        let item = call_next(vm, &self.iterable)?;
        let previous = self.acc_value.read().clone();
        let accumulated = match previous {
            // The first element is emitted unchanged.
            None => item,
            Some(prev) => {
                if vm.is_none(&self.binop) {
                    vm._add(&prev, &item)?
                } else {
                    vm.invoke(&self.binop, vec![prev, item])?
                }
            }
        };
        *self.acc_value.write() = Some(accumulated.clone());
        Ok(accumulated)
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// Buffer shared by all tee iterators cloned from one source: caches every
/// element so slower clones can catch up.
#[derive(Debug)]
struct PyItertoolsTeeData {
    iterable: PyObjectRef,
    values: PyRwLock<Vec<PyObjectRef>>,
}
impl PyItertoolsTeeData {
    fn new(iterable: PyObjectRef, vm: &VirtualMachine) -> PyResult<PyRc<PyItertoolsTeeData>> {
        Ok(PyRc::new(PyItertoolsTeeData {
            iterable: get_iter(vm, &iterable)?,
            values: PyRwLock::new(vec![]),
        }))
    }
    /// Returns the element at `index`, pulling one new element from the
    /// source when `index` is exactly one past the buffered range.
    /// NOTE(review): the length check and the push are separate lock
    /// acquisitions; two threads requesting the same fresh index could
    /// double-advance the source -- confirm whether callers are serialized.
    fn get_item(&self, vm: &VirtualMachine, index: usize) -> PyResult {
        if self.values.read().len() == index {
            let result = call_next(vm, &self.iterable)?;
            self.values.write().push(result);
        }
        Ok(self.values.read()[index].clone())
    }
}
/// itertools.tee -- n independent iterators over one source, sharing a buffer.
#[pyattr]
#[pyclass(name = "tee")]
#[derive(Debug)]
struct PyItertoolsTee {
    tee_data: PyRc<PyItertoolsTeeData>,
    /// This clone's cursor into the shared buffer.
    index: AtomicCell<usize>,
}
impl PyValue for PyItertoolsTee {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "tee")
    }
}
#[pyimpl]
impl PyItertoolsTee {
    /// Wraps `iterable` in a tee; an existing tee is copied instead of nested.
    fn from_iter(iterable: PyObjectRef, vm: &VirtualMachine) -> PyResult {
        let it = get_iter(vm, &iterable)?;
        if it.class().is(&PyItertoolsTee::class(vm)) {
            return vm.call_method(&it, "__copy__", PyFuncArgs::from(vec![]));
        }
        Ok(PyItertoolsTee {
            tee_data: PyItertoolsTeeData::new(it, vm)?,
            index: AtomicCell::new(0),
        }
        .into_ref_with_type(vm, PyItertoolsTee::class(vm))?
        .into_object())
    }
    // TODO: make tee() a function, rename this class to itertools._tee and make
    // teedata a python class
    /// tee(iterable, n=2) -> tuple of n iterators.
    #[pyslot]
    #[allow(clippy::new_ret_no_self)]
    fn tp_new(
        _cls: PyTypeRef,
        iterable: PyObjectRef,
        n: OptionalArg<usize>,
        vm: &VirtualMachine,
    ) -> PyResult<PyTupleRef> {
        let n = n.unwrap_or(2);
        // Objects providing __copy__ are copied directly; anything else is
        // wrapped via from_iter first.
        let copyable = if iterable.lease_class().has_attr("__copy__") {
            vm.call_method(&iterable, "__copy__", PyFuncArgs::from(vec![]))?
        } else {
            PyItertoolsTee::from_iter(iterable, vm)?
        };
        let mut tee_vec: Vec<PyObjectRef> = Vec::with_capacity(n);
        for _ in 0..n {
            let no_args = PyFuncArgs::from(vec![]);
            // NOTE(review): "&copyable" restored here -- the listing had the
            // mojibake "©able" (an HTML-entity corruption of "&copy").
            tee_vec.push(vm.call_method(&copyable, "__copy__", no_args)?);
        }
        Ok(PyTupleRef::with_elements(tee_vec, &vm.ctx))
    }
    /// Copies share `tee_data` but keep an independent cursor.
    #[pymethod(name = "__copy__")]
    fn copy(&self, vm: &VirtualMachine) -> PyResult {
        Ok(PyItertoolsTee {
            tee_data: PyRc::clone(&self.tee_data),
            index: AtomicCell::new(self.index.load()),
        }
        .into_ref_with_type(vm, Self::class(vm))?
        .into_object())
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        let value = self.tee_data.get_item(vm, self.index.load())?;
        self.index.fetch_add(1);
        Ok(value)
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.product -- cartesian product of the input iterables.
#[pyattr]
#[pyclass(name = "product")]
#[derive(Debug)]
struct PyItertoolsProduct {
    /// Fully materialized input iterables (replicated `repeat` times).
    pools: Vec<Vec<PyObjectRef>>,
    /// Current index into each pool (odometer digits).
    idxs: PyRwLock<Vec<usize>>,
    /// The odometer digit currently being advanced.
    cur: AtomicCell<usize>,
    /// Set once the product is exhausted.
    stop: AtomicCell<bool>,
}
impl PyValue for PyItertoolsProduct {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "product")
    }
}
/// Keyword arguments for product(*iterables, repeat=1).
#[derive(FromArgs)]
struct ProductArgs {
    #[pyarg(keyword_only, optional = true)]
    repeat: OptionalArg<usize>,
}
#[pyimpl]
impl PyItertoolsProduct {
    /// product(*iterables, repeat=1): materializes every input and yields
    /// tuples in odometer order.
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        iterables: Args<PyObjectRef>,
        args: ProductArgs,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let repeat = match args.repeat.into_option() {
            Some(i) => i,
            None => 1,
        };
        // Each iterable is drained into its own pool up front.
        let mut pools = Vec::new();
        for arg in iterables.into_iter() {
            let it = get_iter(vm, &arg)?;
            let pool = get_all(vm, &it)?;
            pools.push(pool);
        }
        // `repeat` concatenates copies of the pool list, as in CPython.
        let pools = iter::repeat(pools)
            .take(repeat)
            .flatten()
            .collect::<Vec<Vec<PyObjectRef>>>();
        let l = pools.len();
        PyItertoolsProduct {
            pools,
            idxs: PyRwLock::new(vec![0; l]),
            // wrapping_sub keeps the zero-pool case well-defined.
            cur: AtomicCell::new(l.wrapping_sub(1)),
            stop: AtomicCell::new(false),
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        // stop signal
        if self.stop.load() {
            return Err(new_stop_iteration(vm));
        }
        let pools = &self.pools;
        // BUGFIX: this guard was garbled in the source ("if p.is_empty()" with
        // no body). A product containing any empty pool is itself empty.
        for p in pools {
            if p.is_empty() {
                return Err(new_stop_iteration(vm));
            }
        }
        let idxs = self.idxs.write();
        let res = vm.ctx.new_tuple(
            pools
                .iter()
                .zip(idxs.iter())
                .map(|(pool, idx)| pool[*idx].clone())
                .collect(),
        );
        self.update_idxs(idxs);
        Ok(res)
    }
    /// Odometer-style advance of the per-pool indices; latches `stop` when
    /// the left-most pool rolls over.
    fn update_idxs(&self, mut idxs: PyRwLockWriteGuard<'_, Vec<usize>>) {
        if idxs.len() == 0 {
            self.stop.store(true);
            return;
        }
        let cur = self.cur.load();
        let lst_idx = &self.pools[cur].len() - 1;
        if idxs[cur] == lst_idx {
            if cur == 0 {
                self.stop.store(true);
                return;
            }
            // Carry: reset this digit and advance the one to its left.
            idxs[cur] = 0;
            self.cur.fetch_sub(1);
            self.update_idxs(idxs);
        } else {
            idxs[cur] += 1;
            self.cur.store(idxs.len() - 1);
        }
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
/// itertools.combinations(iterable, r) -- r-length subsequences in sorted
/// index order, without replacement.
#[pyattr]
#[pyclass(name = "combinations")]
#[derive(Debug)]
struct PyItertoolsCombinations {
    pool: Vec<PyObjectRef>,
    /// Indices of the next combination to emit (strictly increasing).
    indices: PyRwLock<Vec<usize>>,
    r: AtomicCell<usize>,
    exhausted: AtomicCell<bool>,
}
impl PyValue for PyItertoolsCombinations {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "combinations")
    }
}
#[pyimpl]
impl PyItertoolsCombinations {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        iterable: PyObjectRef,
        r: PyIntRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let iter = get_iter(vm, &iterable)?;
        let pool = get_all(vm, &iter)?;
        let r = r.borrow_value();
        if r.is_negative() {
            return Err(vm.new_value_error("r must be non-negative".to_owned()));
        }
        let r = r.to_usize().unwrap();
        let n = pool.len();
        PyItertoolsCombinations {
            pool,
            indices: PyRwLock::new((0..r).collect()),
            r: AtomicCell::new(r),
            // r > n yields no combinations at all.
            exhausted: AtomicCell::new(r > n),
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        // stop signal
        if self.exhausted.load() {
            return Err(new_stop_iteration(vm));
        }
        let n = self.pool.len();
        let r = self.r.load();
        // r == 0 produces exactly one empty tuple.
        if r == 0 {
            self.exhausted.store(true);
            return Ok(vm.ctx.new_tuple(vec![]));
        }
        let res = vm.ctx.new_tuple(
            self.indices
                .read()
                .iter()
                .map(|&i| self.pool[i].clone())
                .collect(),
        );
        let mut indices = self.indices.write();
        // Scan indices right-to-left until finding one that is not at its maximum (i + n - r).
        let mut idx = r as isize - 1;
        while idx >= 0 && indices[idx as usize] == idx as usize + n - r {
            idx -= 1;
        }
        // If no suitable index is found, then the indices are all at
        // their maximum value and we're done.
        if idx < 0 {
            self.exhausted.store(true);
        } else {
            // Increment the current index which we know is not at its
            // maximum. Then move back to the right setting each index
            // to its lowest possible value (one higher than the index
            // to its left -- this maintains the sort order invariant).
            indices[idx as usize] += 1;
            for j in idx as usize + 1..r {
                indices[j] = indices[j - 1] + 1;
            }
        }
        Ok(res)
    }
}
/// itertools.combinations_with_replacement(iterable, r) -- r-length
/// subsequences in sorted index order, allowing repeated elements.
#[pyattr]
#[pyclass(name = "combinations_with_replacement")]
#[derive(Debug)]
struct PyItertoolsCombinationsWithReplacement {
    pool: Vec<PyObjectRef>,
    /// Indices of the next combination to emit (non-decreasing).
    indices: PyRwLock<Vec<usize>>,
    r: AtomicCell<usize>,
    exhausted: AtomicCell<bool>,
}
impl PyValue for PyItertoolsCombinationsWithReplacement {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "combinations_with_replacement")
    }
}
#[pyimpl]
impl PyItertoolsCombinationsWithReplacement {
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        iterable: PyObjectRef,
        r: PyIntRef,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        let iter = get_iter(vm, &iterable)?;
        let pool = get_all(vm, &iter)?;
        let r = r.borrow_value();
        if r.is_negative() {
            return Err(vm.new_value_error("r must be non-negative".to_owned()));
        }
        let r = r.to_usize().unwrap();
        let n = pool.len();
        PyItertoolsCombinationsWithReplacement {
            pool,
            indices: PyRwLock::new(vec![0; r]),
            r: AtomicCell::new(r),
            // With replacement, only an empty pool with r > 0 is empty output.
            exhausted: AtomicCell::new(n == 0 && r > 0),
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        // stop signal
        if self.exhausted.load() {
            return Err(new_stop_iteration(vm));
        }
        let n = self.pool.len();
        let r = self.r.load();
        // r == 0 produces exactly one empty tuple.
        if r == 0 {
            self.exhausted.store(true);
            return Ok(vm.ctx.new_tuple(vec![]));
        }
        let mut indices = self.indices.write();
        let res = vm
            .ctx
            .new_tuple(indices.iter().map(|&i| self.pool[i].clone()).collect());
        // Scan indices right-to-left until finding one that is not at its maximum (i + n - r).
        let mut idx = r as isize - 1;
        while idx >= 0 && indices[idx as usize] == n - 1 {
            idx -= 1;
        }
        // If no suitable index is found, then the indices are all at
        // their maximum value and we're done.
        if idx < 0 {
            self.exhausted.store(true);
        } else {
            let index = indices[idx as usize] + 1;
            // Increment the current index which we know is not at its
            // maximum. Then set all to the right to the same value.
            for j in idx as usize..r {
                indices[j as usize] = index as usize;
            }
        }
        Ok(res)
    }
}
#[pyattr]
#[pyclass(name = "permutations")]
#[derive(Debug)]
struct PyItertoolsPermutations {
    pool: Vec<PyObjectRef>,               // Collected input iterable
    indices: PyRwLock<Vec<usize>>,        // One index per element in pool
    cycles: PyRwLock<Vec<usize>>,         // One rollover counter per element in the result
    result: PyRwLock<Option<Vec<usize>>>, // Indexes of the most recently returned result
    r: AtomicCell<usize>,                 // Size of result tuple
    exhausted: AtomicCell<bool>,          // Set when the iterator is exhausted
}
impl PyValue for PyItertoolsPermutations {
    // Ties this payload to the Python-level `itertools.permutations` type.
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "permutations")
    }
}
#[pyimpl]
impl PyItertoolsPermutations {
    /// `permutations(iterable, r=None)` constructor.
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        iterable: PyObjectRef,
        r: OptionalOption<PyObjectRef>,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        // Materialize the iterable; permutations are built from indices
        // into this pool.
        let iter = get_iter(vm, &iterable)?;
        let pool = get_all(vm, &iter)?;
        let n = pool.len();
        // If r is not provided, r == n. If provided, r must be a non-negative
        // integer, or None. If None, it behaves the same as if it was not
        // provided.
        let r = match r.flatten() {
            Some(r) => {
                let val = r
                    .payload::<PyInt>()
                    .ok_or_else(|| vm.new_type_error("Expected int as r".to_owned()))?
                    .borrow_value();
                if val.is_negative() {
                    return Err(vm.new_value_error("r must be non-negative".to_owned()));
                }
                val.to_usize().unwrap()
            }
            None => n,
        };
        PyItertoolsPermutations {
            pool,
            indices: PyRwLock::new((0..n).collect()),
            // cycles[i]: remaining rotations at position i before the
            // position to its left must advance.
            cycles: PyRwLock::new((0..r.min(n)).map(|i| n - i).collect()),
            result: PyRwLock::new(None),
            r: AtomicCell::new(r),
            // No permutations exist when r exceeds the pool size.
            exhausted: AtomicCell::new(r > n),
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        // stop signal
        if self.exhausted.load() {
            return Err(new_stop_iteration(vm));
        }
        let n = self.pool.len();
        let r = self.r.load();
        if n == 0 {
            // Empty pool: emit a single empty tuple, then stop.
            self.exhausted.store(true);
            return Ok(vm.ctx.new_tuple(vec![]));
        }
        let mut result = self.result.write();
        if let Some(ref mut result) = *result {
            let mut indices = self.indices.write();
            let mut cycles = self.cycles.write();
            let mut sentinel = false;
            // Decrement rightmost cycle, moving leftward upon zero rollover
            for i in (0..r).rev() {
                cycles[i] -= 1;
                if cycles[i] == 0 {
                    // rotation: indices[i:] = indices[i+1:] + indices[i:i+1]
                    let index = indices[i];
                    for j in i..n - 1 {
                        indices[j] = indices[j + 1];
                    }
                    indices[n - 1] = index;
                    cycles[i] = n - i;
                } else {
                    let j = cycles[i];
                    indices.swap(i, n - j);
                    for k in i..r {
                        // start with i, the leftmost element that changed
                        // yield tuple(pool[k] for k in indices[:r])
                        result[k] = indices[k];
                    }
                    sentinel = true;
                    break;
                }
            }
            // Every cycle rolled over: the final permutation was already
            // returned on the previous call.
            if !sentinel {
                self.exhausted.store(true);
                return Err(new_stop_iteration(vm));
            }
        } else {
            // On the first pass, initialize result tuple using the indices
            *result = Some((0..r).collect());
        }
        Ok(vm.ctx.new_tuple(
            result
                .as_ref()
                .unwrap()
                .iter()
                .map(|&i| self.pool[i].clone())
                .collect(),
        ))
    }
}
#[pyattr]
#[pyclass(name = "zip_longest")]
#[derive(Debug)]
struct PyItertoolsZipLongest {
    // The underlying iterators, polled in order on every `__next__`.
    iterators: Vec<PyObjectRef>,
    // Substituted for exhausted iterators.
    fillvalue: PyObjectRef,
}
impl PyValue for PyItertoolsZipLongest {
    fn class(vm: &VirtualMachine) -> PyTypeRef {
        vm.class("itertools", "zip_longest")
    }
}
#[derive(FromArgs)]
struct ZiplongestArgs {
    // `fillvalue` is keyword-only and optional.
    #[pyarg(keyword_only, optional = true)]
    fillvalue: OptionalArg<PyObjectRef>,
}
#[pyimpl]
impl PyItertoolsZipLongest {
    /// `zip_longest(*iterables, fillvalue=None)` constructor.
    #[pyslot]
    fn tp_new(
        cls: PyTypeRef,
        iterables: Args,
        args: ZiplongestArgs,
        vm: &VirtualMachine,
    ) -> PyResult<PyRef<Self>> {
        // A missing fillvalue defaults to None.
        let fillvalue = args.fillvalue.unwrap_or_none(vm);
        // Convert every positional argument to an iterator, failing fast on
        // the first non-iterable.
        let iterators = iterables
            .into_iter()
            .map(|iterable| get_iter(vm, &iterable))
            .collect::<Result<Vec<_>, _>>()?;
        PyItertoolsZipLongest {
            iterators,
            fillvalue,
        }
        .into_ref_with_type(vm, cls)
    }
    #[pymethod(name = "__next__")]
    fn next(&self, vm: &VirtualMachine) -> PyResult {
        if self.iterators.is_empty() {
            // zip_longest() with no iterables is immediately exhausted.
            Err(new_stop_iteration(vm))
        } else {
            let mut result: Vec<PyObjectRef> = Vec::new();
            let mut numactive = self.iterators.len();
            for idx in 0..self.iterators.len() {
                let next_obj = match call_next(vm, &self.iterators[idx]) {
                    Ok(obj) => obj,
                    Err(err) => {
                        // Anything other than StopIteration propagates.
                        if !objtype::isinstance(&err, &vm.ctx.exceptions.stop_iteration) {
                            return Err(err);
                        }
                        // Stop only once every iterator in this call is
                        // exhausted; otherwise substitute the fillvalue.
                        numactive -= 1;
                        if numactive == 0 {
                            return Err(new_stop_iteration(vm));
                        }
                        // NOTE(review): exhausted iterators are polled again on
                        // every subsequent call instead of being remembered as
                        // finished — confirm this matches the intended
                        // semantics for restartable iterators.
                        self.fillvalue.clone()
                    }
                };
                result.push(next_obj);
            }
            Ok(vm.ctx.new_tuple(result))
        }
    }
    #[pymethod(name = "__iter__")]
    fn iter(zelf: PyRef<Self>) -> PyRef<Self> {
        zelf
    }
}
}
|
{
return Err(new_stop_iteration(vm));
}
|
test_format.py
|
import datetime
import re
import pytest
from fastjsonschema import JsonSchemaValueException
exc = JsonSchemaValueException('data must be date-time', value='{data}', name='data', definition='{definition}', rule='format')
# Each case pairs an input with either the expected exception (invalid input)
# or the input itself (valid input is returned unchanged).
@pytest.mark.parametrize('value, expected', [
    ('', exc),
    ('bla', exc),
    ('2018-02-05T14:17:10.00', exc),      # missing trailing 'Z'
    ('2018-02-05T14:17:10.00Z\n', exc),   # trailing newline is invalid
    ('2018-02-05T14:17:10.00Z', '2018-02-05T14:17:10.00Z'),
    ('2018-02-05T14:17:10Z', '2018-02-05T14:17:10Z'),
])
def test_datetime(asserter, value, expected):
    # `asserter` (fixture) validates `value` against the schema and compares
    # the outcome (return value or raised exception) with `expected`.
    asserter({'type': 'string', 'format': 'date-time'}, value, expected)
exc = JsonSchemaValueException('data must be hostname', value='{data}', name='data', definition='{definition}', rule='format')
# Invalid hostnames raise; valid ones are returned unchanged.
@pytest.mark.parametrize('value, expected', [
    ('', exc),
    ('LDhsjf878&d', exc),            # '&' is rejected
    ('bla.bla-', exc),               # trailing hyphen is rejected
    ('example.example.com-', exc),   # trailing hyphen is rejected
    ('example.example.com\n', exc),  # trailing newline is rejected
    ('localhost', 'localhost'),
    ('example.com', 'example.com'),
    ('example.de', 'example.de'),
    ('example.fr', 'example.fr'),
    ('example.example.com', 'example.example.com'),
])
def test_hostname(asserter, value, expected):
    asserter({'type': 'string', 'format': 'hostname'}, value, expected)
exc = JsonSchemaValueException('data must be custom-format', value='{data}', name='data', definition='{definition}', rule='format')
# Custom formats can be supplied either as a regex string or as a callable;
# both representations are exercised for the same inputs.
@pytest.mark.parametrize('value,expected,custom_format', [
    ('', exc, r'^[ab]$'),
    ('', exc, lambda value: value in ('a', 'b')),
    ('a', 'a', r'^[ab]$'),
    ('a', 'a', lambda value: value in ('a', 'b')),
    ('c', exc, r'^[ab]$'),
    ('c', exc, lambda value: value in ('a', 'b')),
])
def test_custom_format(asserter, value, expected, custom_format):
    # The format implementation is injected through the `formats` mapping.
    asserter({'format': 'custom-format'}, value, expected, formats={
        'custom-format': custom_format,
    })
def
|
(asserter):
asserter({'format': 'date-time'}, 'a', 'a', formats={
'date-time': r'^[ab]$',
})
|
test_custom_format_override
|
main.js
|
window.onload = () => {
if (window.location.href === 'https://wybory.online.tvwisla.com.pl/widgets-covid-19/?article=1') {
let init = () => {
const getCellVal = (tr, i) => tr.children[i].innerText || tr.children[i].textContent;
const comparer = (i, asc) => (a, b) => ((x, y) =>
x !== '' && y !== '' && !isNaN(x) && !isNaN(y) ? x - y : x.toString().localeCompare(y)
)(getCellVal(asc ? a : b, i), getCellVal(asc ? b : a, i));
const myTh = document.querySelectorAll('thead td');
|
const tbody = table.querySelector('tbody');
Array.from(table.querySelectorAll('tbody tr'))
.sort(comparer(Array.from(th.parentNode.children).indexOf(th), this.asc = !this.asc))
.forEach(tr => tbody.appendChild(tr));
})));
}
let checkForTable = () => {
if(document.querySelectorAll('.mc-header__title')[1] && document.querySelectorAll('.mc-header__title')[1].textContent === "Przypadki koronawirusa w Polsce z podziałem na województwa"){
clearInterval(checkInterval);
init();
};
}
let checkInterval = setInterval(checkForTable, 100);
}
}
|
myTh.forEach(th => th.addEventListener('click', (() => {
const table = th.closest('table');
|
eda_utils.py
|
import math
import warnings
from itertools import combinations
from typing import TYPE_CHECKING
from typing import Optional
from typing import Sequence
import matplotlib.pyplot as plt
import numpy as np
import seaborn as sns
import statsmodels.api as sm
from matplotlib.ticker import MaxNLocator
from statsmodels.graphics import utils
if TYPE_CHECKING:
from etna.datasets import TSDataset
plot_acf = sm.graphics.tsa.plot_acf
plot_pacf = sm.graphics.tsa.plot_pacf
def cross_corr_plot(ts: "TSDataset", n_segments: int = 10, maxlags: int = 21, segments: Optional[Sequence] = None):
    """
    Cross-correlation plot between multiple timeseries.

    Parameters
    ----------
    ts:
        TSDataset with timeseries data
    n_segments:
        number of random segments to plot
    maxlags:
        number of timeseries shifts for cross-correlation
    segments:
        segments to plot

    Raises
    ------
    ValueError:
        if fewer than two segments are selected, so no pairs exist
    """
    if not segments:
        segments = list(ts.segments)
    # Sample at most n_segments segments without replacement.
    segments = np.random.choice(segments, size=min(len(segments), n_segments), replace=False)
    # One subplot per unordered pair of sampled segments.
    segment_pairs = list(combinations(segments, r=2))
    if len(segment_pairs) == 0:
        raise ValueError("There are no pairs to plot! Try set n_segments > 1.")
    columns_num = min(2, len(segment_pairs))
    rows_num = math.ceil(len(segment_pairs) / columns_num)
    fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 5 * rows_num), constrained_layout=True, squeeze=False)
    ax = ax.ravel()
    fig.suptitle("Cross-correlation", fontsize=16)
    for i, (segment_1, segment_2) in enumerate(segment_pairs):
        df_segment_1 = ts[:, segment_1, :][segment_1]
        df_segment_2 = ts[:, segment_2, :][segment_2]
        fig, axx = utils.create_mpl_ax(ax[i])
        target_1 = df_segment_1.target
        target_2 = df_segment_2.target
        # Correlation needs float input; warn and cast integer targets.
        if target_1.dtype == int or target_2.dtype == int:
            warnings.warn(
                "At least one target column has integer dtype, "
                "it is converted to float in order to calculate correlation."
            )
            target_1 = target_1.astype(float)
            target_2 = target_2.astype(float)
        lags, level, _, _ = axx.xcorr(x=target_1, y=target_2, maxlags=maxlags)
        ax[i].plot(lags, level, "o", markersize=5)
        ax[i].set_title(f"{segment_1} vs {segment_2}")
        # Lags are whole numbers of shifts; keep tick labels integral.
        ax[i].xaxis.set_major_locator(MaxNLocator(integer=True))
    plt.show()
def sample_acf_plot(ts: "TSDataset", n_segments: int = 10, lags: int = 21, segments: Sequence = None):
    """
    Autocorrelation plot for multiple timeseries.

    Parameters
    ----------
    ts:
        TSDataset with timeseries data
    n_segments:
        number of random segments to plot
    lags:
        number of timeseries shifts for cross-correlation
    segments:
        segments to plot

    Notes
    -----
    https://en.wikipedia.org/wiki/Autocorrelation
    """
    if not segments:
        segments = sorted(ts.segments)
    # Sample at most n_segments segments; lay them out on a two-column grid.
    k = min(n_segments, len(segments))
    columns_num = min(2, k)
    rows_num = math.ceil(k / columns_num)
    fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 5 * rows_num), constrained_layout=True, squeeze=False)
    ax = ax.ravel()
    # Bug fix: the title previously read "Partial Autocorrelation" (copied
    # from sample_pacf_plot); this function plots the plain ACF.
    fig.suptitle("Autocorrelation", fontsize=16)
    for i, name in enumerate(sorted(np.random.choice(segments, size=k, replace=False))):
        df_slice = ts[:, name, :][name]
        plot_acf(x=df_slice["target"].values, ax=ax[i], lags=lags)
        ax[i].set_title(name)
    plt.show()
def sample_pacf_plot(ts: "TSDataset", n_segments: int = 10, lags: int = 21, segments: Sequence = None):
"""
Partial autocorrelation plot for multiple timeseries.
Parameters
----------
ts:
TSDataset with timeseries data
n_segments:
number of random segments to plot
lags:
number of timeseries shifts for cross-correlation
segments:
segments to plot
Notes
-----
https://en.wikipedia.org/wiki/Partial_autocorrelation_function
"""
if not segments:
segments = sorted(ts.segments)
k = min(n_segments, len(segments))
columns_num = min(2, k)
rows_num = math.ceil(k / columns_num)
fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 5 * rows_num), constrained_layout=True, squeeze=False)
ax = ax.ravel()
|
for i, name in enumerate(sorted(np.random.choice(segments, size=k, replace=False))):
df_slice = ts[:, name, :][name]
plot_pacf(x=df_slice["target"].values, ax=ax[i], lags=lags)
ax[i].set_title(name)
plt.show()
def distribution_plot(
    ts: "TSDataset",
    n_segments: int = 10,
    segments: Sequence = None,
    shift: int = 30,
    window: int = 30,
    freq: str = "1M",
    n_rows: int = 10,
):
    """Distribution of z-values grouped by segments and time frequency.

    .. math::
        mean_{i} = \\sum_{j=i-\\text{shift}}^{i-\\text{shift}+\\text{window}} \\frac{x_{j}}{\\text{window}}

    Parameters
    ----------
    ts:
        dataset with timeseries data
    n_segments:
        number of random segments to plot
    segments:
        segments to plot
    shift:
        number of timeseries shifts for statistics calc
    window:
        number of points for statistics calc
    freq:
        group for z_{i}
    n_rows:
        maximum number of rows to plot
    """
    df_pd = ts.to_pandas(flatten=True)
    if not segments:
        segments = df_pd.segment.unique()
    # Sample at most n_segments segments without replacement.
    segments = np.random.choice(segments, size=min(len(segments), n_segments), replace=False)
    df_full = df_pd[df_pd.segment.isin(segments)]
    # Per-segment rolling mean/std of the target, lagged by `shift` points.
    df_full.loc[:, "mean"] = (
        df_full.groupby("segment").target.shift(shift).transform(lambda s: s.rolling(window).mean())
    )
    df_full.loc[:, "std"] = df_full.groupby("segment").target.shift(shift).transform(lambda s: s.rolling(window).std())
    df_full = df_full.dropna()
    # z-score of each observation against its lagged rolling statistics.
    df_full.loc[:, "z"] = (df_full["target"] - df_full["mean"]) / df_full["std"]
    grouped_data = df_full.groupby([df_full.timestamp.dt.to_period(freq)])
    columns_num = min(2, len(grouped_data))
    rows_num = min(n_rows, math.ceil(len(grouped_data) / columns_num))
    # Keep only the most recent rows_num * columns_num periods.
    groups = set(list(grouped_data.groups.keys())[-rows_num * columns_num :])
    fig, ax = plt.subplots(rows_num, columns_num, figsize=(20, 7.5 * rows_num), constrained_layout=True, squeeze=False)
    fig.suptitle(f"Z statistic shift: {shift} window: {window}", fontsize=16)
    ax = ax.ravel()
    i = 0
    for period, df_slice in grouped_data:
        if period not in groups:
            continue
        # NOTE(review): `fliersize` expects a float; passing False here looks
        # like it was meant to hide outliers (showfliers=False) — confirm.
        sns.boxplot(data=df_slice.sort_values(by="segment"), y="z", x="segment", ax=ax[i], fliersize=False)
        ax[i].set_title(f"{period}")
        i += 1
|
fig.suptitle("Partial Autocorrelation", fontsize=16)
|
helpers.go
|
package mobilesecurityserviceapp
import (
"context"
mobilesecurityservicev1alpha1 "github.com/aerogear/mobile-security-service-operator/pkg/apis/mobilesecurityservice/v1alpha1"
"github.com/aerogear/mobile-security-service-operator/pkg/utils"
"k8s.io/apimachinery/pkg/types"
)
const FinalizerMetadata = "finalizer.mobile-security-service.aerogear.org"
// hasConditionsToBeDeleted will return true if the Service instance was not found and/or is marked to be deleted
// OR
// if the APP CR was marked to be deleted
func hasConditionsToBeDeleted(mssApp *mobilesecurityservicev1alpha1.MobileSecurityServiceApp, mss *mobilesecurityservicev1alpha1.MobileSecurityService) bool
|
// isMobileSecurityServiceDeleted returns true when the MobileSecurityService
// instance cannot be fetched (it was deleted) or when it carries a deletion
// timestamp (it is being finalized).
func (r *ReconcileMobileSecurityServiceApp) isMobileSecurityServiceDeleted(operatorNamespace string, mss *mobilesecurityservicev1alpha1.MobileSecurityService) bool {
	key := types.NamespacedName{Name: utils.MobileSecurityServiceCRName, Namespace: operatorNamespace}
	err := r.client.Get(context.TODO(), key, mss)
	// Short-circuits: the timestamp is only consulted when the Get succeeded.
	return err != nil || mss.GetDeletionTimestamp() != nil
}
|
{
//Check if the APP CR was marked to be deleted
isAppMarkedToBeDeleted := mssApp.GetDeletionTimestamp() != nil
hasFinalizer := len(mssApp.GetFinalizers()) > 0
isMssInstanceDeleted := mss == nil
isMssInstanceMarkedToBeDeleted := mss.GetDeletionTimestamp() != nil
return (isAppMarkedToBeDeleted && hasFinalizer) || isMssInstanceDeleted || isMssInstanceMarkedToBeDeleted
}
|
lib.rs
|
use wasm_bindgen::prelude::*;
use std::{sync::Mutex};
use once_cell::sync::Lazy;
mod utils;
mod blackjack;
use crate::blackjack::Game;
static GAME_STATE: Lazy<Mutex<Game>> = Lazy::new( || {
utils::set_panic_hook();
Mutex::new(Game::new())
});
#[wasm_bindgen]
pub fn start_new_game() {
    // Swap the shared state for a freshly constructed game.
    *GAME_STATE.lock().unwrap() = Game::new();
}
#[wasm_bindgen]
pub fn play_dealer() -> Option<u8> {
    // Run the dealer's turn against the shared state and return the score.
    GAME_STATE.lock().unwrap().play_dealer()
}
#[wasm_bindgen]
pub fn
|
() -> Option<u8> {
let mut game_state = GAME_STATE.lock().unwrap();
let player_score = game_state.hit_player();
player_score
}
#[wasm_bindgen]
pub fn check_player_bust() -> bool {
    // Read-only query against the shared game state.
    GAME_STATE.lock().unwrap().check_player_bust()
}
|
hit_player
|
createMultiFormatter.test.ts
|
import createMultiFormatter from '../../src/factories/createMultiFormatter';
describe('createMultiFormatter()', () => {
describe('creates a multi-step formatter', () => {
const formatter = createMultiFormatter({
id: 'my_format',
useLocalTime: true,
});
it('formats millisecond', () => {
expect(formatter(new Date(2018, 10, 20, 11, 22, 33, 100))).toEqual('.100');
});
it('formats second', () => {
expect(formatter(new Date(2018, 10, 20, 11, 22, 33))).toEqual(':33');
});
it('format minutes', () => {
expect(formatter(new Date(2018, 10, 20, 11, 22))).toEqual('11:22');
});
it('format hours', () => {
expect(formatter(new Date(2018, 10, 20, 11))).toEqual('11 AM');
});
it('format first day of week', () => {
expect(formatter(new Date(2018, 10, 18))).toEqual('Nov 18');
});
it('format other day of week', () => {
expect(formatter(new Date(2018, 10, 20))).toEqual('Tue 20');
});
it('format month', () => {
expect(formatter(new Date(2018, 10))).toEqual('November');
});
it('format year', () => {
expect(formatter(new Date(2018, 0))).toEqual('2018');
|
});
});
|
});
|
declarations-for-tuple-field-count-errors.rs
|
pub struct Z0; // unit struct: no fields
pub struct Z1(); // tuple struct with zero fields (distinct from a unit struct)
pub struct S(pub u8, pub u8, pub u8); // three-field tuple struct on one line
pub struct M( // same shape as `S`, spread over multiple lines
    pub u8,
    pub u8,
    pub u8,
); // NOTE(review): looks like a compiler-UI fixture; line count kept stable — confirm
pub enum
|
{ Z0, Z1(), S(u8, u8, u8) }
pub enum E2 {
S(u8, u8, u8),
M(
u8,
u8,
u8,
),
}
|
E1
|
usage_parser.rs
|
// Third Party
use vec_map::VecMap;
// Internal
use INTERNAL_ERROR_MSG;
use args::Arg;
use args::settings::ArgSettings;
// Kind of the most recently parsed token in a usage string.
#[derive(PartialEq, Debug)]
enum UsageToken {
    Name,     // `<name>` / `[name]` naming the argument
    ValName,  // a subsequent `<..>` / `[..]` acting as a value name
    Short,    // `-f`
    Long,     // `--flag`
    Help,     // single-quoted help text
    Multiple, // `...` marker
    Unknown,  // initial state, before any token was seen
}
#[doc(hidden)]
#[derive(Debug)]
pub struct UsageParser<'a> {
    usage: &'a str,          // raw usage string being parsed
    pos: usize,              // current byte offset into `usage`
    start: usize,            // start offset of the most recently scanned token
    prev: UsageToken,        // kind of the previous token (name vs. value-name)
    explicit_name_set: bool, // true once `<name>`/`[name]` fixed the arg name
}
impl<'a> UsageParser<'a> {
    /// Creates a parser positioned at the start of `usage`.
    fn new(usage: &'a str) -> Self {
        debugln!("UsageParser::new: usage={:?}", usage);
        UsageParser {
            usage: usage,
            pos: 0,
            start: 0,
            prev: UsageToken::Unknown,
            explicit_name_set: false,
        }
    }
    pub fn from_usage(usage: &'a str) -> Self {
        debugln!("UsageParser::from_usage;");
        UsageParser::new(usage)
    }
    /// Consumes the parser and produces a fully-populated `Arg`.
    ///
    /// Scans token by token, dispatching on the first byte of each token:
    /// `-` (short/long flag), `.` (multiple marker), `'` (help text),
    /// anything else (arg name or value name).
    pub fn parse(mut self) -> Arg<'a, 'a> {
        debugln!("UsageParser::parse;");
        let mut arg = Arg::default();
        loop {
            debugln!("UsageParser::parse:iter: pos={};", self.pos);
            // Skip filler bytes until the next token start.
            self.stop_at(token);
            if let Some(&c) = self.usage.as_bytes().get(self.pos) {
                match c {
                    b'-' => self.short_or_long(&mut arg),
                    b'.' => self.multiple(&mut arg),
                    b'\'' => self.help(&mut arg),
                    _ => self.name(&mut arg),
                }
            } else {
                break;
            }
        }
        debug_assert!(!arg.name.is_empty(),
                      format!("No name found for Arg when parsing usage string: {}",
                              self.usage));
        // Two or more value names imply a fixed number of values.
        arg.num_vals = match arg.val_names {
            Some(ref v) if v.len() >= 2 => Some(v.len() as u64),
            _ => None,
        };
        debugln!("UsageParser::parse: vals...{:?}", arg.val_names);
        arg
    }
    /// Parses `<name>`/`[name]`: the first such token names the arg,
    /// later ones become value names.
    fn name(&mut self, arg: &mut Arg<'a, 'a>) {
        debugln!("UsageParser::name;");
        // `<...>` (as opposed to `[...]`) marks the arg required, unless
        // the name was already fixed explicitly.
        if *self.usage.as_bytes().get(self.pos).expect(INTERNAL_ERROR_MSG) == b'<' &&
           !self.explicit_name_set {
            arg.setb(ArgSettings::Required);
        }
        self.pos += 1;
        self.stop_at(name_end);
        let name = &self.usage[self.start..self.pos];
        if self.prev == UsageToken::Unknown {
            debugln!("UsageParser::name: setting name...{}", name);
            arg.name = name;
            if arg.long.is_none() && arg.short.is_none() {
                debugln!("UsageParser::name: explicit name set...");
                self.explicit_name_set = true;
                self.prev = UsageToken::Name;
            }
        } else {
            // Any name after the first is a value name and implies the
            // arg takes a value.
            debugln!("UsageParser::name: setting val name...{}", name);
            if let Some(ref mut v) = arg.val_names {
                let len = v.len();
                v.insert(len, name);
            } else {
                let mut v = VecMap::new();
                v.insert(0, name);
                arg.val_names = Some(v);
                arg.setb(ArgSettings::TakesValue);
            }
            self.prev = UsageToken::ValName;
        }
    }
    /// Advances `pos` past every byte for which `f` returns true,
    /// recording the scan start in `self.start`.
    fn stop_at<F>(&mut self, f: F)
        where F: Fn(u8) -> bool
    {
        debugln!("UsageParser::stop_at;");
        self.start = self.pos;
        self.pos += self.usage[self.start..].bytes().take_while(|&b| f(b)).count();
    }
    /// Dispatches between `-x` and `--xyz` after the leading `-`.
    fn short_or_long(&mut self, arg: &mut Arg<'a, 'a>) {
        debugln!("UsageParser::short_or_long;");
        self.pos += 1;
        if *self.usage.as_bytes().get(self.pos).expect(INTERNAL_ERROR_MSG) == b'-' {
            self.pos += 1;
            self.long(arg);
            return;
        }
        self.short(arg)
    }
    /// Parses a `--long` flag; it also names the arg when no explicit
    /// `<name>`/`[name]` did.
    fn long(&mut self, arg: &mut Arg<'a, 'a>) {
        debugln!("UsageParser::long;");
        self.stop_at(long_end);
        let name = &self.usage[self.start..self.pos];
        if !self.explicit_name_set {
            debugln!("UsageParser::long: setting name...{}", name);
            arg.name = name;
        }
        debugln!("UsageParser::long: setting long...{}", name);
        arg.long = Some(name);
        self.prev = UsageToken::Long;
    }
    /// Parses a `-s` short flag (a single char, possibly multi-byte).
    fn short(&mut self, arg: &mut Arg<'a, 'a>) {
        debugln!("UsageParser::short;");
        let start = &self.usage[self.pos..];
        let short = start.chars().nth(0).expect(INTERNAL_ERROR_MSG);
        debugln!("UsageParser::short: setting short...{}", short);
        arg.short = Some(short);
        if arg.name.is_empty() {
            // --long takes precedence but doesn't set self.explicit_name_set
            let name = &start[..short.len_utf8()];
            debugln!("UsageParser::short: setting name...{}", name);
            arg.name = name;
        }
        self.prev = UsageToken::Short;
    }
    // "something..."
    /// Consumes the `...` multiple marker (exactly three dots).
    fn multiple(&mut self, arg: &mut Arg) {
        debugln!("UsageParser::multiple;");
        let mut dot_counter = 1;
        let start = self.pos;
        let mut bytes = self.usage[start..].bytes();
        while bytes.next() == Some(b'.') {
            dot_counter += 1;
            self.pos += 1;
            if dot_counter == 3 {
                debugln!("UsageParser::multiple: setting multiple");
                arg.setb(ArgSettings::Multiple);
                // For value-taking args, `...` also enables the default
                // ',' value delimiter unless one was already configured.
                if arg.settings.is_set(ArgSettings::TakesValue) {
                    arg.setb(ArgSettings::UseValueDelimiter);
                    arg.unsetb(ArgSettings::ValueDelimiterNotSet);
                    if arg.val_delim.is_none() {
                        arg.val_delim = Some(',');
                    }
                }
                self.prev = UsageToken::Multiple;
                self.pos += 1;
                break;
            }
        }
    }
    /// Captures the single-quoted help string, which runs to the end of
    /// the usage string.
    fn help(&mut self, arg: &mut Arg<'a, 'a>) {
        debugln!("UsageParser::help;");
        self.stop_at(help_start);
        self.start = self.pos + 1;
        self.pos = self.usage.len() - 1;
        debugln!("UsageParser::help: setting help...{}", &self.usage[self.start..self.pos]);
        arg.help = Some(&self.usage[self.start..self.pos]);
        self.pos += 1; // Move to next byte to keep from thinking ending ' is a start
        self.prev = UsageToken::Help;
    }
}
// Byte predicates used with `UsageParser::stop_at` to delimit tokens.

#[inline]
fn name_end(b: u8) -> bool {
    // Keep consuming until the closing bracket of `<name>` / `[name]`.
    !matches!(b, b']' | b'>')
}
#[inline]
fn token(b: u8) -> bool {
    // Skip filler until the start of any recognized token.
    !matches!(b, b'\'' | b'.' | b'<' | b'[' | b'-')
}
#[inline]
fn long_end(b: u8) -> bool {
    // A long flag name ends at punctuation, '=', or a space.
    !matches!(b, b'\'' | b'.' | b'<' | b'[' | b'=' | b' ')
}
#[inline]
fn help_start(b: u8) -> bool {
    // Help text is delimited by single quotes.
    b != b'\''
}
#[cfg(test)]
mod test {
use args::Arg;
use args::ArgSettings;
#[test]
fn create_flag_usage() {
let a = Arg::from_usage("[flag] -f 'some help info'");
assert_eq!(a.name, "flag");
assert_eq!(a.short.unwrap(), 'f');
assert!(a.long.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(!a.is_set(ArgSettings::Multiple));
assert!(a.val_names.is_none());
assert!(a.num_vals.is_none());
let b = Arg::from_usage("[flag] --flag 'some help info'");
assert_eq!(b.name, "flag");
assert_eq!(b.long.unwrap(), "flag");
assert!(b.short.is_none());
assert_eq!(b.help.unwrap(), "some help info");
assert!(!b.is_set(ArgSettings::Multiple));
assert!(a.val_names.is_none());
assert!(a.num_vals.is_none());
let b = Arg::from_usage("--flag 'some help info'");
assert_eq!(b.name, "flag");
assert_eq!(b.long.unwrap(), "flag");
assert!(b.short.is_none());
assert_eq!(b.help.unwrap(), "some help info");
assert!(!b.is_set(ArgSettings::Multiple));
assert!(b.val_names.is_none());
assert!(b.num_vals.is_none());
let c = Arg::from_usage("[flag] -f --flag 'some help info'");
assert_eq!(c.name, "flag");
assert_eq!(c.short.unwrap(), 'f');
assert_eq!(c.long.unwrap(), "flag");
assert_eq!(c.help.unwrap(), "some help info");
assert!(!c.is_set(ArgSettings::Multiple));
assert!(c.val_names.is_none());
assert!(c.num_vals.is_none());
let d = Arg::from_usage("[flag] -f... 'some help info'");
assert_eq!(d.name, "flag");
assert_eq!(d.short.unwrap(), 'f');
assert!(d.long.is_none());
assert_eq!(d.help.unwrap(), "some help info");
assert!(d.is_set(ArgSettings::Multiple));
assert!(d.val_names.is_none());
assert!(d.num_vals.is_none());
let e = Arg::from_usage("[flag] -f --flag... 'some help info'");
assert_eq!(e.name, "flag");
assert_eq!(e.long.unwrap(), "flag");
assert_eq!(e.short.unwrap(), 'f');
assert_eq!(e.help.unwrap(), "some help info");
assert!(e.is_set(ArgSettings::Multiple));
assert!(e.val_names.is_none());
assert!(e.num_vals.is_none());
let e = Arg::from_usage("-f --flag... 'some help info'");
assert_eq!(e.name, "flag");
assert_eq!(e.long.unwrap(), "flag");
assert_eq!(e.short.unwrap(), 'f');
assert_eq!(e.help.unwrap(), "some help info");
assert!(e.is_set(ArgSettings::Multiple));
assert!(e.val_names.is_none());
assert!(e.num_vals.is_none());
let e = Arg::from_usage("--flags");
assert_eq!(e.name, "flags");
assert_eq!(e.long.unwrap(), "flags");
assert!(e.val_names.is_none());
assert!(e.num_vals.is_none());
let e = Arg::from_usage("--flags...");
assert_eq!(e.name, "flags");
assert_eq!(e.long.unwrap(), "flags");
assert!(e.is_set(ArgSettings::Multiple));
assert!(e.val_names.is_none());
assert!(e.num_vals.is_none());
let e = Arg::from_usage("[flags] -f");
assert_eq!(e.name, "flags");
assert_eq!(e.short.unwrap(), 'f');
assert!(e.val_names.is_none());
assert!(e.num_vals.is_none());
let e = Arg::from_usage("[flags] -f...");
assert_eq!(e.name, "flags");
assert_eq!(e.short.unwrap(), 'f');
assert!(e.is_set(ArgSettings::Multiple));
assert!(e.val_names.is_none());
assert!(e.num_vals.is_none());
let a = Arg::from_usage("-f 'some help info'");
assert_eq!(a.name, "f");
assert_eq!(a.short.unwrap(), 'f');
assert!(a.long.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(!a.is_set(ArgSettings::Multiple));
assert!(a.val_names.is_none());
assert!(a.num_vals.is_none());
let e = Arg::from_usage("-f");
assert_eq!(e.name, "f");
assert_eq!(e.short.unwrap(), 'f');
assert!(e.val_names.is_none());
assert!(e.num_vals.is_none());
let e = Arg::from_usage("-f...");
assert_eq!(e.name, "f");
assert_eq!(e.short.unwrap(), 'f');
assert!(e.is_set(ArgSettings::Multiple));
assert!(e.val_names.is_none());
assert!(e.num_vals.is_none());
}
#[test]
fn create_option_usage0() {
// Short only
let a = Arg::from_usage("[option] -o [opt] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.short.unwrap(), 'o');
assert!(a.long.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(!a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage1() {
let b = Arg::from_usage("-o [opt] 'some help info'");
assert_eq!(b.name, "o");
assert_eq!(b.short.unwrap(), 'o');
assert!(b.long.is_none());
assert_eq!(b.help.unwrap(), "some help info");
assert!(!b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage2() {
let c = Arg::from_usage("<option> -o <opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.short.unwrap(), 'o');
assert!(c.long.is_none());
assert_eq!(c.help.unwrap(), "some help info");
assert!(!c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage3() {
let d = Arg::from_usage("-o <opt> 'some help info'");
assert_eq!(d.name, "o");
assert_eq!(d.short.unwrap(), 'o');
assert!(d.long.is_none());
assert_eq!(d.help.unwrap(), "some help info");
assert!(!d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(d.num_vals.is_none());
}
#[test]
fn create_option_usage4() {
let a = Arg::from_usage("[option] -o [opt]... 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.short.unwrap(), 'o');
assert!(a.long.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage5() {
let a = Arg::from_usage("[option]... -o [opt] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.short.unwrap(), 'o');
assert!(a.long.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage6() {
let b = Arg::from_usage("-o [opt]... 'some help info'");
assert_eq!(b.name, "o");
assert_eq!(b.short.unwrap(), 'o');
assert!(b.long.is_none());
assert_eq!(b.help.unwrap(), "some help info");
assert!(b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage7() {
let c = Arg::from_usage("<option> -o <opt>... 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.short.unwrap(), 'o');
assert!(c.long.is_none());
assert_eq!(c.help.unwrap(), "some help info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage8() {
let c = Arg::from_usage("<option>... -o <opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.short.unwrap(), 'o');
assert!(c.long.is_none());
assert_eq!(c.help.unwrap(), "some help info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage9() {
let d = Arg::from_usage("-o <opt>... 'some help info'");
assert_eq!(d.name, "o");
assert_eq!(d.short.unwrap(), 'o');
assert!(d.long.is_none());
assert_eq!(d.help.unwrap(), "some help info");
assert!(d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(d.num_vals.is_none());
}
// --- Long-option usage strings, space-separated value (`--opt <val>`) ---
// Pattern across these ten cases: an explicit leading `[name]`/`<name>`
// token names the arg; otherwise the long flag names it. `[]` => optional,
// `<>` => required, trailing `...` on either token => Multiple.
#[test]
fn create_option_usage_long1() {
let a = Arg::from_usage("[option] --opt [opt] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert!(a.short.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(!a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage_long2() {
let b = Arg::from_usage("--opt [option] 'some help info'");
assert_eq!(b.name, "opt");
assert_eq!(b.long.unwrap(), "opt");
assert!(b.short.is_none());
assert_eq!(b.help.unwrap(), "some help info");
assert!(!b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage_long3() {
let c = Arg::from_usage("<option> --opt <opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert!(c.short.is_none());
assert_eq!(c.help.unwrap(), "some help info");
assert!(!c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_long4() {
let d = Arg::from_usage("--opt <option> 'some help info'");
assert_eq!(d.name, "opt");
assert_eq!(d.long.unwrap(), "opt");
assert!(d.short.is_none());
assert_eq!(d.help.unwrap(), "some help info");
assert!(!d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(d.num_vals.is_none());
}
// `...` on the value token sets Multiple.
#[test]
fn create_option_usage_long5() {
let a = Arg::from_usage("[option] --opt [opt]... 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert!(a.short.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(a.num_vals.is_none());
}
// `...` on the leading name token also sets Multiple.
#[test]
fn create_option_usage_long6() {
let a = Arg::from_usage("[option]... --opt [opt] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert!(a.short.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage_long7() {
let b = Arg::from_usage("--opt [option]... 'some help info'");
assert_eq!(b.name, "opt");
assert_eq!(b.long.unwrap(), "opt");
assert!(b.short.is_none());
assert_eq!(b.help.unwrap(), "some help info");
assert!(b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage_long8() {
let c = Arg::from_usage("<option> --opt <opt>... 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert!(c.short.is_none());
assert_eq!(c.help.unwrap(), "some help info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_long9() {
let c = Arg::from_usage("<option>... --opt <opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert!(c.short.is_none());
assert_eq!(c.help.unwrap(), "some help info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_long10() {
let d = Arg::from_usage("--opt <option>... 'some help info'");
assert_eq!(d.name, "opt");
assert_eq!(d.long.unwrap(), "opt");
assert!(d.short.is_none());
assert_eq!(d.help.unwrap(), "some help info");
assert!(d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(d.num_vals.is_none());
}
// --- Long-option usage strings, `=`-separated value (`--opt=<val>`) ---
// First case of the `=` family: explicit `[name]` token names the arg,
// `[]` on both tokens => optional, single value, no Multiple.
// Fix: the function was declared with no body (corruption artifact); the
// body — found displaced further down the file — is restored here.
#[test]
fn create_option_usage_long_equals1() {
let a = Arg::from_usage("[option] --opt=[opt] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert!(a.short.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(!a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(a.num_vals.is_none());
}
// Remaining `=`-separated long-option cases (equals2..equals10); they
// mirror the space-separated `long2..long10` cases above token for token.
#[test]
fn create_option_usage_long_equals2() {
let b = Arg::from_usage("--opt=[option] 'some help info'");
assert_eq!(b.name, "opt");
assert_eq!(b.long.unwrap(), "opt");
assert!(b.short.is_none());
assert_eq!(b.help.unwrap(), "some help info");
assert!(!b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage_long_equals3() {
let c = Arg::from_usage("<option> --opt=<opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert!(c.short.is_none());
assert_eq!(c.help.unwrap(), "some help info");
assert!(!c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_long_equals4() {
let d = Arg::from_usage("--opt=<option> 'some help info'");
assert_eq!(d.name, "opt");
assert_eq!(d.long.unwrap(), "opt");
assert!(d.short.is_none());
assert_eq!(d.help.unwrap(), "some help info");
assert!(!d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(d.num_vals.is_none());
}
#[test]
fn create_option_usage_long_equals5() {
let a = Arg::from_usage("[option] --opt=[opt]... 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert!(a.short.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage_long_equals6() {
let a = Arg::from_usage("[option]... --opt=[opt] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert!(a.short.is_none());
assert_eq!(a.help.unwrap(), "some help info");
assert!(a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage_long_equals7() {
let b = Arg::from_usage("--opt=[option]... 'some help info'");
assert_eq!(b.name, "opt");
assert_eq!(b.long.unwrap(), "opt");
assert!(b.short.is_none());
assert_eq!(b.help.unwrap(), "some help info");
assert!(b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage_long_equals8() {
let c = Arg::from_usage("<option> --opt=<opt>... 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert!(c.short.is_none());
assert_eq!(c.help.unwrap(), "some help info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_long_equals9() {
let c = Arg::from_usage("<option>... --opt=<opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert!(c.short.is_none());
assert_eq!(c.help.unwrap(), "some help info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_long_equals10() {
let d = Arg::from_usage("--opt=<option>... 'some help info'");
assert_eq!(d.name, "opt");
assert_eq!(d.long.unwrap(), "opt");
assert!(d.short.is_none());
assert_eq!(d.help.unwrap(), "some help info");
assert!(d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(d.num_vals.is_none());
}
// --- Usage strings with both a short and a long flag (`-o --opt`) ---
// The long flag (not the short one) names the arg when no explicit
// name token is given.
#[test]
fn create_option_usage_both1() {
let a = Arg::from_usage("[option] -o --opt [option] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert_eq!(a.short.unwrap(), 'o');
assert_eq!(a.help.unwrap(), "some help info");
assert!(!a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage_both2() {
let b = Arg::from_usage("-o --opt [option] 'some help info'");
assert_eq!(b.name, "opt");
assert_eq!(b.long.unwrap(), "opt");
assert_eq!(b.short.unwrap(), 'o');
assert_eq!(b.help.unwrap(), "some help info");
assert!(!b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage_both3() {
let c = Arg::from_usage("<option> -o --opt <opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert_eq!(c.short.unwrap(), 'o');
assert_eq!(c.help.unwrap(), "some help info");
assert!(!c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_both4() {
let d = Arg::from_usage("-o --opt <option> 'some help info'");
assert_eq!(d.name, "opt");
assert_eq!(d.long.unwrap(), "opt");
assert_eq!(d.short.unwrap(), 'o');
assert_eq!(d.help.unwrap(), "some help info");
assert!(!d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(d.num_vals.is_none());
}
#[test]
fn create_option_usage_both5() {
let a = Arg::from_usage("[option]... -o --opt [option] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert_eq!(a.short.unwrap(), 'o');
assert_eq!(a.help.unwrap(), "some help info");
assert!(a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage_both6() {
let b = Arg::from_usage("-o --opt [option]... 'some help info'");
assert_eq!(b.name, "opt");
assert_eq!(b.long.unwrap(), "opt");
assert_eq!(b.short.unwrap(), 'o');
assert_eq!(b.help.unwrap(), "some help info");
assert!(b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage_both7() {
let c = Arg::from_usage("<option>... -o --opt <opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert_eq!(c.short.unwrap(), 'o');
assert_eq!(c.help.unwrap(), "some help info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_both8() {
let d = Arg::from_usage("-o --opt <option>... 'some help info'");
assert_eq!(d.name, "opt");
assert_eq!(d.long.unwrap(), "opt");
assert_eq!(d.short.unwrap(), 'o');
assert_eq!(d.help.unwrap(), "some help info");
assert!(d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(d.num_vals.is_none());
}
// --- Short + long flag with `=`-separated value (`-o --opt=<val>`) ---
// Mirrors the `both1..both8` cases above, using `=` instead of a space.
#[test]
fn create_option_usage_both_equals1() {
let a = Arg::from_usage("[option] -o --opt=[option] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert_eq!(a.short.unwrap(), 'o');
assert_eq!(a.help.unwrap(), "some help info");
assert!(!a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage_both_equals2() {
let b = Arg::from_usage("-o --opt=[option] 'some help info'");
assert_eq!(b.name, "opt");
assert_eq!(b.long.unwrap(), "opt");
assert_eq!(b.short.unwrap(), 'o');
assert_eq!(b.help.unwrap(), "some help info");
assert!(!b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage_both_equals3() {
let c = Arg::from_usage("<option> -o --opt=<opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert_eq!(c.short.unwrap(), 'o');
assert_eq!(c.help.unwrap(), "some help info");
assert!(!c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_both_equals4() {
let d = Arg::from_usage("-o --opt=<option> 'some help info'");
assert_eq!(d.name, "opt");
assert_eq!(d.long.unwrap(), "opt");
assert_eq!(d.short.unwrap(), 'o');
assert_eq!(d.help.unwrap(), "some help info");
assert!(!d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(d.num_vals.is_none());
}
#[test]
fn create_option_usage_both_equals5() {
let a = Arg::from_usage("[option]... -o --opt=[option] 'some help info'");
assert_eq!(a.name, "option");
assert_eq!(a.long.unwrap(), "opt");
assert_eq!(a.short.unwrap(), 'o');
assert_eq!(a.help.unwrap(), "some help info");
assert!(a.is_set(ArgSettings::Multiple));
assert!(a.is_set(ArgSettings::TakesValue));
assert!(!a.is_set(ArgSettings::Required));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(a.num_vals.is_none());
}
#[test]
fn create_option_usage_both_equals6() {
let b = Arg::from_usage("-o --opt=[option]... 'some help info'");
assert_eq!(b.name, "opt");
assert_eq!(b.long.unwrap(), "opt");
assert_eq!(b.short.unwrap(), 'o');
assert_eq!(b.help.unwrap(), "some help info");
assert!(b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::TakesValue));
assert!(!b.is_set(ArgSettings::Required));
assert_eq!(b.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(b.num_vals.is_none());
}
#[test]
fn create_option_usage_both_equals7() {
let c = Arg::from_usage("<option>... -o --opt=<opt> 'some help info'");
assert_eq!(c.name, "option");
assert_eq!(c.long.unwrap(), "opt");
assert_eq!(c.short.unwrap(), 'o');
assert_eq!(c.help.unwrap(), "some help info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(c.is_set(ArgSettings::TakesValue));
assert!(c.is_set(ArgSettings::Required));
assert_eq!(c.val_names.unwrap().values().collect::<Vec<_>>(), [&"opt"]);
assert!(c.num_vals.is_none());
}
#[test]
fn create_option_usage_both_equals8() {
let d = Arg::from_usage("-o --opt=<option>... 'some help info'");
assert_eq!(d.name, "opt");
assert_eq!(d.long.unwrap(), "opt");
assert_eq!(d.short.unwrap(), 'o');
assert_eq!(d.help.unwrap(), "some help info");
assert!(d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"option"]);
assert!(d.num_vals.is_none());
}
// --- Options taking several named values (`-o <file> <mode>`) ---
// Two value tokens produce two val_names and num_vals == 2.
#[test]
fn create_option_with_vals1() {
let d = Arg::from_usage("-o <file> <mode> 'some help info'");
assert_eq!(d.name, "o");
assert!(d.long.is_none());
assert_eq!(d.short.unwrap(), 'o');
assert_eq!(d.help.unwrap(), "some help info");
assert!(!d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"file", &"mode"]);
assert_eq!(d.num_vals.unwrap(), 2);
}
#[test]
fn create_option_with_vals2() {
let d = Arg::from_usage("-o <file> <mode>... 'some help info'");
assert_eq!(d.name, "o");
assert!(d.long.is_none());
assert_eq!(d.short.unwrap(), 'o');
assert_eq!(d.help.unwrap(), "some help info");
assert!(d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"file", &"mode"]);
assert_eq!(d.num_vals.unwrap(), 2);
}
#[test]
fn create_option_with_vals3() {
let d = Arg::from_usage("--opt <file> <mode>... 'some help info'");
assert_eq!(d.name, "opt");
assert!(d.short.is_none());
assert_eq!(d.long.unwrap(), "opt");
assert_eq!(d.help.unwrap(), "some help info");
assert!(d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"file", &"mode"]);
assert_eq!(d.num_vals.unwrap(), 2);
}
// Explicit `[myopt]` name token makes the multi-value option optional.
#[test]
fn create_option_with_vals4() {
let d = Arg::from_usage("[myopt] --opt <file> <mode> 'some help info'");
assert_eq!(d.name, "myopt");
assert!(d.short.is_none());
assert_eq!(d.long.unwrap(), "opt");
assert_eq!(d.help.unwrap(), "some help info");
assert!(!d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(!d.is_set(ArgSettings::Required));
assert_eq!(d.val_names.unwrap().values().collect::<Vec<_>>(),
[&"file", &"mode"]);
assert_eq!(d.num_vals.unwrap(), 2);
}
// NOTE(review): unlike its siblings, this case does not assert val_names --
// presumably an oversight rather than intentional; confirm before adding it.
#[test]
fn create_option_with_vals5() {
let d = Arg::from_usage("--opt <file> <mode> 'some help info'");
assert_eq!(d.name, "opt");
assert!(d.short.is_none());
assert_eq!(d.long.unwrap(), "opt");
assert_eq!(d.help.unwrap(), "some help info");
assert!(!d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::TakesValue));
assert!(d.is_set(ArgSettings::Required));
assert_eq!(d.num_vals.unwrap(), 2);
}
#[test]
fn create_positional_usage() {
let a = Arg::from_usage("[pos] 'some help info'");
assert_eq!(a.name, "pos");
assert_eq!(a.help.unwrap(), "some help info");
assert!(!a.is_set(ArgSettings::Multiple));
assert!(!a.is_set(ArgSettings::Required));
assert!(a.val_names.is_none());
assert!(a.num_vals.is_none());
}
#[test]
fn create_positional_usage0() {
let b = Arg::from_usage("<pos> 'some help info'");
assert_eq!(b.name, "pos");
assert_eq!(b.help.unwrap(), "some help info");
assert!(!b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::Required));
assert!(b.val_names.is_none());
assert!(b.num_vals.is_none());
}
#[test]
fn pos_mult_help() {
let c = Arg::from_usage("[pos]... 'some help info'");
assert_eq!(c.name, "pos");
assert_eq!(c.help.unwrap(), "some help info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(!c.is_set(ArgSettings::Required));
assert!(c.val_names.is_none());
assert!(c.num_vals.is_none());
}
#[test]
fn pos_help_lit_single_quote() {
let c = Arg::from_usage("[pos]... 'some help\' info'");
assert_eq!(c.name, "pos");
assert_eq!(c.help.unwrap(), "some help' info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(!c.is_set(ArgSettings::Required));
assert!(c.val_names.is_none());
assert!(c.num_vals.is_none());
}
#[test]
fn pos_help_double_lit_single_quote() {
let c = Arg::from_usage("[pos]... 'some \'help\' info'");
assert_eq!(c.name, "pos");
assert_eq!(c.help.unwrap(), "some 'help' info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(!c.is_set(ArgSettings::Required));
assert!(c.val_names.is_none());
assert!(c.num_vals.is_none());
}
#[test]
fn pos_help_newline() {
let c = Arg::from_usage("[pos]... 'some help{n}\
info'");
assert_eq!(c.name, "pos");
assert_eq!(c.help.unwrap(), "some help{n}info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(!c.is_set(ArgSettings::Required));
assert!(c.val_names.is_none());
assert!(c.num_vals.is_none());
}
#[test]
fn pos_help_newline_lit_sq() {
let c = Arg::from_usage("[pos]... 'some help\' stuff{n}\
info'");
assert_eq!(c.name, "pos");
assert_eq!(c.help.unwrap(), "some help' stuff{n}info");
assert!(c.is_set(ArgSettings::Multiple));
assert!(!c.is_set(ArgSettings::Required));
assert!(c.val_names.is_none());
assert!(c.num_vals.is_none());
}
#[test]
fn pos_req_mult_help() {
let d = Arg::from_usage("<pos>... 'some help info'");
assert_eq!(d.name, "pos");
assert_eq!(d.help.unwrap(), "some help info");
assert!(d.is_set(ArgSettings::Multiple));
assert!(d.is_set(ArgSettings::Required));
assert!(d.val_names.is_none());
assert!(d.num_vals.is_none());
}
#[test]
fn pos_req() {
let b = Arg::from_usage("<pos>");
assert_eq!(b.name, "pos");
assert!(!b.is_set(ArgSettings::Multiple));
assert!(b.is_set(ArgSettings::Required));
assert!(b.val_names.is_none());
assert!(b.num_vals.is_none());
}
#[test]
fn pos_mult() {
let c = Arg::from_usage("[pos]...");
assert_eq!(c.name, "pos");
assert!(c.is_set(ArgSettings::Multiple));
assert!(!c.is_set(ArgSettings::Required));
assert!(c.val_names.is_none());
assert!(c.num_vals.is_none());
}
#[test]
fn nonascii() {
let a = Arg::from_usage("<ASCII> 'üñíčöĐ€'");
assert_eq!(a.name, "ASCII");
assert_eq!(a.help, Some("üñíčöĐ€"));
let a = Arg::from_usage("<üñíčöĐ€> 'ASCII'");
assert_eq!(a.name, "üñíčöĐ€");
assert_eq!(a.help, Some("ASCII"));
let a = Arg::from_usage("<üñíčöĐ€> 'üñíčöĐ€'");
assert_eq!(a.name, "üñíčöĐ€");
assert_eq!(a.help, Some("üñíčöĐ€"));
let a = Arg::from_usage("-ø 'ø'");
assert_eq!(a.name, "ø");
assert_eq!(a.short, Some('ø'));
assert_eq!(a.help, Some("ø"));
let a = Arg::from_usage("--üñíčöĐ€ 'Nōṫ ASCII'");
assert_eq!(a.name, "üñíčöĐ€");
assert_eq!(a.long, Some("üñíčöĐ€"));
assert_eq!(a.help, Some("Nōṫ ASCII"));
let a = Arg::from_usage("[ñämê] --ôpt=[üñíčöĐ€] 'hælp'");
assert_eq!(a.name, "ñämê");
assert_eq!(a.long, Some("ôpt"));
assert_eq!(a.val_names.unwrap().values().collect::<Vec<_>>(),
[&"üñíčöĐ€"]);
assert_eq!(a.help, Some("hælp"));
}
}
|
{
    // Stray test body (no `#[test] fn` header in view): an optional
    // `--opt` flag taking a single named value, with help text.
    let option = Arg::from_usage("[option] --opt=[opt] 'some help info'");
    assert_eq!(option.name, "option");
    assert_eq!(option.long.unwrap(), "opt");
    assert!(option.short.is_none());
    assert_eq!(option.help.unwrap(), "some help info");
    // `=[opt]` => takes exactly one value; not required, not repeatable.
    assert!(option.is_set(ArgSettings::TakesValue));
    assert!(!option.is_set(ArgSettings::Multiple));
    assert!(!option.is_set(ArgSettings::Required));
    assert_eq!(option.val_names.unwrap().values().collect::<Vec<_>>(),
               [&"opt"]);
    assert!(option.num_vals.is_none());
}
|
vuecal.umd.min.js
|
(function(t,e){"object"===typeof exports&&"object"===typeof module?module.exports=e(require("vue")):"function"===typeof define&&define.amd?define([],e):"object"===typeof exports?exports["vuecal"]=e(require("vue")):t["vuecal"]=e(t["Vue"])})("undefined"!==typeof self?self:this,(function(t){return function(t){function e(e){for(var n,r,a=e[0],s=e[1],o=0,l=[];o<a.length;o++)r=a[o],Object.prototype.hasOwnProperty.call(i,r)&&i[r]&&l.push(i[r][0]),i[r]=0;for(n in s)Object.prototype.hasOwnProperty.call(s,n)&&(t[n]=s[n]);u&&u(e);while(l.length)l.shift()()}var n={},i={40:0};function r(t){return a.p+"vuecal.umd.min."+({0:"i18n/ar",1:"i18n/bg",2:"i18n/bn",3:"i18n/bs",4:"i18n/ca",5:"i18n/cs",6:"i18n/da",7:"i18n/de",8:"i18n/el",9:"i18n/es",10:"i18n/fa",11:"i18n/fr",12:"i18n/he",13:"i18n/hr",14:"i18n/hu",15:"i18n/id",16:"i18n/is",17:"i18n/it",18:"i18n/ja",19:"i18n/ka",20:"i18n/ko",21:"i18n/lt",22:"i18n/mn",23:"i18n/nl",24:"i18n/no",25:"i18n/pl",26:"i18n/pt-br",27:"i18n/ro",28:"i18n/ru",29:"i18n/sk",30:"i18n/sl",31:"i18n/sq",32:"i18n/sr",33:"i18n/sv",34:"i18n/tr",35:"i18n/uk",36:"i18n/vi",37:"i18n/zh-cn",38:"i18n/zh-hk",39:"drag-and-drop"}[t]||t)+".js"}function a(e){if(n[e])return n[e].exports;var i=n[e]={i:e,l:!1,exports:{}};return t[e].call(i.exports,i,i.exports,a),i.l=!0,i.exports}a.e=function(t){var e=[],n=i[t];if(0!==n)if(n)e.push(n[2]);else{var s=new Promise((function(e,r){n=i[t]=[e,r]}));e.push(n[2]=s);var o,l=document.createElement("script");l.charset="utf-8",l.timeout=120,a.nc&&l.setAttribute("nonce",a.nc),l.src=r(t);var u=new Error;o=function(e){l.onerror=l.onload=null,clearTimeout(c);var n=i[t];if(0!==n){if(n){var r=e&&("load"===e.type?"missing":e.type),a=e&&e.target&&e.target.src;u.message="Loading chunk "+t+" failed.\n("+r+": "+a+")",u.name="ChunkLoadError",u.type=r,u.request=a,n[1](u)}i[t]=void 0}};var c=setTimeout((function(){o({type:"timeout",target:l})}),12e4);l.onerror=l.onload=o,document.head.appendChild(l)}return 
Promise.all(e)},a.m=t,a.c=n,a.d=function(t,e,n){a.o(t,e)||Object.defineProperty(t,e,{enumerable:!0,get:n})},a.r=function(t){"undefined"!==typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})},a.t=function(t,e){if(1&e&&(t=a(t)),8&e)return t;if(4&e&&"object"===typeof t&&t&&t.__esModule)return t;var n=Object.create(null);if(a.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:t}),2&e&&"string"!=typeof t)for(var i in t)a.d(n,i,function(e){return t[e]}.bind(null,i));return n},a.n=function(t){var e=t&&t.__esModule?function(){return t["default"]}:function(){return t};return a.d(e,"a",e),e},a.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},a.p="",a.oe=function(t){throw console.error(t),t};var s=("undefined"!==typeof self?self:this)["webpackJsonpvuecal"]=("undefined"!==typeof self?self:this)["webpackJsonpvuecal"]||[],o=s.push.bind(s);s.push=e,s=s.slice();for(var l=0;l<s.length;l++)e(s[l]);var u=o;return a(a.s="fb15")}({"00ee":function(t,e,n){var i=n("b622"),r=i("toStringTag"),a={};a[r]="z",t.exports="[object z]"===String(a)},"0366":function(t,e,n){var i=n("1c0b");t.exports=function(t,e,n){if(i(t),void 0===e)return t;switch(n){case 0:return function(){return t.call(e)};case 1:return function(n){return t.call(e,n)};case 2:return function(n,i){return t.call(e,n,i)};case 3:return function(n,i,r){return t.call(e,n,i,r)}}return function(){return t.apply(e,arguments)}}},"057f":function(t,e,n){var i=n("fc6a"),r=n("241c").f,a={}.toString,s="object"==typeof window&&window&&Object.getOwnPropertyNames?Object.getOwnPropertyNames(window):[],o=function(t){try{return r(t)}catch(e){return s.slice()}};t.exports.f=function(t){return s&&"[object Window]"==a.call(t)?o(t):r(i(t))}},"06cf":function(t,e,n){var i=n("83ab"),r=n("d1e7"),a=n("5c6c"),s=n("fc6a"),o=n("c04e"),l=n("5135"),u=n("0cfb"),c=Object.getOwnPropertyDescriptor;e.f=i?c:function(t,e){if(t=s(t),e=o(e,!0),u)try{return 
c(t,e)}catch(n){}if(l(t,e))return a(!r.f.call(t,e),t[e])}},"0a96":function(t){t.exports=JSON.parse('{"weekDays":["Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"],"months":["January","February","March","April","May","June","July","August","September","October","November","December"],"years":"Years","year":"Year","month":"Month","week":"Week","day":"Day","today":"Today","noEvent":"No Event","allDay":"All day","deleteEvent":"Delete","createEvent":"Create an event","dateFormat":"dddd MMMM D{S}, YYYY"}')},"0cb2":function(t,e,n){var i=n("7b0b"),r=Math.floor,a="".replace,s=/\$([$&'`]|\d{1,2}|<[^>]*>)/g,o=/\$([$&'`]|\d{1,2})/g;t.exports=function(t,e,n,l,u,c){var d=n+t.length,f=l.length,v=o;return void 0!==u&&(u=i(u),v=s),a.call(c,v,(function(i,a){var s;switch(a.charAt(0)){case"$":return"$";case"&":return t;case"`":return e.slice(0,n);case"'":return e.slice(d);case"<":s=u[a.slice(1,-1)];break;default:var o=+a;if(0===o)return i;if(o>f){var c=r(o/10);return 0===c?i:c<=f?void 0===l[c-1]?a.charAt(1):l[c-1]+a.charAt(1):i}s=l[o-1]}return void 0===s?"":s}))}},"0cfb":function(t,e,n){var i=n("83ab"),r=n("d039"),a=n("cc12");t.exports=!i&&!r((function(){return 7!=Object.defineProperty(a("div"),"a",{get:function(){return 7}}).a}))},1148:function(t,e,n){"use strict";var i=n("a691"),r=n("1d80");t.exports="".repeat||function(t){var e=String(r(this)),n="",a=i(t);if(a<0||a==1/0)throw RangeError("Wrong number of repetitions");for(;a>0;(a>>>=1)&&(e+=e))1&a&&(n+=e);return n}},1276:function(t,e,n){"use strict";var i=n("d784"),r=n("44e7"),a=n("825a"),s=n("1d80"),o=n("4840"),l=n("8aa5"),u=n("50c4"),c=n("14c3"),d=n("9263"),f=n("d039"),v=[].push,h=Math.min,p=4294967295,y=!f((function(){return!RegExp(p,"y")}));i("split",2,(function(t,e,n){var i;return i="c"=="abbc".split(/(b)*/)[1]||4!="test".split(/(?:)/,-1).length||2!="ab".split(/(?:ab)*/).length||4!=".".split(/(.?)(.?)/).length||".".split(/()()/).length>1||"".split(/.?/).length?function(t,n){var i=String(s(this)),a=void 
0===n?p:n>>>0;if(0===a)return[];if(void 0===t)return[i];if(!r(t))return e.call(i,t,a);var o,l,u,c=[],f=(t.ignoreCase?"i":"")+(t.multiline?"m":"")+(t.unicode?"u":"")+(t.sticky?"y":""),h=0,y=new RegExp(t.source,f+"g");while(o=d.call(y,i)){if(l=y.lastIndex,l>h&&(c.push(i.slice(h,o.index)),o.length>1&&o.index<i.length&&v.apply(c,o.slice(1)),u=o[0].length,h=l,c.length>=a))break;y.lastIndex===o.index&&y.lastIndex++}return h===i.length?!u&&y.test("")||c.push(""):c.push(i.slice(h)),c.length>a?c.slice(0,a):c}:"0".split(void 0,0).length?function(t,n){return void 0===t&&0===n?[]:e.call(this,t,n)}:e,[function(e,n){var r=s(this),a=void 0==e?void 0:e[t];return void 0!==a?a.call(e,r,n):i.call(String(r),e,n)},function(t,r){var s=n(i,t,this,r,i!==e);if(s.done)return s.value;var d=a(t),f=String(this),v=o(d,RegExp),m=d.unicode,g=(d.ignoreCase?"i":"")+(d.multiline?"m":"")+(d.unicode?"u":"")+(y?"y":"g"),b=new v(y?d:"^(?:"+d.source+")",g),w=void 0===r?p:r>>>0;if(0===w)return[];if(0===f.length)return null===c(b,f)?[f]:[];var D=0,_=0,E=[];while(_<f.length){b.lastIndex=y?_:0;var k,S=c(b,y?f:f.slice(_));if(null===S||(k=h(u(b.lastIndex+(y?0:_)),f.length))===D)_=l(f,_,m);else{if(E.push(f.slice(D,_)),E.length===w)return E;for(var x=1;x<=S.length-1;x++)if(E.push(S[x]),E.length===w)return E;_=D=k}}return E.push(f.slice(D)),E}]}),!y)},"12cd":function(t,e,n){},1332:function(t,e,n){},"13d5":function(t,e,n){"use strict";var i=n("23e7"),r=n("d58f").left,a=n("a640"),s=n("2d00"),o=n("605d"),l=a("reduce"),u=!o&&s>79&&s<83;i({target:"Array",proto:!0,forced:!l||u},{reduce:function(t){return r(this,t,arguments.length,arguments.length>1?arguments[1]:void 0)}})},"14c3":function(t,e,n){var i=n("c6b6"),r=n("9263");t.exports=function(t,e){var n=t.exec;if("function"===typeof n){var a=n.call(t,e);if("object"!==typeof a)throw TypeError("RegExp exec method returned something other than an Object or null");return a}if("RegExp"!==i(t))throw TypeError("RegExp#exec called on incompatible receiver");return 
r.call(t,e)}},"159b":function(t,e,n){var i=n("da84"),r=n("fdbc"),a=n("17c2"),s=n("9112");for(var o in r){var l=i[o],u=l&&l.prototype;if(u&&u.forEach!==a)try{s(u,"forEach",a)}catch(c){u.forEach=a}}},"17c2":function(t,e,n){"use strict";var i=n("b727").forEach,r=n("a640"),a=r("forEach");t.exports=a?[].forEach:function(t){return i(this,t,arguments.length>1?arguments[1]:void 0)}},"19aa":function(t,e){t.exports=function(t,e,n){if(!(t instanceof e))throw TypeError("Incorrect "+(n?n+" ":"")+"invocation");return t}},"1be4":function(t,e,n){var i=n("d066");t.exports=i("document","documentElement")},"1c0b":function(t,e){t.exports=function(t){if("function"!=typeof t)throw TypeError(String(t)+" is not a function");return t}},"1c7e":function(t,e,n){var i=n("b622"),r=i("iterator"),a=!1;try{var s=0,o={next:function(){return{done:!!s++}},return:function(){a=!0}};o[r]=function(){return this},Array.from(o,(function(){throw 2}))}catch(l){}t.exports=function(t,e){if(!e&&!a)return!1;var n=!1;try{var i={};i[r]=function(){return{next:function(){return{done:n=!0}}}},t(i)}catch(l){}return n}},"1cdc":function(t,e,n){var i=n("342f");t.exports=/(iphone|ipod|ipad).*applewebkit/i.test(i)},"1d80":function(t,e){t.exports=function(t){if(void 0==t)throw TypeError("Can't call method on "+t);return t}},"1dde":function(t,e,n){var i=n("d039"),r=n("b622"),a=n("2d00"),s=r("species");t.exports=function(t){return a>=51||!i((function(){var e=[],n=e.constructor={};return n[s]=function(){return{foo:1}},1!==e[t](Boolean).foo}))}},2170:function(t,e,n){},2266:function(t,e,n){var i=n("825a"),r=n("e95a"),a=n("50c4"),s=n("0366"),o=n("35a1"),l=n("2a62"),u=function(t,e){this.stopped=t,this.result=e};t.exports=function(t,e,n){var c,d,f,v,h,p,y,m=n&&n.that,g=!(!n||!n.AS_ENTRIES),b=!(!n||!n.IS_ITERATOR),w=!(!n||!n.INTERRUPTED),D=s(e,m,1+g+w),_=function(t){return c&&l(c),new u(!0,t)},E=function(t){return g?(i(t),w?D(t[0],t[1],_):D(t[0],t[1])):w?D(t,_):D(t)};if(b)c=t;else{if(d=o(t),"function"!=typeof d)throw 
TypeError("Target is not iterable");if(r(d)){for(f=0,v=a(t.length);v>f;f++)if(h=E(t[f]),h&&h instanceof u)return h;return new u(!1)}c=d.call(t)}p=c.next;while(!(y=p.call(c)).done){try{h=E(y.value)}catch(k){throw l(c),k}if("object"==typeof h&&h&&h instanceof u)return h}return new u(!1)}},"23cb":function(t,e,n){var i=n("a691"),r=Math.max,a=Math.min;t.exports=function(t,e){var n=i(t);return n<0?r(n+e,0):a(n,e)}},"23e7":function(t,e,n){var i=n("da84"),r=n("06cf").f,a=n("9112"),s=n("6eeb"),o=n("ce4e"),l=n("e893"),u=n("94ca");t.exports=function(t,e){var n,c,d,f,v,h,p=t.target,y=t.global,m=t.stat;if(c=y?i:m?i[p]||o(p,{}):(i[p]||{}).prototype,c)for(d in e){if(v=e[d],t.noTargetGet?(h=r(c,d),f=h&&h.value):f=c[d],n=u(y?d:p+(m?".":"#")+d,t.forced),!n&&void 0!==f){if(typeof v===typeof f)continue;l(v,f)}(t.sham||f&&f.sham)&&a(v,"sham",!0),s(c,d,v,t)}}},"241c":function(t,e,n){var i=n("ca84"),r=n("7839"),a=r.concat("length","prototype");e.f=Object.getOwnPropertyNames||function(t){return i(t,a)}},2532:function(t,e,n){"use strict";var i=n("23e7"),r=n("5a34"),a=n("1d80"),s=n("ab13");i({target:"String",proto:!0,forced:!s("includes")},{includes:function(t){return!!~String(a(this)).indexOf(r(t),arguments.length>1?arguments[1]:void 0)}})},"25f0":function(t,e,n){"use strict";var i=n("6eeb"),r=n("825a"),a=n("d039"),s=n("ad6d"),o="toString",l=RegExp.prototype,u=l[o],c=a((function(){return"/a/b"!=u.call({source:"a",flags:"b"})})),d=u.name!=o;(c||d)&&i(RegExp.prototype,o,(function(){var t=r(this),e=String(t.source),n=t.flags,i=String(void 0===n&&t instanceof RegExp&&!("flags"in l)?s.call(t):n);return"/"+e+"/"+i}),{unsafe:!0})},2626:function(t,e,n){"use strict";var i=n("d066"),r=n("9bf2"),a=n("b622"),s=n("83ab"),o=a("species");t.exports=function(t){var e=i(t),n=r.f;s&&e&&!e[o]&&n(e,o,{configurable:!0,get:function(){return this}})}},"2a62":function(t,e,n){var i=n("825a");t.exports=function(t){var e=t["return"];if(void 0!==e)return i(e.call(t)).value}},"2cf4":function(t,e,n){var 
i,r,a,s=n("da84"),o=n("d039"),l=n("0366"),u=n("1be4"),c=n("cc12"),d=n("1cdc"),f=n("605d"),v=s.location,h=s.setImmediate,p=s.clearImmediate,y=s.process,m=s.MessageChannel,g=s.Dispatch,b=0,w={},D="onreadystatechange",_=function(t){if(w.hasOwnProperty(t)){var e=w[t];delete w[t],e()}},E=function(t){return function(){_(t)}},k=function(t){_(t.data)},S=function(t){s.postMessage(t+"",v.protocol+"//"+v.host)};h&&p||(h=function(t){var e=[],n=1;while(arguments.length>n)e.push(arguments[n++]);return w[++b]=function(){("function"==typeof t?t:Function(t)).apply(void 0,e)},i(b),b},p=function(t){delete w[t]},f?i=function(t){y.nextTick(E(t))}:g&&g.now?i=function(t){g.now(E(t))}:m&&!d?(r=new m,a=r.port2,r.port1.onmessage=k,i=l(a.postMessage,a,1)):s.addEventListener&&"function"==typeof postMessage&&!s.importScripts&&v&&"file:"!==v.protocol&&!o(S)?(i=S,s.addEventListener("message",k,!1)):i=D in c("script")?function(t){u.appendChild(c("script"))[D]=function(){u.removeChild(this),_(t)}}:function(t){setTimeout(E(t),0)}),t.exports={set:h,clear:p}},"2d00":function(t,e,n){var i,r,a=n("da84"),s=n("342f"),o=a.process,l=o&&o.versions,u=l&&l.v8;u?(i=u.split("."),r=i[0]+i[1]):s&&(i=s.match(/Edge\/(\d+)/),(!i||i[1]>=74)&&(i=s.match(/Chrome\/(\d+)/),i&&(r=i[1]))),t.exports=r&&+r},"342f":function(t,e,n){var i=n("d066");t.exports=i("navigator","userAgent")||""},"35a1":function(t,e,n){var i=n("f5df"),r=n("3f8c"),a=n("b622"),s=a("iterator");t.exports=function(t){if(void 0!=t)return t[s]||t["@@iterator"]||r[i(t)]}},"37e8":function(t,e,n){var i=n("83ab"),r=n("9bf2"),a=n("825a"),s=n("df75");t.exports=i?Object.defineProperties:function(t,e){a(t);var n,i=s(e),o=i.length,l=0;while(o>l)r.f(t,n=i[l++],e[n]);return t}},"38cf":function(t,e,n){var i=n("23e7"),r=n("1148");i({target:"String",proto:!0},{repeat:r})},"3bbe":function(t,e,n){var i=n("861d");t.exports=function(t){if(!i(t)&&null!==t)throw TypeError("Can't set "+String(t)+" as a prototype");return t}},"3ca3":function(t,e,n){"use strict";var 
i=n("6547").charAt,r=n("69f3"),a=n("7dd0"),s="String Iterator",o=r.set,l=r.getterFor(s);a(String,"String",(function(t){o(this,{type:s,string:String(t),index:0})}),(function(){var t,e=l(this),n=e.string,r=e.index;return r>=n.length?{value:void 0,done:!0}:(t=i(n,r),e.index+=t.length,{value:t,done:!1})}))},"3f8c":function(t,e){t.exports={}},"428f":function(t,e,n){var i=n("da84");t.exports=i},"44ad":function(t,e,n){var i=n("d039"),r=n("c6b6"),a="".split;t.exports=i((function(){return!Object("z").propertyIsEnumerable(0)}))?function(t){return"String"==r(t)?a.call(t,""):Object(t)}:Object},"44d2":function(t,e,n){var i=n("b622"),r=n("7c73"),a=n("9bf2"),s=i("unscopables"),o=Array.prototype;void 0==o[s]&&a.f(o,s,{configurable:!0,value:r(null)}),t.exports=function(t){o[s][t]=!0}},"44de":function(t,e,n){var i=n("da84");t.exports=function(t,e){var n=i.console;n&&n.error&&(1===arguments.length?n.error(t):n.error(t,e))}},"44e7":function(t,e,n){var i=n("861d"),r=n("c6b6"),a=n("b622"),s=a("match");t.exports=function(t){var e;return i(t)&&(void 0!==(e=t[s])?!!e:"RegExp"==r(t))}},4840:function(t,e,n){var i=n("825a"),r=n("1c0b"),a=n("b622"),s=a("species");t.exports=function(t,e){var n,a=i(t).constructor;return void 0===a||void 0==(n=i(a)[s])?e:r(n)}},4930:function(t,e,n){var i=n("605d"),r=n("2d00"),a=n("d039");t.exports=!!Object.getOwnPropertySymbols&&!a((function(){return!Symbol.sham&&(i?38===r:r>37&&r<41)}))},"4a53":function(t,e,n){var 
i={"./ar":["cfcc",0],"./ar.json":["cfcc",0],"./bg":["1f0e",1],"./bg.json":["1f0e",1],"./bn":["d2d5",2],"./bn.json":["d2d5",2],"./bs":["e06f",3],"./bs.json":["e06f",3],"./ca":["aeaf",4],"./ca.json":["aeaf",4],"./cs":["442f",5],"./cs.json":["442f",5],"./da":["93f6",6],"./da.json":["93f6",6],"./de":["44ff",7],"./de.json":["44ff",7],"./el":["bac9",8],"./el.json":["bac9",8],"./en":["0a96"],"./en.json":["0a96"],"./es":["3541",9],"./es.json":["3541",9],"./fa":["e4ca",10],"./fa.json":["e4ca",10],"./fr":["d791",11],"./fr.json":["d791",11],"./he":["5f2c",12],"./he.json":["5f2c",12],"./hr":["2364",13],"./hr.json":["2364",13],"./hu":["0ade",14],"./hu.json":["0ade",14],"./id":["ad69",15],"./id.json":["ad69",15],"./is":["3ada",16],"./is.json":["3ada",16],"./it":["1412",17],"./it.json":["1412",17],"./ja":["e135",18],"./ja.json":["e135",18],"./ka":["2969",19],"./ka.json":["2969",19],"./ko":["03b7",20],"./ko.json":["03b7",20],"./lt":["a2f0",21],"./lt.json":["a2f0",21],"./mn":["956e",22],"./mn.json":["956e",22],"./nl":["9f37",23],"./nl.json":["9f37",23],"./no":["9efb",24],"./no.json":["9efb",24],"./pl":["e44c",25],"./pl.json":["e44c",25],"./pt-br":["dac8",26],"./pt-br.json":["dac8",26],"./ro":["0946",27],"./ro.json":["0946",27],"./ru":["d82c",28],"./ru.json":["d82c",28],"./sk":["1037",29],"./sk.json":["1037",29],"./sl":["c17e",30],"./sl.json":["c17e",30],"./sq":["09b8",31],"./sq.json":["09b8",31],"./sr":["65a6",32],"./sr.json":["65a6",32],"./sv":["1fd1",33],"./sv.json":["1fd1",33],"./tr":["20e4",34],"./tr.json":["20e4",34],"./uk":["7dc6",35],"./uk.json":["7dc6",35],"./vi":["5465",36],"./vi.json":["5465",36],"./zh-cn":["8035",37],"./zh-cn.json":["8035",37],"./zh-hk":["a5dc",38],"./zh-hk.json":["a5dc",38]};function r(t){if(!n.o(i,t))return Promise.resolve().then((function(){var e=new Error("Cannot find module '"+t+"'");throw e.code="MODULE_NOT_FOUND",e}));var e=i[t],r=e[0];return Promise.all(e.slice(1).map(n.e)).then((function(){return n.t(r,3)}))}r.keys=function(){return 
Object.keys(i)},r.id="4a53",t.exports=r},"4d64":function(t,e,n){var i=n("fc6a"),r=n("50c4"),a=n("23cb"),s=function(t){return function(e,n,s){var o,l=i(e),u=r(l.length),c=a(s,u);if(t&&n!=n){while(u>c)if(o=l[c++],o!=o)return!0}else for(;u>c;c++)if((t||c in l)&&l[c]===n)return t||c||0;return!t&&-1}};t.exports={includes:s(!0),indexOf:s(!1)}},"4de4":function(t,e,n){"use strict";var i=n("23e7"),r=n("b727").filter,a=n("1dde"),s=a("filter");i({target:"Array",proto:!0,forced:!s},{filter:function(t){return r(this,t,arguments.length>1?arguments[1]:void 0)}})},"4df4":function(t,e,n){"use strict";var i=n("0366"),r=n("7b0b"),a=n("9bdd"),s=n("e95a"),o=n("50c4"),l=n("8418"),u=n("35a1");t.exports=function(t){var e,n,c,d,f,v,h=r(t),p="function"==typeof this?this:Array,y=arguments.length,m=y>1?arguments[1]:void 0,g=void 0!==m,b=u(h),w=0;if(g&&(m=i(m,y>2?arguments[2]:void 0,2)),void 0==b||p==Array&&s(b))for(e=o(h.length),n=new p(e);e>w;w++)v=g?m(h[w],w):h[w],l(n,w,v);else for(d=b.call(h),f=d.next,n=new p;!(c=f.call(d)).done;w++)v=g?a(d,m,[c.value,w],!0):c.value,l(n,w,v);return n.length=w,n}},"50c4":function(t,e,n){var i=n("a691"),r=Math.min;t.exports=function(t){return t>0?r(i(t),9007199254740991):0}},5135:function(t,e){var n={}.hasOwnProperty;t.exports=function(t,e){return n.call(t,e)}},5319:function(t,e,n){"use strict";var i=n("d784"),r=n("825a"),a=n("50c4"),s=n("a691"),o=n("1d80"),l=n("8aa5"),u=n("0cb2"),c=n("14c3"),d=Math.max,f=Math.min,v=function(t){return void 0===t?t:String(t)};i("replace",2,(function(t,e,n,i){var h=i.REGEXP_REPLACE_SUBSTITUTES_UNDEFINED_CAPTURE,p=i.REPLACE_KEEPS_$0,y=h?"$":"$0";return[function(n,i){var r=o(this),a=void 0==n?void 0:n[t];return void 0!==a?a.call(n,r,i):e.call(String(r),n,i)},function(t,i){if(!h&&p||"string"===typeof i&&-1===i.indexOf(y)){var o=n(e,t,this,i);if(o.done)return o.value}var m=r(t),g=String(this),b="function"===typeof i;b||(i=String(i));var w=m.global;if(w){var D=m.unicode;m.lastIndex=0}var _=[];while(1){var 
E=c(m,g);if(null===E)break;if(_.push(E),!w)break;var k=String(E[0]);""===k&&(m.lastIndex=l(g,a(m.lastIndex),D))}for(var S="",x=0,O=0;O<_.length;O++){E=_[O];for(var C=String(E[0]),T=d(f(s(E.index),g.length),0),M=[],j=1;j<E.length;j++)M.push(v(E[j]));var A=E.groups;if(b){var W=[C].concat(M,T,g);void 0!==A&&W.push(A);var V=String(i.apply(void 0,W))}else V=u(C,g,T,M,A,i);T>=x&&(S+=g.slice(x,T)+V,x=T+C.length)}return S+g.slice(x)}]}))},5530:function(t,e,n){"use strict";n.d(e,"a",(function(){return a}));n("b64b"),n("a4d3"),n("4de4"),n("e439"),n("159b"),n("dbb4");var i=n("ade3");function r(t,e){var n=Object.keys(t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(t);e&&(i=i.filter((function(e){return Object.getOwnPropertyDescriptor(t,e).enumerable}))),n.push.apply(n,i)}return n}function a(t){for(var e=1;e<arguments.length;e++){var n=null!=arguments[e]?arguments[e]:{};e%2?r(Object(n),!0).forEach((function(e){Object(i["a"])(t,e,n[e])})):Object.getOwnPropertyDescriptors?Object.defineProperties(t,Object.getOwnPropertyDescriptors(n)):r(Object(n)).forEach((function(e){Object.defineProperty(t,e,Object.getOwnPropertyDescriptor(n,e))}))}return t}},5692:function(t,e,n){var i=n("c430"),r=n("c6cd");(t.exports=function(t,e){return r[t]||(r[t]=void 0!==e?e:{})})("versions",[]).push({version:"3.9.1",mode:i?"pure":"global",copyright:"© 2021 Denis Pushkarev (zloirock.ru)"})},"56ef":function(t,e,n){var i=n("d066"),r=n("241c"),a=n("7418"),s=n("825a");t.exports=i("Reflect","ownKeys")||function(t){var e=r.f(s(t)),n=a.f;return n?e.concat(n(t)):e}},5899:function(t,e){t.exports="\t\n\v\f\r \u2028\u2029\ufeff"},"58a8":function(t,e,n){var i=n("1d80"),r=n("5899"),a="["+r+"]",s=RegExp("^"+a+a+"*"),o=RegExp(a+a+"*$"),l=function(t){return function(e){var n=String(i(e));return 1&t&&(n=n.replace(s,"")),2&t&&(n=n.replace(o,"")),n}};t.exports={start:l(1),end:l(2),trim:l(3)}},"5a34":function(t,e,n){var i=n("44e7");t.exports=function(t){if(i(t))throw TypeError("The method doesn't accept 
regular expressions");return t}},"5c6c":function(t,e){t.exports=function(t,e){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:e}}},"605d":function(t,e,n){var i=n("c6b6"),r=n("da84");t.exports="process"==i(r.process)},6062:function(t,e,n){"use strict";var i=n("6d61"),r=n("6566");t.exports=i("Set",(function(t){return function(){return t(this,arguments.length?arguments[0]:void 0)}}),r)},"60da":function(t,e,n){"use strict";var i=n("83ab"),r=n("d039"),a=n("df75"),s=n("7418"),o=n("d1e7"),l=n("7b0b"),u=n("44ad"),c=Object.assign,d=Object.defineProperty;t.exports=!c||r((function(){if(i&&1!==c({b:1},c(d({},"a",{enumerable:!0,get:function(){d(this,"b",{value:3,enumerable:!1})}}),{b:2})).b)return!0;var t={},e={},n=Symbol(),r="abcdefghijklmnopqrst";return t[n]=7,r.split("").forEach((function(t){e[t]=t})),7!=c({},t)[n]||a(c({},e)).join("")!=r}))?function(t,e){var n=l(t),r=arguments.length,c=1,d=s.f,f=o.f;while(r>c){var v,h=u(arguments[c++]),p=d?a(h).concat(d(h)):a(h),y=p.length,m=0;while(y>m)v=p[m++],i&&!f.call(h,v)||(n[v]=h[v])}return n}:c},"61f2":function(t,e,n){"use strict";n("12cd")},6547:function(t,e,n){var i=n("a691"),r=n("1d80"),a=function(t){return function(e,n){var a,s,o=String(r(e)),l=i(n),u=o.length;return l<0||l>=u?t?"":void 0:(a=o.charCodeAt(l),a<55296||a>56319||l+1===u||(s=o.charCodeAt(l+1))<56320||s>57343?t?o.charAt(l):a:t?o.slice(l,l+2):s-56320+(a-55296<<10)+65536)}};t.exports={codeAt:a(!1),charAt:a(!0)}},6566:function(t,e,n){"use strict";var i=n("9bf2").f,r=n("7c73"),a=n("e2cc"),s=n("0366"),o=n("19aa"),l=n("2266"),u=n("7dd0"),c=n("2626"),d=n("83ab"),f=n("f183").fastKey,v=n("69f3"),h=v.set,p=v.getterFor;t.exports={getConstructor:function(t,e,n,u){var c=t((function(t,i){o(t,c,e),h(t,{type:e,index:r(null),first:void 0,last:void 0,size:0}),d||(t.size=0),void 0!=i&&l(i,t[u],{that:t,AS_ENTRIES:n})})),v=p(e),y=function(t,e,n){var i,r,a=v(t),s=m(t,e);return s?s.value=n:(a.last=s={index:r=f(e,!0),key:e,value:n,previous:i=a.last,next:void 
0,removed:!1},a.first||(a.first=s),i&&(i.next=s),d?a.size++:t.size++,"F"!==r&&(a.index[r]=s)),t},m=function(t,e){var n,i=v(t),r=f(e);if("F"!==r)return i.index[r];for(n=i.first;n;n=n.next)if(n.key==e)return n};return a(c.prototype,{clear:function(){var t=this,e=v(t),n=e.index,i=e.first;while(i)i.removed=!0,i.previous&&(i.previous=i.previous.next=void 0),delete n[i.index],i=i.next;e.first=e.last=void 0,d?e.size=0:t.size=0},delete:function(t){var e=this,n=v(e),i=m(e,t);if(i){var r=i.next,a=i.previous;delete n.index[i.index],i.removed=!0,a&&(a.next=r),r&&(r.previous=a),n.first==i&&(n.first=r),n.last==i&&(n.last=a),d?n.size--:e.size--}return!!i},forEach:function(t){var e,n=v(this),i=s(t,arguments.length>1?arguments[1]:void 0,3);while(e=e?e.next:n.first){i(e.value,e.key,this);while(e&&e.removed)e=e.previous}},has:function(t){return!!m(this,t)}}),a(c.prototype,n?{get:function(t){var e=m(this,t);return e&&e.value},set:function(t,e){return y(this,0===t?0:t,e)}}:{add:function(t){return y(this,t=0===t?0:t,t)}}),d&&i(c.prototype,"size",{get:function(){return v(this).size}}),c},setStrong:function(t,e,n){var i=e+" Iterator",r=p(e),a=p(i);u(t,e,(function(t,e){h(this,{type:i,target:t,state:r(t),kind:e,last:void 0})}),(function(){var t=a(this),e=t.kind,n=t.last;while(n&&n.removed)n=n.previous;return t.target&&(t.last=n=n?n.next:t.state.first)?"keys"==e?{value:n.key,done:!1}:"values"==e?{value:n.value,done:!1}:{value:[n.key,n.value],done:!1}:(t.target=void 0,{value:void 0,done:!0})}),n?"entries":"values",!n,!0),c(e)}}},"65f0":function(t,e,n){var i=n("861d"),r=n("e8b5"),a=n("b622"),s=a("species");t.exports=function(t,e){var n;return r(t)&&(n=t.constructor,"function"!=typeof n||n!==Array&&!r(n.prototype)?i(n)&&(n=n[s],null===n&&(n=void 0)):n=void 0),new(void 0===n?Array:n)(0===e?0:e)}},"69f3":function(t,e,n){var i,r,a,s=n("7f9a"),o=n("da84"),l=n("861d"),u=n("9112"),c=n("5135"),d=n("c6cd"),f=n("f772"),v=n("d012"),h=o.WeakMap,p=function(t){return a(t)?r(t):i(t,{})},y=function(t){return 
function(e){var n;if(!l(e)||(n=r(e)).type!==t)throw TypeError("Incompatible receiver, "+t+" required");return n}};if(s){var m=d.state||(d.state=new h),g=m.get,b=m.has,w=m.set;i=function(t,e){return e.facade=t,w.call(m,t,e),e},r=function(t){return g.call(m,t)||{}},a=function(t){return b.call(m,t)}}else{var D=f("state");v[D]=!0,i=function(t,e){return e.facade=t,u(t,D,e),e},r=function(t){return c(t,D)?t[D]:{}},a=function(t){return c(t,D)}}t.exports={set:i,get:r,has:a,enforce:p,getterFor:y}},"6d61":function(t,e,n){"use strict";var i=n("23e7"),r=n("da84"),a=n("94ca"),s=n("6eeb"),o=n("f183"),l=n("2266"),u=n("19aa"),c=n("861d"),d=n("d039"),f=n("1c7e"),v=n("d44e"),h=n("7156");t.exports=function(t,e,n){var p=-1!==t.indexOf("Map"),y=-1!==t.indexOf("Weak"),m=p?"set":"add",g=r[t],b=g&&g.prototype,w=g,D={},_=function(t){var e=b[t];s(b,t,"add"==t?function(t){return e.call(this,0===t?0:t),this}:"delete"==t?function(t){return!(y&&!c(t))&&e.call(this,0===t?0:t)}:"get"==t?function(t){return y&&!c(t)?void 0:e.call(this,0===t?0:t)}:"has"==t?function(t){return!(y&&!c(t))&&e.call(this,0===t?0:t)}:function(t,n){return e.call(this,0===t?0:t,n),this})},E=a(t,"function"!=typeof g||!(y||b.forEach&&!d((function(){(new g).entries().next()}))));if(E)w=n.getConstructor(e,t,p,m),o.REQUIRED=!0;else if(a(t,!0)){var k=new w,S=k[m](y?{}:-0,1)!=k,x=d((function(){k.has(1)})),O=f((function(t){new g(t)})),C=!y&&d((function(){var t=new g,e=5;while(e--)t[m](e,e);return!t.has(-0)}));O||(w=e((function(e,n){u(e,w,t);var i=h(new g,e,w);return void 0!=n&&l(n,i[m],{that:i,AS_ENTRIES:p}),i})),w.prototype=b,b.constructor=w),(x||C)&&(_("delete"),_("has"),p&&_("get")),(C||S)&&_(m),y&&b.clear&&delete b.clear}return D[t]=w,i({global:!0,forced:w!=g},D),v(w,t),y||n.setStrong(w,t,p),w}},"6eeb":function(t,e,n){var i=n("da84"),r=n("9112"),a=n("5135"),s=n("ce4e"),o=n("8925"),l=n("69f3"),u=l.get,c=l.enforce,d=String(String).split("String");(t.exports=function(t,e,n,o){var 
l,u=!!o&&!!o.unsafe,f=!!o&&!!o.enumerable,v=!!o&&!!o.noTargetGet;"function"==typeof n&&("string"!=typeof e||a(n,"name")||r(n,"name",e),l=c(n),l.source||(l.source=d.join("string"==typeof e?e:""))),t!==i?(u?!v&&t[e]&&(f=!0):delete t[e],f?t[e]=n:r(t,e,n)):f?t[e]=n:s(e,n)})(Function.prototype,"toString",(function(){return"function"==typeof this&&u(this).source||o(this)}))},7156:function(t,e,n){var i=n("861d"),r=n("d2bb");t.exports=function(t,e,n){var a,s;return r&&"function"==typeof(a=e.constructor)&&a!==n&&i(s=a.prototype)&&s!==n.prototype&&r(t,s),t}},7371:function(t,e,n){},7418:function(t,e){e.f=Object.getOwnPropertySymbols},"746f":function(t,e,n){var i=n("428f"),r=n("5135"),a=n("e538"),s=n("9bf2").f;t.exports=function(t){var e=i.Symbol||(i.Symbol={});r(e,t)||s(e,t,{value:a.f(t)})}},7839:function(t,e){t.exports=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"]},"7b0b":function(t,e,n){var i=n("1d80");t.exports=function(t){return Object(i(t))}},"7c73":function(t,e,n){var i,r=n("825a"),a=n("37e8"),s=n("7839"),o=n("d012"),l=n("1be4"),u=n("cc12"),c=n("f772"),d=">",f="<",v="prototype",h="script",p=c("IE_PROTO"),y=function(){},m=function(t){return f+h+d+t+f+"/"+h+d},g=function(t){t.write(m("")),t.close();var e=t.parentWindow.Object;return t=null,e},b=function(){var t,e=u("iframe"),n="java"+h+":";return e.style.display="none",l.appendChild(e),e.src=String(n),t=e.contentWindow.document,t.open(),t.write(m("document.F=Object")),t.close(),t.F},w=function(){try{i=document.domain&&new ActiveXObject("htmlfile")}catch(e){}w=i?g(i):b();var t=s.length;while(t--)delete w[v][s[t]];return w()};o[p]=!0,t.exports=Object.create||function(t,e){var n;return null!==t?(y[v]=r(t),n=new y,y[v]=null,n[p]=t):n=w(),void 0===e?n:a(n,e)}},"7db0":function(t,e,n){"use strict";var i=n("23e7"),r=n("b727").find,a=n("44d2"),s="find",o=!0;s in[]&&Array(1)[s]((function(){o=!1})),i({target:"Array",proto:!0,forced:o},{find:function(t){return 
r(this,t,arguments.length>1?arguments[1]:void 0)}}),a(s)},"7dd0":function(t,e,n){"use strict";var i=n("23e7"),r=n("9ed3"),a=n("e163"),s=n("d2bb"),o=n("d44e"),l=n("9112"),u=n("6eeb"),c=n("b622"),d=n("c430"),f=n("3f8c"),v=n("ae93"),h=v.IteratorPrototype,p=v.BUGGY_SAFARI_ITERATORS,y=c("iterator"),m="keys",g="values",b="entries",w=function(){return this};t.exports=function(t,e,n,c,v,D,_){r(n,e,c);var E,k,S,x=function(t){if(t===v&&j)return j;if(!p&&t in T)return T[t];switch(t){case m:return function(){return new n(this,t)};case g:return function(){return new n(this,t)};case b:return function(){return new n(this,t)}}return function(){return new n(this)}},O=e+" Iterator",C=!1,T=t.prototype,M=T[y]||T["@@iterator"]||v&&T[v],j=!p&&M||x(v),A="Array"==e&&T.entries||M;if(A&&(E=a(A.call(new t)),h!==Object.prototype&&E.next&&(d||a(E)===h||(s?s(E,h):"function"!=typeof E[y]&&l(E,y,w)),o(E,O,!0,!0),d&&(f[O]=w))),v==g&&M&&M.name!==g&&(C=!0,j=function(){return M.call(this)}),d&&!_||T[y]===j||l(T,y,j),f[e]=j,v)if(k={values:x(g),keys:D?j:x(m),entries:x(b)},_)for(S in k)(p||C||!(S in T))&&u(T,S,k[S]);else i({target:e,proto:!0,forced:p||C},k);return k}},"7f9a":function(t,e,n){var i=n("da84"),r=n("8925"),a=i.WeakMap;t.exports="function"===typeof a&&/native code/.test(r(a))},"81d5":function(t,e,n){"use strict";var i=n("7b0b"),r=n("23cb"),a=n("50c4");t.exports=function(t){var e=i(this),n=a(e.length),s=arguments.length,o=r(s>1?arguments[1]:void 0,n),l=s>2?arguments[2]:void 0,u=void 0===l?n:r(l,n);while(u>o)e[o++]=t;return e}},"825a":function(t,e,n){var i=n("861d");t.exports=function(t){if(!i(t))throw TypeError(String(t)+" is not an object");return t}},"83ab":function(t,e,n){var i=n("d039");t.exports=!i((function(){return 7!=Object.defineProperty({},1,{get:function(){return 7}})[1]}))},8418:function(t,e,n){"use strict";var i=n("c04e"),r=n("9bf2"),a=n("5c6c");t.exports=function(t,e,n){var s=i(e);s in t?r.f(t,s,a(0,n)):t[s]=n}},"857a":function(t,e,n){var 
i=n("1d80"),r=/"/g;t.exports=function(t,e,n,a){var s=String(i(t)),o="<"+e;return""!==n&&(o+=" "+n+'="'+String(a).replace(r,""")+'"'),o+">"+s+"</"+e+">"}},"861d":function(t,e){t.exports=function(t){return"object"===typeof t?null!==t:"function"===typeof t}},8875:function(t,e,n){var i,r,a;(function(n,s){r=[],i=s,a="function"===typeof i?i.apply(e,r):i,void 0===a||(t.exports=a)})("undefined"!==typeof self&&self,(function(){function t(){var e=Object.getOwnPropertyDescriptor(document,"currentScript");if(!e&&"currentScript"in document&&document.currentScript)return document.currentScript;if(e&&e.get!==t&&document.currentScript)return document.currentScript;try{throw new Error}catch(v){var n,i,r,a=/.*at [^(]*\((.*):(.+):(.+)\)$/gi,s=/@([^@]*):(\d+):(\d+)\s*$/gi,o=a.exec(v.stack)||s.exec(v.stack),l=o&&o[1]||!1,u=o&&o[2]||!1,c=document.location.href.replace(document.location.hash,""),d=document.getElementsByTagName("script");l===c&&(n=document.documentElement.outerHTML,i=new RegExp("(?:[^\\n]+?\\n){0,"+(u-2)+"}[^<]*<script>([\\d\\D]*?)<\\/script>[\\d\\D]*","i"),r=n.replace(i,"$1").trim());for(var f=0;f<d.length;f++){if("interactive"===d[f].readyState)return d[f];if(d[f].src===l)return d[f];if(l===c&&d[f].innerHTML&&d[f].innerHTML.trim()===r)return d[f]}return null}}return t}))},8925:function(t,e,n){var i=n("c6cd"),r=Function.toString;"function"!=typeof i.inspectSource&&(i.inspectSource=function(t){return r.call(t)}),t.exports=i.inspectSource},"8aa5":function(t,e,n){"use strict";var i=n("6547").charAt;t.exports=function(t,e,n){return e+(n?i(t,e).length:1)}},"8bbf":function(e,n){e.exports=t},"90e3":function(t,e){var n=0,i=Math.random();t.exports=function(t){return"Symbol("+String(void 0===t?"":t)+")_"+(++n+i).toString(36)}},9112:function(t,e,n){var i=n("83ab"),r=n("9bf2"),a=n("5c6c");t.exports=i?function(t,e,n){return r.f(t,e,a(1,n))}:function(t,e,n){return t[e]=n,t}},9263:function(t,e,n){"use strict";var 
i=n("ad6d"),r=n("9f7f"),a=RegExp.prototype.exec,s=String.prototype.replace,o=a,l=function(){var t=/a/,e=/b*/g;return a.call(t,"a"),a.call(e,"a"),0!==t.lastIndex||0!==e.lastIndex}(),u=r.UNSUPPORTED_Y||r.BROKEN_CARET,c=void 0!==/()??/.exec("")[1],d=l||c||u;d&&(o=function(t){var e,n,r,o,d=this,f=u&&d.sticky,v=i.call(d),h=d.source,p=0,y=t;return f&&(v=v.replace("y",""),-1===v.indexOf("g")&&(v+="g"),y=String(t).slice(d.lastIndex),d.lastIndex>0&&(!d.multiline||d.multiline&&"\n"!==t[d.lastIndex-1])&&(h="(?: "+h+")",y=" "+y,p++),n=new RegExp("^(?:"+h+")",v)),c&&(n=new RegExp("^"+h+"$(?!\\s)",v)),l&&(e=d.lastIndex),r=a.call(f?n:d,y),f?r?(r.input=r.input.slice(p),r[0]=r[0].slice(p),r.index=d.lastIndex,d.lastIndex+=r[0].length):d.lastIndex=0:l&&r&&(d.lastIndex=d.global?r.index+r[0].length:e),c&&r&&r.length>1&&s.call(r[0],n,(function(){for(o=1;o<arguments.length-2;o++)void 0===arguments[o]&&(r[o]=void 0)})),r}),t.exports=o},"94ca":function(t,e,n){var i=n("d039"),r=/#|\.prototype\./,a=function(t,e){var n=o[s(t)];return n==u||n!=l&&("function"==typeof e?i(e):!!e)},s=a.normalize=function(t){return String(t).replace(r,".").toLowerCase()},o=a.data={},l=a.NATIVE="N",u=a.POLYFILL="P";t.exports=a},"95dd":function(t,e,n){"use strict";n("7371")},9735:function(t,e,n){"use strict";n("2170")},"99af":function(t,e,n){"use strict";var i=n("23e7"),r=n("d039"),a=n("e8b5"),s=n("861d"),o=n("7b0b"),l=n("50c4"),u=n("8418"),c=n("65f0"),d=n("1dde"),f=n("b622"),v=n("2d00"),h=f("isConcatSpreadable"),p=9007199254740991,y="Maximum allowed index exceeded",m=v>=51||!r((function(){var t=[];return t[h]=!1,t.concat()[0]!==t})),g=d("concat"),b=function(t){if(!s(t))return!1;var e=t[h];return void 0!==e?!!e:a(t)},w=!m||!g;i({target:"Array",proto:!0,forced:w},{concat:function(t){var e,n,i,r,a,s=o(this),d=c(s,0),f=0;for(e=-1,i=arguments.length;e<i;e++)if(a=-1===e?s:arguments[e],b(a)){if(r=l(a.length),f+r>p)throw TypeError(y);for(n=0;n<r;n++,f++)n in a&&u(d,f,a[n])}else{if(f>=p)throw TypeError(y);u(d,f++,a)}return 
d.length=f,d}})},"9bdd":function(t,e,n){var i=n("825a"),r=n("2a62");t.exports=function(t,e,n,a){try{return a?e(i(n)[0],n[1]):e(n)}catch(s){throw r(t),s}}},"9bf2":function(t,e,n){var i=n("83ab"),r=n("0cfb"),a=n("825a"),s=n("c04e"),o=Object.defineProperty;e.f=i?o:function(t,e,n){if(a(t),e=s(e,!0),a(n),r)try{return o(t,e,n)}catch(i){}if("get"in n||"set"in n)throw TypeError("Accessors not supported");return"value"in n&&(t[e]=n.value),t}},"9ed3":function(t,e,n){"use strict";var i=n("ae93").IteratorPrototype,r=n("7c73"),a=n("5c6c"),s=n("d44e"),o=n("3f8c"),l=function(){return this};t.exports=function(t,e,n){var u=e+" Iterator";return t.prototype=r(i,{next:a(1,n)}),s(t,u,!1,!0),o[u]=l,t}},"9f7f":function(t,e,n){"use strict";var i=n("d039");function r(t,e){return RegExp(t,e)}e.UNSUPPORTED_Y=i((function(){var t=r("a","y");return t.lastIndex=2,null!=t.exec("abcd")})),e.BROKEN_CARET=i((function(){var t=r("^r","gy");return t.lastIndex=2,null!=t.exec("str")}))},a15b:function(t,e,n){"use strict";var i=n("23e7"),r=n("44ad"),a=n("fc6a"),s=n("a640"),o=[].join,l=r!=Object,u=s("join",",");i({target:"Array",proto:!0,forced:l||!u},{join:function(t){return o.call(a(this),void 0===t?",":t)}})},a434:function(t,e,n){"use strict";var i=n("23e7"),r=n("23cb"),a=n("a691"),s=n("50c4"),o=n("7b0b"),l=n("65f0"),u=n("8418"),c=n("1dde"),d=c("splice"),f=Math.max,v=Math.min,h=9007199254740991,p="Maximum allowed length exceeded";i({target:"Array",proto:!0,forced:!d},{splice:function(t,e){var n,i,c,d,y,m,g=o(this),b=s(g.length),w=r(t,b),D=arguments.length;if(0===D?n=i=0:1===D?(n=0,i=b-w):(n=D-2,i=v(f(a(e),0),b-w)),b+n-i>h)throw TypeError(p);for(c=l(g,i),d=0;d<i;d++)y=w+d,y in g&&u(c,d,g[y]);if(c.length=i,n<i){for(d=w;d<b-i;d++)y=d+i,m=d+n,y in g?g[m]=g[y]:delete g[m];for(d=b;d>b-i+n;d--)delete g[d-1]}else if(n>i)for(d=b-i;d>w;d--)y=d+i-1,m=d+n-1,y in g?g[m]=g[y]:delete g[m];for(d=0;d<n;d++)g[d+w]=arguments[d+2];return g.length=b-i+n,c}})},a4b4:function(t,e,n){var 
i=n("342f");t.exports=/web0s(?!.*chrome)/i.test(i)},a4d3:function(t,e,n){"use strict";var i=n("23e7"),r=n("da84"),a=n("d066"),s=n("c430"),o=n("83ab"),l=n("4930"),u=n("fdbf"),c=n("d039"),d=n("5135"),f=n("e8b5"),v=n("861d"),h=n("825a"),p=n("7b0b"),y=n("fc6a"),m=n("c04e"),g=n("5c6c"),b=n("7c73"),w=n("df75"),D=n("241c"),_=n("057f"),E=n("7418"),k=n("06cf"),S=n("9bf2"),x=n("d1e7"),O=n("9112"),C=n("6eeb"),T=n("5692"),M=n("f772"),j=n("d012"),A=n("90e3"),W=n("b622"),V=n("e538"),I=n("746f"),P=n("d44e"),H=n("69f3"),L=n("b727").forEach,N=M("hidden"),Y="Symbol",F="prototype",R=W("toPrimitive"),$=H.set,B=H.getterFor(Y),z=Object[F],U=r.Symbol,G=a("JSON","stringify"),q=k.f,X=S.f,K=_.f,J=x.f,Q=T("symbols"),Z=T("op-symbols"),tt=T("string-to-symbol-registry"),et=T("symbol-to-string-registry"),nt=T("wks"),it=r.QObject,rt=!it||!it[F]||!it[F].findChild,at=o&&c((function(){return 7!=b(X({},"a",{get:function(){return X(this,"a",{value:7}).a}})).a}))?function(t,e,n){var i=q(z,e);i&&delete z[e],X(t,e,n),i&&t!==z&&X(z,e,i)}:X,st=function(t,e){var n=Q[t]=b(U[F]);return $(n,{type:Y,tag:t,description:e}),o||(n.description=e),n},ot=u?function(t){return"symbol"==typeof t}:function(t){return Object(t)instanceof U},lt=function(t,e,n){t===z&<(Z,e,n),h(t);var i=m(e,!0);return h(n),d(Q,i)?(n.enumerable?(d(t,N)&&t[N][i]&&(t[N][i]=!1),n=b(n,{enumerable:g(0,!1)})):(d(t,N)||X(t,N,g(1,{})),t[N][i]=!0),at(t,i,n)):X(t,i,n)},ut=function(t,e){h(t);var n=y(e),i=w(n).concat(ht(n));return L(i,(function(e){o&&!dt.call(n,e)||lt(t,e,n[e])})),t},ct=function(t,e){return void 0===e?b(t):ut(b(t),e)},dt=function(t){var e=m(t,!0),n=J.call(this,e);return!(this===z&&d(Q,e)&&!d(Z,e))&&(!(n||!d(this,e)||!d(Q,e)||d(this,N)&&this[N][e])||n)},ft=function(t,e){var n=y(t),i=m(e,!0);if(n!==z||!d(Q,i)||d(Z,i)){var r=q(n,i);return!r||!d(Q,i)||d(n,N)&&n[N][i]||(r.enumerable=!0),r}},vt=function(t){var e=K(y(t)),n=[];return L(e,(function(t){d(Q,t)||d(j,t)||n.push(t)})),n},ht=function(t){var e=t===z,n=K(e?Z:y(t)),i=[];return 
L(n,(function(t){!d(Q,t)||e&&!d(z,t)||i.push(Q[t])})),i};if(l||(U=function(){if(this instanceof U)throw TypeError("Symbol is not a constructor");var t=arguments.length&&void 0!==arguments[0]?String(arguments[0]):void 0,e=A(t),n=function(t){this===z&&n.call(Z,t),d(this,N)&&d(this[N],e)&&(this[N][e]=!1),at(this,e,g(1,t))};return o&&rt&&at(z,e,{configurable:!0,set:n}),st(e,t)},C(U[F],"toString",(function(){return B(this).tag})),C(U,"withoutSetter",(function(t){return st(A(t),t)})),x.f=dt,S.f=lt,k.f=ft,D.f=_.f=vt,E.f=ht,V.f=function(t){return st(W(t),t)},o&&(X(U[F],"description",{configurable:!0,get:function(){return B(this).description}}),s||C(z,"propertyIsEnumerable",dt,{unsafe:!0}))),i({global:!0,wrap:!0,forced:!l,sham:!l},{Symbol:U}),L(w(nt),(function(t){I(t)})),i({target:Y,stat:!0,forced:!l},{for:function(t){var e=String(t);if(d(tt,e))return tt[e];var n=U(e);return tt[e]=n,et[n]=e,n},keyFor:function(t){if(!ot(t))throw TypeError(t+" is not a symbol");if(d(et,t))return et[t]},useSetter:function(){rt=!0},useSimple:function(){rt=!1}}),i({target:"Object",stat:!0,forced:!l,sham:!o},{create:ct,defineProperty:lt,defineProperties:ut,getOwnPropertyDescriptor:ft}),i({target:"Object",stat:!0,forced:!l},{getOwnPropertyNames:vt,getOwnPropertySymbols:ht}),i({target:"Object",stat:!0,forced:c((function(){E.f(1)}))},{getOwnPropertySymbols:function(t){return E.f(p(t))}}),G){var pt=!l||c((function(){var t=U();return"[null]"!=G([t])||"{}"!=G({a:t})||"{}"!=G(Object(t))}));i({target:"JSON",stat:!0,forced:pt},{stringify:function(t,e,n){var i,r=[t],a=1;while(arguments.length>a)r.push(arguments[a++]);if(i=e,(v(e)||void 0!==t)&&!ot(t))return f(e)||(e=function(t,e){if("function"==typeof i&&(e=i.call(this,t,e)),!ot(e))return e}),r[1]=e,G.apply(null,r)}})}U[F][R]||O(U[F],R,U[F].valueOf),P(U,Y),j[N]=!0},a630:function(t,e,n){var i=n("23e7"),r=n("4df4"),a=n("1c7e"),s=!a((function(t){Array.from(t)}));i({target:"Array",stat:!0,forced:s},{from:r})},a640:function(t,e,n){"use strict";var 
i=n("d039");t.exports=function(t,e){var n=[][t];return!!n&&i((function(){n.call(null,e||function(){throw 1},1)}))}},a691:function(t,e){var n=Math.ceil,i=Math.floor;t.exports=function(t){return isNaN(t=+t)?0:(t>0?i:n)(t)}},a9e3:function(t,e,n){"use strict";var i=n("83ab"),r=n("da84"),a=n("94ca"),s=n("6eeb"),o=n("5135"),l=n("c6b6"),u=n("7156"),c=n("c04e"),d=n("d039"),f=n("7c73"),v=n("241c").f,h=n("06cf").f,p=n("9bf2").f,y=n("58a8").trim,m="Number",g=r[m],b=g.prototype,w=l(f(b))==m,D=function(t){var e,n,i,r,a,s,o,l,u=c(t,!1);if("string"==typeof u&&u.length>2)if(u=y(u),e=u.charCodeAt(0),43===e||45===e){if(n=u.charCodeAt(2),88===n||120===n)return NaN}else if(48===e){switch(u.charCodeAt(1)){case 66:case 98:i=2,r=49;break;case 79:case 111:i=8,r=55;break;default:return+u}for(a=u.slice(2),s=a.length,o=0;o<s;o++)if(l=a.charCodeAt(o),l<48||l>r)return NaN;return parseInt(a,i)}return+u};if(a(m,!g(" 0o1")||!g("0b1")||g("+0x1"))){for(var _,E=function(t){var e=arguments.length<1?0:t,n=this;return n instanceof E&&(w?d((function(){b.valueOf.call(n)})):l(n)!=m)?u(new g(D(e)),n,E):D(e)},k=i?v(g):"MAX_VALUE,MIN_VALUE,NaN,NEGATIVE_INFINITY,POSITIVE_INFINITY,EPSILON,isFinite,isInteger,isNaN,isSafeInteger,MAX_SAFE_INTEGER,MIN_SAFE_INTEGER,parseFloat,parseInt,isInteger,fromString,range".split(","),S=0;k.length>S;S++)o(g,_=k[S])&&!o(E,_)&&p(E,_,h(g,_));E.prototype=b,b.constructor=E,s(r,m,E)}},ab13:function(t,e,n){var i=n("b622"),r=i("match");t.exports=function(t){var e=/./;try{"/./"[t](e)}catch(n){try{return e[r]=!1,"/./"[t](e)}catch(i){}}return!1}},ac1f:function(t,e,n){"use strict";var i=n("23e7"),r=n("9263");i({target:"RegExp",proto:!0,forced:/./.exec!==r},{exec:r})},ad6d:function(t,e,n){"use strict";var i=n("825a");t.exports=function(){var t=i(this),e="";return t.global&&(e+="g"),t.ignoreCase&&(e+="i"),t.multiline&&(e+="m"),t.dotAll&&(e+="s"),t.unicode&&(e+="u"),t.sticky&&(e+="y"),e}},ade3:function(t,e,n){"use strict";function i(t,e,n){return e in 
t?Object.defineProperty(t,e,{value:n,enumerable:!0,configurable:!0,writable:!0}):t[e]=n,t}n.d(e,"a",(function(){return i}))},ae93:function(t,e,n){"use strict";var i,r,a,s=n("d039"),o=n("e163"),l=n("9112"),u=n("5135"),c=n("b622"),d=n("c430"),f=c("iterator"),v=!1,h=function(){return this};[].keys&&(a=[].keys(),"next"in a?(r=o(o(a)),r!==Object.prototype&&(i=r)):v=!0);var p=void 0==i||s((function(){var t={};return i[f].call(t)!==t}));p&&(i={}),d&&!p||u(i,f)||l(i,f,h),t.exports={IteratorPrototype:i,BUGGY_SAFARI_ITERATORS:v}},af03:function(t,e,n){var i=n("d039");t.exports=function(t){return i((function(){var e=""[t]('"');return e!==e.toLowerCase()||e.split('"').length>3}))}},b041:function(t,e,n){"use strict";var i=n("00ee"),r=n("f5df");t.exports=i?{}.toString:function(){return"[object "+r(this)+"]"}},b0c0:function(t,e,n){var i=n("83ab"),r=n("9bf2").f,a=Function.prototype,s=a.toString,o=/^\s*function ([^ (]*)/,l="name";i&&!(l in a)&&r(a,l,{configurable:!0,get:function(){try{return s.call(this).match(o)[1]}catch(t){return""}}})},b2b6:function(t,e,n){},b575:function(t,e,n){var i,r,a,s,o,l,u,c,d=n("da84"),f=n("06cf").f,v=n("2cf4").set,h=n("1cdc"),p=n("a4b4"),y=n("605d"),m=d.MutationObserver||d.WebKitMutationObserver,g=d.document,b=d.process,w=d.Promise,D=f(d,"queueMicrotask"),_=D&&D.value;_||(i=function(){var t,e;y&&(t=b.domain)&&t.exit();while(r){e=r.fn,r=r.next;try{e()}catch(n){throw r?s():a=void 0,n}}a=void 0,t&&t.enter()},h||y||p||!m||!g?w&&w.resolve?(u=w.resolve(void 0),c=u.then,s=function(){c.call(u,i)}):s=y?function(){b.nextTick(i)}:function(){v.call(d,i)}:(o=!0,l=g.createTextNode(""),new m(i).observe(l,{characterData:!0}),s=function(){l.data=o=!o})),t.exports=_||function(t){var e={fn:t,next:void 0};a&&(a.next=e),r||(r=e,s()),a=e}},b622:function(t,e,n){var i=n("da84"),r=n("5692"),a=n("5135"),s=n("90e3"),o=n("4930"),l=n("fdbf"),u=r("wks"),c=i.Symbol,d=l?c:c&&c.withoutSetter||s;t.exports=function(t){return a(u,t)&&(o||"string"==typeof 
u[t])||(o&&a(c,t)?u[t]=c[t]:u[t]=d("Symbol."+t)),u[t]}},b64b:function(t,e,n){var i=n("23e7"),r=n("7b0b"),a=n("df75"),s=n("d039"),o=s((function(){a(1)}));i({target:"Object",stat:!0,forced:o},{keys:function(t){return a(r(t))}})},b727:function(t,e,n){var i=n("0366"),r=n("44ad"),a=n("7b0b"),s=n("50c4"),o=n("65f0"),l=[].push,u=function(t){var e=1==t,n=2==t,u=3==t,c=4==t,d=6==t,f=7==t,v=5==t||d;return function(h,p,y,m){for(var g,b,w=a(h),D=r(w),_=i(p,y,3),E=s(D.length),k=0,S=m||o,x=e?S(h,E):n||f?S(h,0):void 0;E>k;k++)if((v||k in D)&&(g=D[k],b=_(g,k,w),t))if(e)x[k]=b;else if(b)switch(t){case 3:return!0;case 5:return g;case 6:return k;case 2:l.call(x,g)}else switch(t){case 4:return!1;case 7:l.call(x,g)}return d?-1:u||c?c:x}};t.exports={forEach:u(0),map:u(1),filter:u(2),some:u(3),every:u(4),find:u(5),findIndex:u(6),filterOut:u(7)}},bb2f:function(t,e,n){var i=n("d039");t.exports=!i((function(){return Object.isExtensible(Object.preventExtensions({}))}))},bee2:function(t,e,n){"use strict";function i(t,e){for(var n=0;n<e.length;n++){var i=e[n];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(t,i.key,i)}}function r(t,e,n){return e&&i(t.prototype,e),n&&i(t,n),t}n.d(e,"a",(function(){return r}))},c04e:function(t,e,n){var i=n("861d");t.exports=function(t,e){if(!i(t))return t;var n,r;if(e&&"function"==typeof(n=t.toString)&&!i(r=n.call(t)))return r;if("function"==typeof(n=t.valueOf)&&!i(r=n.call(t)))return r;if(!e&&"function"==typeof(n=t.toString)&&!i(r=n.call(t)))return r;throw TypeError("Can't convert object to primitive value")}},c430:function(t,e){t.exports=!1},c6b6:function(t,e){var n={}.toString;t.exports=function(t){return n.call(t).slice(8,-1)}},c6cd:function(t,e,n){var i=n("da84"),r=n("ce4e"),a="__core-js_shared__",s=i[a]||r(a,{});t.exports=s},c740:function(t,e,n){"use strict";var i=n("23e7"),r=n("b727").findIndex,a=n("44d2"),s="findIndex",o=!0;s 
in[]&&Array(1)[s]((function(){o=!1})),i({target:"Array",proto:!0,forced:o},{findIndex:function(t){return r(this,t,arguments.length>1?arguments[1]:void 0)}}),a(s)},c8ba:function(t,e){var n;n=function(){return this}();try{n=n||new Function("return this")()}catch(i){"object"===typeof window&&(n=window)}t.exports=n},c96a:function(t,e,n){"use strict";var i=n("23e7"),r=n("857a"),a=n("af03");i({target:"String",proto:!0,forced:a("small")},{small:function(){return r(this,"small","","")}})},ca84:function(t,e,n){var i=n("5135"),r=n("fc6a"),a=n("4d64").indexOf,s=n("d012");t.exports=function(t,e){var n,o=r(t),l=0,u=[];for(n in o)!i(s,n)&&i(o,n)&&u.push(n);while(e.length>l)i(o,n=e[l++])&&(~a(u,n)||u.push(n));return u}},caad:function(t,e,n){"use strict";var i=n("23e7"),r=n("4d64").includes,a=n("44d2");i({target:"Array",proto:!0},{includes:function(t){return r(this,t,arguments.length>1?arguments[1]:void 0)}}),a("includes")},cb29:function(t,e,n){var i=n("23e7"),r=n("81d5"),a=n("44d2");i({target:"Array",proto:!0},{fill:r}),a("fill")},cc12:function(t,e,n){var i=n("da84"),r=n("861d"),a=i.document,s=r(a)&&r(a.createElement);t.exports=function(t){return s?a.createElement(t):{}}},cca6:function(t,e,n){var i=n("23e7"),r=n("60da");i({target:"Object",stat:!0,forced:Object.assign!==r},{assign:r})},cdf9:function(t,e,n){var i=n("825a"),r=n("861d"),a=n("f069");t.exports=function(t,e){if(i(t),r(e)&&e.constructor===t)return e;var n=a.f(t),s=n.resolve;return s(e),n.promise}},ce4e:function(t,e,n){var i=n("da84"),r=n("9112");t.exports=function(t,e){try{r(i,t,e)}catch(n){i[t]=e}return e}},d012:function(t,e){t.exports={}},d039:function(t,e){t.exports=function(t){try{return!!t()}catch(e){return!0}}},d066:function(t,e,n){var i=n("428f"),r=n("da84"),a=function(t){return"function"==typeof t?t:void 0};t.exports=function(t,e){return arguments.length<2?a(i[t])||a(r[t]):i[t]&&i[t][e]||r[t]&&r[t][e]}},d1e7:function(t,e,n){"use strict";var 
i={}.propertyIsEnumerable,r=Object.getOwnPropertyDescriptor,a=r&&!i.call({1:2},1);e.f=a?function(t){var e=r(this,t);return!!e&&e.enumerable}:i},d28b:function(t,e,n){var i=n("746f");i("iterator")},d2bb:function(t,e,n){var i=n("825a"),r=n("3bbe");t.exports=Object.setPrototypeOf||("__proto__"in{}?function(){var t,e=!1,n={};try{t=Object.getOwnPropertyDescriptor(Object.prototype,"__proto__").set,t.call(n,[]),e=n instanceof Array}catch(a){}return function(n,a){return i(n),r(a),e?t.call(n,a):n.__proto__=a,n}}():void 0)},d3b7:function(t,e,n){var i=n("00ee"),r=n("6eeb"),a=n("b041");i||r(Object.prototype,"toString",a,{unsafe:!0})},d44e:function(t,e,n){var i=n("9bf2").f,r=n("5135"),a=n("b622"),s=a("toStringTag");t.exports=function(t,e,n){t&&!r(t=n?t:t.prototype,s)&&i(t,s,{configurable:!0,value:e})}},d4ec:function(t,e,n){"use strict";function i(t,e){if(!(t instanceof e))thro
|
new TypeError("Cannot call a class as a function")}n.d(e,"a",(function(){return i}))},d58f:function(t,e,n){var i=n("1c0b"),r=n("7b0b"),a=n("44ad"),s=n("50c4"),o=function(t){return function(e,n,o,l){i(n);var u=r(e),c=a(u),d=s(u.length),f=t?d-1:0,v=t?-1:1;if(o<2)while(1){if(f in c){l=c[f],f+=v;break}if(f+=v,t?f<0:d<=f)throw TypeError("Reduce of empty array with no initial value")}for(;t?f>=0:d>f;f+=v)f in c&&(l=n(l,c[f],f,u));return l}};t.exports={left:o(!1),right:o(!0)}},d784:function(t,e,n){"use strict";n("ac1f");var i=n("6eeb"),r=n("d039"),a=n("b622"),s=n("9263"),o=n("9112"),l=a("species"),u=!r((function(){var t=/./;return t.exec=function(){var t=[];return t.groups={a:"7"},t},"7"!=="".replace(t,"$<a>")})),c=function(){return"$0"==="a".replace(/./,"$0")}(),d=a("replace"),f=function(){return!!/./[d]&&""===/./[d]("a","$0")}(),v=!r((function(){var t=/(?:)/,e=t.exec;t.exec=function(){return e.apply(this,arguments)};var n="ab".split(t);return 2!==n.length||"a"!==n[0]||"b"!==n[1]}));t.exports=function(t,e,n,d){var h=a(t),p=!r((function(){var e={};return e[h]=function(){return 7},7!=""[t](e)})),y=p&&!r((function(){var e=!1,n=/a/;return"split"===t&&(n={},n.constructor={},n.constructor[l]=function(){return n},n.flags="",n[h]=/./[h]),n.exec=function(){return e=!0,null},n[h](""),!e}));if(!p||!y||"replace"===t&&(!u||!c||f)||"split"===t&&!v){var m=/./[h],g=n(h,""[t],(function(t,e,n,i,r){return e.exec===s?p&&!r?{done:!0,value:m.call(e,n,i)}:{done:!0,value:t.call(n,e,i)}:{done:!1}}),{REPLACE_KEEPS_$0:c,REGEXP_REPLACE_SUBSTITUTES_UNDEFINED_CAPTURE:f}),b=g[0],w=g[1];i(String.prototype,t,b),i(RegExp.prototype,h,2==e?function(t,e){return w.call(t,this,e)}:function(t){return w.call(t,this)})}d&&o(RegExp.prototype[h],"sham",!0)}},d81d:function(t,e,n){"use strict";var i=n("23e7"),r=n("b727").map,a=n("1dde"),s=a("map");i({target:"Array",proto:!0,forced:!s},{map:function(t){return r(this,t,arguments.length>1?arguments[1]:void 0)}})},da84:function(t,e,n){(function(e){var 
n=function(t){return t&&t.Math==Math&&t};t.exports=n("object"==typeof globalThis&&globalThis)||n("object"==typeof window&&window)||n("object"==typeof self&&self)||n("object"==typeof e&&e)||function(){return this}()||Function("return this")()}).call(this,n("c8ba"))},dbb4:function(t,e,n){var i=n("23e7"),r=n("83ab"),a=n("56ef"),s=n("fc6a"),o=n("06cf"),l=n("8418");i({target:"Object",stat:!0,sham:!r},{getOwnPropertyDescriptors:function(t){var e,n,i=s(t),r=o.f,u=a(i),c={},d=0;while(u.length>d)n=r(i,e=u[d++]),void 0!==n&&l(c,e,n);return c}})},dc34:function(t,e,n){"use strict";n("b2b6")},ddb0:function(t,e,n){var i=n("da84"),r=n("fdbc"),a=n("e260"),s=n("9112"),o=n("b622"),l=o("iterator"),u=o("toStringTag"),c=a.values;for(var d in r){var f=i[d],v=f&&f.prototype;if(v){if(v[l]!==c)try{s(v,l,c)}catch(p){v[l]=c}if(v[u]||s(v,u,d),r[d])for(var h in a)if(v[h]!==a[h])try{s(v,h,a[h])}catch(p){v[h]=a[h]}}}},df75:function(t,e,n){var i=n("ca84"),r=n("7839");t.exports=Object.keys||function(t){return i(t,r)}},e01a:function(t,e,n){"use strict";var i=n("23e7"),r=n("83ab"),a=n("da84"),s=n("5135"),o=n("861d"),l=n("9bf2").f,u=n("e893"),c=a.Symbol;if(r&&"function"==typeof c&&(!("description"in c.prototype)||void 0!==c().description)){var d={},f=function(){var t=arguments.length<1||void 0===arguments[0]?void 0:String(arguments[0]),e=this instanceof f?new c(t):void 0===t?c():c(t);return""===t&&(d[e]=!0),e};u(f,c);var v=f.prototype=c.prototype;v.constructor=f;var h=v.toString,p="Symbol(test)"==String(c("test")),y=/^Symbol\((.*)\)[^)]+$/;l(v,"description",{configurable:!0,get:function(){var t=o(this)?this.valueOf():this,e=h.call(t);if(s(d,t))return"";var n=p?e.slice(7,-1):e.replace(y,"$1");return""===n?void 0:n}}),i({global:!0,forced:!0},{Symbol:f})}},e163:function(t,e,n){var i=n("5135"),r=n("7b0b"),a=n("f772"),s=n("e177"),o=a("IE_PROTO"),l=Object.prototype;t.exports=s?Object.getPrototypeOf:function(t){return t=r(t),i(t,o)?t[o]:"function"==typeof t.constructor&&t instanceof 
t.constructor?t.constructor.prototype:t instanceof Object?l:null}},e177:function(t,e,n){var i=n("d039");t.exports=!i((function(){function t(){}return t.prototype.constructor=null,Object.getPrototypeOf(new t)!==t.prototype}))},e260:function(t,e,n){"use strict";var i=n("fc6a"),r=n("44d2"),a=n("3f8c"),s=n("69f3"),o=n("7dd0"),l="Array Iterator",u=s.set,c=s.getterFor(l);t.exports=o(Array,"Array",(function(t,e){u(this,{type:l,target:i(t),index:0,kind:e})}),(function(){var t=c(this),e=t.target,n=t.kind,i=t.index++;return!e||i>=e.length?(t.target=void 0,{value:void 0,done:!0}):"keys"==n?{value:i,done:!1}:"values"==n?{value:e[i],done:!1}:{value:[i,e[i]],done:!1}}),"values"),a.Arguments=a.Array,r("keys"),r("values"),r("entries")},e2cc:function(t,e,n){var i=n("6eeb");t.exports=function(t,e,n){for(var r in e)i(t,r,e[r],n);return t}},e439:function(t,e,n){var i=n("23e7"),r=n("d039"),a=n("fc6a"),s=n("06cf").f,o=n("83ab"),l=r((function(){s(1)})),u=!o||l;i({target:"Object",stat:!0,forced:u,sham:!o},{getOwnPropertyDescriptor:function(t,e){return s(a(t),e)}})},e538:function(t,e,n){var i=n("b622");e.f=i},e667:function(t,e){t.exports=function(t){try{return{error:!1,value:t()}}catch(e){return{error:!0,value:e}}}},e6cf:function(t,e,n){"use strict";var i,r,a,s,o=n("23e7"),l=n("c430"),u=n("da84"),c=n("d066"),d=n("fea9"),f=n("6eeb"),v=n("e2cc"),h=n("d44e"),p=n("2626"),y=n("861d"),m=n("1c0b"),g=n("19aa"),b=n("8925"),w=n("2266"),D=n("1c7e"),_=n("4840"),E=n("2cf4").set,k=n("b575"),S=n("cdf9"),x=n("44de"),O=n("f069"),C=n("e667"),T=n("69f3"),M=n("94ca"),j=n("b622"),A=n("605d"),W=n("2d00"),V=j("species"),I="Promise",P=T.get,H=T.set,L=T.getterFor(I),N=d,Y=u.TypeError,F=u.document,R=u.process,$=c("fetch"),B=O.f,z=B,U=!!(F&&F.createEvent&&u.dispatchEvent),G="function"==typeof PromiseRejectionEvent,q="unhandledrejection",X="rejectionhandled",K=0,J=1,Q=2,Z=1,tt=2,et=M(I,(function(){var 
t=b(N)!==String(N);if(!t){if(66===W)return!0;if(!A&&!G)return!0}if(l&&!N.prototype["finally"])return!0;if(W>=51&&/native code/.test(N))return!1;var e=N.resolve(1),n=function(t){t((function(){}),(function(){}))},i=e.constructor={};return i[V]=n,!(e.then((function(){}))instanceof n)})),nt=et||!D((function(t){N.all(t)["catch"]((function(){}))})),it=function(t){var e;return!(!y(t)||"function"!=typeof(e=t.then))&&e},rt=function(t,e){if(!t.notified){t.notified=!0;var n=t.reactions;k((function(){var i=t.value,r=t.state==J,a=0;while(n.length>a){var s,o,l,u=n[a++],c=r?u.ok:u.fail,d=u.resolve,f=u.reject,v=u.domain;try{c?(r||(t.rejection===tt&<(t),t.rejection=Z),!0===c?s=i:(v&&v.enter(),s=c(i),v&&(v.exit(),l=!0)),s===u.promise?f(Y("Promise-chain cycle")):(o=it(s))?o.call(s,d,f):d(s)):f(i)}catch(h){v&&!l&&v.exit(),f(h)}}t.reactions=[],t.notified=!1,e&&!t.rejection&&st(t)}))}},at=function(t,e,n){var i,r;U?(i=F.createEvent("Event"),i.promise=e,i.reason=n,i.initEvent(t,!1,!0),u.dispatchEvent(i)):i={promise:e,reason:n},!G&&(r=u["on"+t])?r(i):t===q&&x("Unhandled promise rejection",n)},st=function(t){E.call(u,(function(){var e,n=t.facade,i=t.value,r=ot(t);if(r&&(e=C((function(){A?R.emit("unhandledRejection",i,n):at(q,n,i)})),t.rejection=A||ot(t)?tt:Z,e.error))throw e.value}))},ot=function(t){return t.rejection!==Z&&!t.parent},lt=function(t){E.call(u,(function(){var e=t.facade;A?R.emit("rejectionHandled",e):at(X,e,t.value)}))},ut=function(t,e,n){return function(i){t(e,i,n)}},ct=function(t,e,n){t.done||(t.done=!0,n&&(t=n),t.value=e,t.state=Q,rt(t,!0))},dt=function(t,e,n){if(!t.done){t.done=!0,n&&(t=n);try{if(t.facade===e)throw Y("Promise can't be resolved itself");var i=it(e);i?k((function(){var n={done:!1};try{i.call(e,ut(dt,n,t),ut(ct,n,t))}catch(r){ct(n,r,t)}})):(t.value=e,t.state=J,rt(t,!1))}catch(r){ct({done:!1},r,t)}}};et&&(N=function(t){g(this,N,I),m(t),i.call(this);var 
e=P(this);try{t(ut(dt,e),ut(ct,e))}catch(n){ct(e,n)}},i=function(t){H(this,{type:I,done:!1,notified:!1,parent:!1,reactions:[],rejection:!1,state:K,value:void 0})},i.prototype=v(N.prototype,{then:function(t,e){var n=L(this),i=B(_(this,N));return i.ok="function"!=typeof t||t,i.fail="function"==typeof e&&e,i.domain=A?R.domain:void 0,n.parent=!0,n.reactions.push(i),n.state!=K&&rt(n,!1),i.promise},catch:function(t){return this.then(void 0,t)}}),r=function(){var t=new i,e=P(t);this.promise=t,this.resolve=ut(dt,e),this.reject=ut(ct,e)},O.f=B=function(t){return t===N||t===a?new r(t):z(t)},l||"function"!=typeof d||(s=d.prototype.then,f(d.prototype,"then",(function(t,e){var n=this;return new N((function(t,e){s.call(n,t,e)})).then(t,e)}),{unsafe:!0}),"function"==typeof $&&o({global:!0,enumerable:!0,forced:!0},{fetch:function(t){return S(N,$.apply(u,arguments))}}))),o({global:!0,wrap:!0,forced:et},{Promise:N}),h(N,I,!1,!0),p(I),a=c(I),o({target:I,stat:!0,forced:et},{reject:function(t){var e=B(this);return e.reject.call(void 0,t),e.promise}}),o({target:I,stat:!0,forced:l||et},{resolve:function(t){return S(l&&this===a?N:this,t)}}),o({target:I,stat:!0,forced:nt},{all:function(t){var e=this,n=B(e),i=n.resolve,r=n.reject,a=C((function(){var n=m(e.resolve),a=[],s=0,o=1;w(t,(function(t){var l=s++,u=!1;a.push(void 0),o++,n.call(e,t).then((function(t){u||(u=!0,a[l]=t,--o||i(a))}),r)})),--o||i(a)}));return a.error&&r(a.value),n.promise},race:function(t){var e=this,n=B(e),i=n.reject,r=C((function(){var r=m(e.resolve);w(t,(function(t){r.call(e,t).then(n.resolve,i)}))}));return r.error&&i(r.value),n.promise}})},e893:function(t,e,n){var i=n("5135"),r=n("56ef"),a=n("06cf"),s=n("9bf2");t.exports=function(t,e){for(var n=r(e),o=s.f,l=a.f,u=0;u<n.length;u++){var c=n[u];i(t,c)||o(t,c,l(e,c))}}},e8b5:function(t,e,n){var i=n("c6b6");t.exports=Array.isArray||function(t){return"Array"==i(t)}},e95a:function(t,e,n){var 
i=n("b622"),r=n("3f8c"),a=i("iterator"),s=Array.prototype;t.exports=function(t){return void 0!==t&&(r.Array===t||s[a]===t)}},f069:function(t,e,n){"use strict";var i=n("1c0b"),r=function(t){var e,n;this.promise=new t((function(t,i){if(void 0!==e||void 0!==n)throw TypeError("Bad Promise constructor");e=t,n=i})),this.resolve=i(e),this.reject=i(n)};t.exports.f=function(t){return new r(t)}},f183:function(t,e,n){var i=n("d012"),r=n("861d"),a=n("5135"),s=n("9bf2").f,o=n("90e3"),l=n("bb2f"),u=o("meta"),c=0,d=Object.isExtensible||function(){return!0},f=function(t){s(t,u,{value:{objectID:"O"+ ++c,weakData:{}}})},v=function(t,e){if(!r(t))return"symbol"==typeof t?t:("string"==typeof t?"S":"P")+t;if(!a(t,u)){if(!d(t))return"F";if(!e)return"E";f(t)}return t[u].objectID},h=function(t,e){if(!a(t,u)){if(!d(t))return!0;if(!e)return!1;f(t)}return t[u].weakData},p=function(t){return l&&y.REQUIRED&&d(t)&&!a(t,u)&&f(t),t},y=t.exports={REQUIRED:!1,fastKey:v,getWeakData:h,onFreeze:p};i[u]=!0},f5df:function(t,e,n){var i=n("00ee"),r=n("c6b6"),a=n("b622"),s=a("toStringTag"),o="Arguments"==r(function(){return arguments}()),l=function(t,e){try{return t[e]}catch(n){}};t.exports=i?r:function(t){var e,n,i;return void 0===t?"Undefined":null===t?"Null":"string"==typeof(n=l(e=Object(t),s))?n:o?r(e):"Object"==(i=r(e))&&"function"==typeof e.callee?"Arguments":i}},f772:function(t,e,n){var i=n("5692"),r=n("90e3"),a=i("keys");t.exports=function(t){return a[t]||(a[t]=r(t))}},fb15:function(t,e,n){"use strict";if(n.r(e),"undefined"!==typeof window){var i=window.document.currentScript,r=n("8875");i=r(),"currentScript"in document||Object.defineProperty(document,"currentScript",{get:r});var a=i&&i.src.match(/(.+\/)[^/]+\.js(\?.*)?$/);a&&(n.p=a[1])}var s=function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("div",{ref:"vuecal",staticClass:"vuecal__flex 
vuecal",class:t.cssClasses,attrs:{column:"",lang:t.locale}},[n("vuecal-header",{attrs:{options:t.$props,"edit-events":t.editEvents,"view-props":{views:t.views,weekDaysInHeader:t.weekDaysInHeader},"week-days":t.weekDays,"has-splits":t.hasSplits,"day-splits":t.daySplits,"switch-to-narrower-view":t.switchToNarrowerView},scopedSlots:t._u([{key:"arrow-prev",fn:function(){return[t._t("arrow-prev",[t._v(" "),n("i",{staticClass:"angle"}),t._v(" ")])]},proxy:!0},{key:"arrow-next",fn:function(){return[t._t("arrow-next",[t._v(" "),n("i",{staticClass:"angle"}),t._v(" ")])]},proxy:!0},{key:"today-button",fn:function(){return[t._t("today-button",[n("span",{staticClass:"default"},[t._v(t._s(t.texts.today))])])]},proxy:!0},{key:"title",fn:function(){return[t._t("title",[t._v(t._s(t.viewTitle))],{title:t.viewTitle,view:t.view})]},proxy:!0},{key:"weekday-heading",fn:function(e){var n=e.heading,i=e.view;return[t._t("weekday-heading",null,{heading:n,view:i})]}},{key:"split-label",fn:function(e){var n=e.split;return[t._t("split-label",null,{split:n,view:t.view.id})]}}],null,!0)}),t.hideBody?t._e():n("div",{staticClass:"vuecal__flex vuecal__body",attrs:{grow:""}},[n("transition",{attrs:{name:"slide-fade--"+t.transitionDirection,appear:t.transitions}},[n("div",{key:!!t.transitions&&t.view.id,staticClass:"vuecal__flex",staticStyle:{"min-width":"100%"},attrs:{column:""}},[t.showAllDayEvents&&t.hasTimeColumn&&(!t.cellOrSplitMinWidth||t.isDayView&&!t.minSplitWidth)?n("all-day-bar",t._b({scopedSlots:t._u([{key:"event",fn:function(e){var i=e.event,r=e.view;return[t._t("event",[t.editEvents.title&&i.titleEditable?n("div",{staticClass:"vuecal__event-title vuecal__event-title--edit",attrs:{contenteditable:""},domProps:{innerHTML:t._s(i.title)},on:{blur:function(e){return 
t.onEventTitleBlur(e,i)}}}):i.title?n("div",{staticClass:"vuecal__event-title",domProps:{innerHTML:t._s(i.title)}}):t._e(),!i.content||t.hasShortEvents||t.isShortMonthView?t._e():n("div",{staticClass:"vuecal__event-content",domProps:{innerHTML:t._s(i.content)}})],{view:r,event:i})]}}],null,!0)},"all-day-bar",t.allDayBar,!1)):t._e(),n("div",{staticClass:"vuecal__bg",class:{vuecal__flex:!t.hasTimeColumn},attrs:{column:""}},[n("div",{staticClass:"vuecal__flex",attrs:{row:"",grow:""}},[t.hasTimeColumn?n("div",{staticClass:"vuecal__time-column"},[t.showAllDayEvents&&t.cellOrSplitMinWidth&&(!t.isDayView||t.minSplitWidth)?n("div",{staticClass:"vuecal__all-day-text",style:{height:t.allDayBar.height}},[n("span",[t._v(t._s(t.texts.allDay))])]):t._e(),t._l(t.timeCells,(function(e,i){return n("div",{key:i,staticClass:"vuecal__time-cell",style:"height: "+t.timeCellHeight+"px"},[t._t("time-cell",[n("span",{staticClass:"vuecal__time-cell-line"}),n("span",{staticClass:"vuecal__time-cell-label"},[t._v(t._s(e.label))])],{hours:e.hours,minutes:e.minutes})],2)}))],2):t._e(),t.showWeekNumbers&&t.isMonthView?n("div",{staticClass:"vuecal__flex vuecal__week-numbers",attrs:{column:""}},t._l(6,(function(e){return n("div",{key:e,staticClass:"vuecal__flex vuecal__week-number-cell",attrs:{grow:""}},[t._t("week-number-cell",[t._v(t._s(t.getWeekNumber(e-1)))],{week:t.getWeekNumber(e-1)})],2)})),0):t._e(),n("div",{staticClass:"vuecal__flex vuecal__cells",class:t.view.id+"-view",attrs:{grow:"",wrap:!t.cellOrSplitMinWidth||!t.isWeekView,column:!!t.cellOrSplitMinWidth}},[t.cellOrSplitMinWidth&&t.isWeekView?n("weekdays-headings",{style:t.cellOrSplitMinWidth?"min-width: "+t.cellOrSplitMinWidth+"px":"",attrs:{"transition-direction":t.transitionDirection,"week-days":t.weekDays,"switch-to-narrower-view":t.switchToNarrowerView},scopedSlots:t._u([{key:"weekday-heading",fn:function(e){var n=e.heading,i=e.view;return[t._t("weekday-heading",null,{heading:n,view:i})]}},{key:"split-label",fn:function(e){var 
n=e.split;return[t._t("split-label",null,{split:n,view:t.view.id})]}}],null,!0)}):t.hasSplits&&t.stickySplitLabels&&t.minSplitWidth?n("div",{staticClass:"vuecal__flex vuecal__split-days-headers",style:t.cellOrSplitMinWidth?"min-width: "+t.cellOrSplitMinWidth+"px":""},t._l(t.daySplits,(function(e,i){return n("div",{key:i,staticClass:"day-split-header",class:e.class||!1},[t._t("split-label",[t._v(t._s(e.label))],{split:e,view:t.view.id})],2)})),0):t._e(),t.showAllDayEvents&&t.hasTimeColumn&&(t.isWeekView&&t.cellOrSplitMinWidth||t.isDayView&&t.hasSplits&&t.minSplitWidth)?n("all-day-bar",t._b({scopedSlots:t._u([{key:"event",fn:function(e){var i=e.event,r=e.view;return[t._t("event",[t.editEvents.title&&i.titleEditable?n("div",{staticClass:"vuecal__event-title vuecal__event-title--edit",attrs:{contenteditable:""},domProps:{innerHTML:t._s(i.title)},on:{blur:function(e){return t.onEventTitleBlur(e,i)}}}):i.title?n("div",{staticClass:"vuecal__event-title",domProps:{innerHTML:t._s(i.title)}}):t._e(),!i.content||t.hasShortEvents||t.isShortMonthView?t._e():n("div",{staticClass:"vuecal__event-content",domProps:{innerHTML:t._s(i.content)}})],{view:r,event:i})]}}],null,!0)},"all-day-bar",t.allDayBar,!1)):t._e(),n("div",{ref:"cells",staticClass:"vuecal__flex",style:t.cellOrSplitMinWidth?"min-width: "+t.cellOrSplitMinWidth+"px":"",attrs:{grow:"",wrap:!t.cellOrSplitMinWidth||!t.isWeekView}},t._l(t.viewCells,(function(e,i){return n("vuecal-cell",{key:i,attrs:{options:t.$props,"edit-events":t.editEvents,data:e,"cell-width":t.hideWeekdays.length&&(t.isWeekView||t.isMonthView)&&t.cellWidth,"min-timestamp":t.minTimestamp,"max-timestamp":t.maxTimestamp,"cell-splits":t.hasSplits&&t.daySplits||[]},scopedSlots:t._u([{key:"cell-content",fn:function(i){var 
r=i.events,a=i.split,s=i.selectCell;return[t._t("cell-content",[a&&!t.stickySplitLabels?n("div",{staticClass:"split-label",domProps:{innerHTML:t._s(a.label)}}):t._e(),e.content?n("div",{staticClass:"vuecal__cell-date",domProps:{innerHTML:t._s(e.content)}}):t._e(),(t.isMonthView&&!t.eventsOnMonthView||t.isYearsOrYearView&&t.eventsCountOnYearView)&&r.length?n("div",{staticClass:"vuecal__cell-events-count"},[t._t("events-count",[t._v(t._s(r.length))],{view:t.view,events:r})],2):t._e(),!t.cellOrSplitHasEvents(r,a)&&t.isWeekOrDayView?n("div",{staticClass:"vuecal__no-event"},[t._t("no-event",[t._v(t._s(t.texts.noEvent))])],2):t._e()],{cell:e,view:t.view,goNarrower:s,events:r})]}},{key:"event",fn:function(i){var r=i.event,a=i.view;return[t._t("event",[t.editEvents.title&&r.titleEditable?n("div",{staticClass:"vuecal__event-title vuecal__event-title--edit",attrs:{contenteditable:""},domProps:{innerHTML:t._s(r.title)},on:{blur:function(e){return t.onEventTitleBlur(e,r)}}}):r.title?n("div",{staticClass:"vuecal__event-title",domProps:{innerHTML:t._s(r.title)}}):t._e(),!t.time||r.allDay||t.isMonthView&&(r.allDay||"short"===t.showAllDayEvents)||t.isShortMonthView?t._e():n("div",{staticClass:"vuecal__event-time"},[t._v(t._s(t.utils.date.formatTime(r.start,t.TimeFormat))),r.endTimeMinutes?n("span",[t._v(" - "+t._s(t.utils.date.formatTime(r.end,t.TimeFormat,null,!0)))]):t._e(),r.daysCount>1&&(r.segments[e.formattedDate]||{}).isFirstDay?n("small",{staticClass:"days-to-end"},[t._v(" +"+t._s(r.daysCount-1)+t._s((t.texts.day[0]||"").toLowerCase()))]):t._e()]),!r.content||t.isMonthView&&r.allDay&&"short"===t.showAllDayEvents||t.isShortMonthView?t._e():n("div",{staticClass:"vuecal__event-content",domProps:{innerHTML:t._s(r.content)}})],{view:a,event:r})]}}],null,!0)},[t._t("default")],2)})),1)],1)])])],1)]),t.ready?t._e():n("div",{staticClass:"vuecal__scrollbar-check"},[n("div")])],1)],1)},o=[],l=n("ade3");function u(t,e){(null==e||e>t.length)&&(e=t.length);for(var n=0,i=new 
Array(e);n<e;n++)i[n]=t[n];return i}function c(t){if(Array.isArray(t))return u(t)}n("a4d3"),n("e01a"),n("d3b7"),n("d28b"),n("3ca3"),n("e260"),n("ddb0"),n("a630");function d(t){if("undefined"!==typeof Symbol&&Symbol.iterator in Object(t))return Array.from(t)}n("fb6a"),n("b0c0");function f(t,e){if(t){if("string"===typeof t)return u(t,e);var n=Object.prototype.toString.call(t).slice(8,-1);return"Object"===n&&t.constructor&&(n=t.constructor.name),"Map"===n||"Set"===n?Array.from(t):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?u(t,e):void 0}}function v(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function h(t){return c(t)||d(t)||f(t)||v()}function p(t){return p="function"===typeof Symbol&&"symbol"===typeof Symbol.iterator?function(t){return typeof t}:function(t){return t&&"function"===typeof Symbol&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},p(t)}var y,m,g,b,w,D,_,E=n("5530"),k=(n("cb29"),n("a9e3"),n("cca6"),n("e6cf"),n("caad"),n("99af"),n("a15b"),n("2532"),n("d81d"),n("4de4"),n("159b"),n("7db0"),n("1276"),n("ac1f"),n("5319"),n("38cf"),n("b64b"),n("c96a"),n("13d5"),n("d4ec")),S=n("bee2"),x=(n("25f0"),{}),O={},C=function(){function t(e){var n=arguments.length>1&&void 0!==arguments[1]&&arguments[1];Object(k["a"])(this,t),Object(l["a"])(this,"texts",{}),Object(l["a"])(this,"dateToMinutes",(function(t){return 60*t.getHours()+t.getMinutes()+t.getSeconds()/60})),b=this,this._texts=e,n||!Date||Date.prototype.addDays||this._initDatePrototypes()}return Object(S["a"])(t,[{key:"_initDatePrototypes",value:function(){Date.prototype.addDays=function(t){return b.addDays(this,t)},Date.prototype.subtractDays=function(t){return b.subtractDays(this,t)},Date.prototype.addHours=function(t){return b.addHours(this,t)},Date.prototype.subtractHours=function(t){return b.subtractHours(this,t)},Date.prototype.addMinutes=function(t){return 
b.addMinutes(this,t)},Date.prototype.subtractMinutes=function(t){return b.subtractMinutes(this,t)},Date.prototype.getWeek=function(){return b.getWeek(this)},Date.prototype.isToday=function(){return b.isToday(this)},Date.prototype.isLeapYear=function(){return b.isLeapYear(this)},Date.prototype.format=function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"YYYY-MM-DD";return b.formatDate(this,t)},Date.prototype.formatTime=function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:"HH:mm";return b.formatTime(this,t)}}},{key:"removePrototypes",value:function(){delete Date.prototype.addDays,delete Date.prototype.subtractDays,delete Date.prototype.addHours,delete Date.prototype.subtractHours,delete Date.prototype.addMinutes,delete Date.prototype.subtractMinutes,delete Date.prototype.getWeek,delete Date.prototype.isToday,delete Date.prototype.isLeapYear,delete Date.prototype.format,delete Date.prototype.formatTime}},{key:"updateTexts",value:function(t){this._texts=t}},{key:"_todayFormatted",value:function(){return m!==(new Date).getDate()&&(y=new Date,m=y.getDate(),g="".concat(y.getFullYear(),"-").concat(y.getMonth(),"-").concat(y.getDate())),g}},{key:"addDays",value:function(t,e){var n=new Date(t.valueOf());return n.setDate(n.getDate()+e),n}},{key:"subtractDays",value:function(t,e){var n=new Date(t.valueOf());return n.setDate(n.getDate()-e),n}},{key:"addHours",value:function(t,e){var n=new Date(t.valueOf());return n.setHours(n.getHours()+e),n}},{key:"subtractHours",value:function(t,e){var n=new Date(t.valueOf());return n.setHours(n.getHours()-e),n}},{key:"addMinutes",value:function(t,e){var n=new Date(t.valueOf());return n.setMinutes(n.getMinutes()+e),n}},{key:"subtractMinutes",value:function(t,e){var n=new Date(t.valueOf());return n.setMinutes(n.getMinutes()-e),n}},{key:"getWeek",value:function(t){var e=new Date(Date.UTC(t.getFullYear(),t.getMonth(),t.getDate())),n=e.getUTCDay()||7;e.setUTCDate(e.getUTCDate()+4-n);var i=new 
Date(Date.UTC(e.getUTCFullYear(),0,1));return Math.ceil(((e-i)/864e5+1)/7)}},{key:"isToday",value:function(t){return"".concat(t.getFullYear(),"-").concat(t.getMonth(),"-").concat(t.getDate())===this._todayFormatted()}},{key:"isLeapYear",value:function(t){var e=t.getFullYear();return!(e%400)||e%100&&!(e%4)}},{key:"getPreviousFirstDayOfWeek",value:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:null,e=arguments.length>1?arguments[1]:void 0,n=t&&new Date(t.valueOf())||new Date,i=e?7:6;return n.setDate(n.getDate()-(n.getDay()+i)%7),n}},{key:"stringToDate",value:function(t){return t instanceof Date?t:(10===t.length&&(t+=" 00:00"),new Date(t.replace(/-/g,"/")))}},{key:"countDays",value:function(t,e){"string"===typeof t&&(t=t.replace(/-/g,"/")),"string"===typeof e&&(e=e.replace(/-/g,"/")),t=new Date(t).setHours(0,0,0,0),e=new Date(e).setHours(0,0,1,0);var n=60*(new Date(e).getTimezoneOffset()-new Date(t).getTimezoneOffset())*1e3;return Math.ceil((e-t-n)/864e5)}},{key:"datesInSameTimeStep",value:function(t,e,n){return Math.abs(t.getTime()-e.getTime())<=60*n*1e3}},{key:"formatDate",value:function(t){var e=this,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"YYYY-MM-DD",i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null;if(i||(i=this._texts),n||(n="YYYY-MM-DD"),"YYYY-MM-DD"===n)return this.formatDateLite(t);x={},O={};var r={YYYY:function(){return e._hydrateDateObject(t,i).YYYY},YY:function(){return e._hydrateDateObject(t,i).YY()},M:function(){return e._hydrateDateObject(t,i).M},MM:function(){return e._hydrateDateObject(t,i).MM()},MMM:function(){return e._hydrateDateObject(t,i).MMM()},MMMM:function(){return e._hydrateDateObject(t,i).MMMM()},MMMMG:function(){return e._hydrateDateObject(t,i).MMMMG()},D:function(){return e._hydrateDateObject(t,i).D},DD:function(){return e._hydrateDateObject(t,i).DD()},S:function(){return e._hydrateDateObject(t,i).S()},d:function(){return e._hydrateDateObject(t,i).d},dd:function(){return 
e._hydrateDateObject(t,i).dd()},ddd:function(){return e._hydrateDateObject(t,i).ddd()},dddd:function(){return e._hydrateDateObject(t,i).dddd()},HH:function(){return e._hydrateTimeObject(t,i).HH},H:function(){return e._hydrateTimeObject(t,i).H},hh:function(){return e._hydrateTimeObject(t,i).hh},h:function(){return e._hydrateTimeObject(t,i).h},am:function(){return e._hydrateTimeObject(t,i).am},AM:function(){return e._hydrateTimeObject(t,i).AM},mm:function(){return e._hydrateTimeObject(t,i).mm},m:function(){return e._hydrateTimeObject(t,i).m}};return n.replace(/(\{[a-zA-Z]+\}|[a-zA-Z]+)/g,(function(t,e){var n=r[e.replace(/\{|\}/g,"")];return void 0!==n?n():e}))}},{key:"formatDateLite",value:function(t){var e=t.getMonth()+1,n=t.getDate();return"".concat(t.getFullYear(),"-").concat(e<10?"0":"").concat(e,"-").concat(n<10?"0":"").concat(n)}},{key:"formatTime",value:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:"HH:mm",n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:null,i=arguments.length>3&&void 0!==arguments[3]&&arguments[3],r=!1;if(i){var a=[t.getHours(),t.getMinutes(),t.getSeconds()],s=a[0],o=a[1],l=a[2];s+o+l===141&&(r=!0)}if(t instanceof Date&&"HH:mm"===e)return r?"24:00":this.formatTimeLite(t);O={},n||(n=this._texts);var u=this._hydrateTimeObject(t,n),c=e.replace(/(\{[a-zA-Z]+\}|[a-zA-Z]+)/g,(function(t,e){var n=u[e.replace(/\{|\}/g,"")];return void 0!==n?n:e}));return r?c.replace("23:59","24:00"):c}},{key:"formatTimeLite",value:function(t){var e=t.getHours(),n=t.getMinutes();return"".concat((e<10?"0":"")+e,":").concat((n<10?"0":"")+n)}},{key:"_nth",value:function(t){if(t>3&&t<21)return"th";switch(t%10){case 1:return"st";case 2:return"nd";case 3:return"rd";default:return"th"}}},{key:"_hydrateDateObject",value:function(t,e){var n=this;if(x.D)return x;var i=t.getFullYear(),r=t.getMonth()+1,a=t.getDate(),s=t.getDay(),o=(s-1+7)%7;return x={YYYY:i,YY:function(){return 
i.toString().substring(2)},M:r,MM:function(){return(r<10?"0":"")+r},MMM:function(){return e.months[r-1].substring(0,3)},MMMM:function(){return e.months[r-1]},MMMMG:function(){return(e.monthsGenitive||e.months)[r-1]},D:a,DD:function(){return(a<10?"0":"")+a},S:function(){return n._nth(a)},d:o+1,dd:function(){return e.weekDays[o][0]},ddd:function(){return e.weekDays[o].substr(0,3)},dddd:function(){return e.weekDays[o]}},x}},{key:"_hydrateTimeObject",value:function(t,e){if(O.am)return O;var n,i;t instanceof Date?(n=t.getHours(),i=t.getMinutes()):(n=Math.floor(t/60),i=Math.floor(t%60));var r=n%12?n%12:12,a=(e||{am:"am",pm:"pm"})[24===n||n<12?"am":"pm"];return O={H:n,h:r,HH:(n<10?"0":"")+n,hh:(r<10?"0":"")+r,am:a,AM:a.toUpperCase(),m:i,mm:(i<10?"0":"")+i},O}}]),t}(),T=1440,M=function t(e){var n=this;Object(k["a"])(this,t),Object(l["a"])(this,"_vuecal",null),Object(l["a"])(this,"selectCell",(function(){var t=arguments.length>0&&void 0!==arguments[0]&&arguments[0],e=arguments.length>1?arguments[1]:void 0,i=arguments.length>2?arguments[2]:void 0;n._vuecal.$emit("cell-click",i?{date:e,split:i}:e),n._vuecal.clickToNavigate||t?n._vuecal.switchToNarrowerView():n._vuecal.dblclickToNavigate&&"ontouchstart"in window&&(n._vuecal.domEvents.dblTapACell.taps++,setTimeout((function(){return n._vuecal.domEvents.dblTapACell.taps=0}),n._vuecal.domEvents.dblTapACell.timeout),n._vuecal.domEvents.dblTapACell.taps>=2&&(n._vuecal.domEvents.dblTapACell.taps=0,n._vuecal.switchToNarrowerView(),n._vuecal.$emit("cell-dblclick",i?{date:e,split:i}:e)))})),Object(l["a"])(this,"keyPressEnterCell",(function(t,e){n._vuecal.$emit("cell-keypress-enter",e?{date:t,split:e}:t),n._vuecal.switchToNarrowerView()})),Object(l["a"])(this,"getPosition",(function(t){var e=n._vuecal.$refs.cells.getBoundingClientRect(),i=e.left,r=e.top,a="ontouchstart"in window&&t.touches?t.touches[0]:t,s=a.clientX,o=a.clientY;return{x:s-i,y:o-r}})),Object(l["a"])(this,"minutesAtCursor",(function(t){var 
e=0,i={x:0,y:0},r=n._vuecal.$props,a=r.timeStep,s=r.timeCellHeight,o=r.timeFrom;return"number"===typeof t?e=t:"object"===p(t)&&(i=n.getPosition(t),e=Math.round(i.y*a/parseInt(s)+o)),{minutes:Math.max(Math.min(e,T),0),cursorCoords:i}})),this._vuecal=e},j=(n("6062"),n("a434"),n("c740"),n("8bbf")),A=n.n(j),W=2,V=1440,I=function(){function t(e,n){Object(k["a"])(this,t),Object(l["a"])(this,"_vuecal",null),Object(l["a"])(this,"eventDefaults",{_eid:null,start:"",startTimeMinutes:0,end:"",endTimeMinutes:0,title:"",content:"",background:!1,allDay:!1,segments:null,repeat:null,daysCount:1,deletable:!0,deleting:!1,titleEditable:!0,resizable:!0,resizing:!1,draggable:!0,dragging:!1,draggingStatic:!1,focused:!1,class:""}),this._vuecal=e,w=n}return Object(S["a"])(t,[{key:"createAnEvent",value:function(t,e,n){var i=this;if("string"===typeof t&&(t=w.stringToDate(t)),!(t instanceof Date))return!1;var r=w.dateToMinutes(t);e=1*e||60*W;var a=r+e,s=w.addMinutes(new Date(t),e);n.end&&("string"===typeof n.end&&(n.end=w.stringToDate(n.end)),n.endTimeMinutes=w.dateToMinutes(n.end));var o=Object(E["a"])(Object(E["a"])({},this.eventDefaults),{},{_eid:"".concat(this._vuecal._uid,"_").concat(this._vuecal.eventIdIncrement++),start:t,startTimeMinutes:r,end:s,endTimeMinutes:a,segments:null},n);return"function"!==typeof this._vuecal.onEventCreate||this._vuecal.onEventCreate(o,(function(){return i.deleteAnEvent(o)}))?(o.startDateF!==o.endDateF&&(o.daysCount=w.countDays(o.start,o.end)),this._vuecal.mutableEvents.push(o),this._vuecal.addEventsToView([o]),this._vuecal.emitWithEvent("event-create",o),this._vuecal.$emit("event-change",{event:this._vuecal.cleanupEvent(o),originalEvent:null}),o):void 0}},{key:"addEventSegment",value:function(t){t.segments||(A.a.set(t,"segments",{}),A.a.set(t.segments,w.formatDateLite(t.start),{start:t.start,startTimeMinutes:t.startTimeMinutes,endTimeMinutes:V,isFirstDay:!0,isLastDay:!1}));var e=t.segments[w.formatDateLite(t.end)];e&&(e.isLastDay=!1,e.endTimeMinutes=V);var 
n=w.addDays(t.end,1),i=w.formatDateLite(n);return n.setHours(0,0,0,0),A.a.set(t.segments,i,{start:n,startTimeMinutes:0,endTimeMinutes:t.endTimeMinutes,isFirstDay:!1,isLastDay:!0}),t.end=w.addMinutes(n,t.endTimeMinutes),t.daysCount=Object.keys(t.segments).length,i}},{key:"removeEventSegment",value:function(t){var e=Object.keys(t.segments).length;if(e<=1)return w.formatDateLite(t.end);A.a.delete(t.segments,w.formatDateLite(t.end)),e--;var n=w.subtractDays(t.end,1),i=w.formatDateLite(n),r=t.segments[i];return e?r&&(r.isLastDay=!0,r.endTimeMinutes=t.endTimeMinutes):t.segments=null,t.daysCount=e||1,t.end=n,i}},{key:"createEventSegments",value:function(t,e,n){var i,r,a,s=e.getTime(),o=n.getTime(),l=t.start.getTime(),u=t.end.getTime(),c=!1;t.end.getHours()||t.end.getMinutes()||(u-=1e3),A.a.set(t,"segments",{}),t.repeat?(i=s,r=Math.min(o,t.repeat.until?w.stringToDate(t.repeat.until).getTime():o)):(i=Math.max(s,l),r=Math.min(o,u));while(i<=r){var d=!1,f=w.addDays(new Date(i),1).setHours(0,0,0,0),v=void 0,h=void 0,p=void 0,y=void 0;if(t.repeat){var m=new Date(i),g=w.formatDateLite(m);(c||t.occurrences&&t.occurrences[g])&&(c||(l=t.occurrences[g].start,a=new Date(l).setHours(0,0,0,0),u=t.occurrences[g].end),c=!0,d=!0),v=i===a,h=g===w.formatDateLite(new Date(u)),p=new Date(v?l:i),y=w.formatDateLite(p),h&&(c=!1)}else d=!0,v=i===l,h=r===u&&f>r,p=v?t.start:new Date(i),y=w.formatDateLite(v?t.start:p);d&&A.a.set(t.segments,y,{start:p,startTimeMinutes:v?t.startTimeMinutes:0,endTimeMinutes:h?t.endTimeMinutes:V,isFirstDay:v,isLastDay:h}),i=f}return t}},{key:"deleteAnEvent",value:function(t){this._vuecal.emitWithEvent("event-delete",t),this._vuecal.mutableEvents=this._vuecal.mutableEvents.filter((function(e){return e._eid!==t._eid})),this._vuecal.view.events=this._vuecal.view.events.filter((function(e){return e._eid!==t._eid}))}},{key:"checkCellOverlappingEvents",value:function(t,e){var 
n=this;_=t.slice(0),D={},t.forEach((function(t){_.shift(),D[t._eid]||A.a.set(D,t._eid,{overlaps:[],start:t.start,position:0}),D[t._eid].position=0,_.forEach((function(i){D[i._eid]||A.a.set(D,i._eid,{overlaps:[],start:i.start,position:0});var r,a,s=n.eventInRange(i,t.start,t.end),o=e.overlapsPerTimeStep?w.datesInSameTimeStep(t.start,i.start,e.timeStep):1;t.background||t.allDay||i.background||i.allDay||!s||!o?((r=(D[t._eid]||{overlaps:[]}).overlaps.indexOf(i._eid))>-1&&D[t._eid].overlaps.splice(r,1),(a=(D[i._eid]||{overlaps:[]}).overlaps.indexOf(t._eid))>-1&&D[i._eid].overlaps.splice(a,1),D[i._eid].position--):(D[t._eid].overlaps.push(i._eid),D[t._eid].overlaps=h(new Set(D[t._eid].overlaps)),D[i._eid].overlaps.push(t._eid),D[i._eid].overlaps=h(new Set(D[i._eid].overlaps)),D[i._eid].position++)}))}));var i=0,r=function(t){var e=D[t],r=e.overlaps.map((function(t){return{id:t,start:D[t].start}}));r.push({id:t,start:e.start}),r.sort((function(t,e){return t.start<e.start?-1:t.start>e.start?1:t.id>e.id?-1:1})),e.position=r.findIndex((function(e){return e.id===t})),i=Math.max(n.getOverlapsStreak(e,D),i)};for(var a in D)r(a);return[D,i]}},{key:"getOverlapsStreak",value:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{},n=t.overlaps.length+1,i=[];return t.overlaps.forEach((function(n){if(!i.includes(n)){var r=t.overlaps.filter((function(t){return t!==n}));r.forEach((function(t){e[t].overlaps.includes(n)||i.push(t)}))}})),i=h(new Set(i)),n-=i.length,n}},{key:"eventInRange",value:function(t,e,n){if(t.allDay||!this._vuecal.time){var i=new Date(t.start).setHours(0,0,0,0),r=new Date(t.end).setHours(23,59,0,0);return r>=new Date(e).setHours(0,0,0,0)&&i<=new Date(n).setHours(0,0,0,0)}var a=t.start.getTime(),s=t.end.getTime();return a<n.getTime()&&s>e.getTime()}}]),t}(),P=function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("div",{staticClass:"vuecal__header"},[t.options.hideViewSelector?t._e():n("div",{staticClass:"vuecal__flex 
vuecal__menu",attrs:{role:"tablist","aria-label":"Calendar views navigation"}},t._l(t.viewProps.views,(function(e,i){return e.enabled?n("button",{staticClass:"vuecal__view-btn",class:{"vuecal__view-btn--active":t.view.id===i,"vuecal__view-btn--highlighted":t.highlightedControl===i},attrs:{type:"button","aria-label":e.label+" view"},on:{dragenter:function(e){t.editEvents.drag&&t.dnd&&t.dnd.viewSelectorDragEnter(e,i,t.$data)},dragleave:function(e){t.editEvents.drag&&t.dnd&&t.dnd.viewSelectorDragLeave(e,i,t.$data)},click:function(e){return t.switchView(i,null,!0)}}},[t._v(t._s(e.label))]):t._e()})),0),t.options.hideTitleBar?t._e():n("div",{staticClass:"vuecal__title-bar"},[n("button",{staticClass:"vuecal__arrow vuecal__arrow--prev",class:{"vuecal__arrow--highlighted":"previous"===t.highlightedControl},attrs:{type:"button","aria-label":"Previous "+t.view.id},on:{click:t.previous,dragenter:function(e){t.editEvents.drag&&t.dnd&&t.dnd.viewSelectorDragEnter(e,"previous",t.$data)},dragleave:function(e){t.editEvents.drag&&t.dnd&&t.dnd.viewSelectorDragLeave(e,"previous",t.$data)}}},[t._t("arrow-prev")],2),n("div",{staticClass:"vuecal__flex vuecal__title",attrs:{grow:""}},[n(t.options.transitions?"transition":"div",{tag:"component",attrs:{name:"slide-fade--"+t.transitionDirection}},[n(t.broaderView?"button":"span",{key:""+t.view.id+t.view.startDate.toString(),tag:"component",attrs:{type:!!t.broaderView&&"button","aria-label":!!t.broaderView&&"Go to "+t.broaderView+" 
view"},on:{click:function(e){t.broaderView&&t.switchToBroaderView()}}},[t._t("title")],2)],1)],1),t.options.todayButton?n("button",{staticClass:"vuecal__today-btn",class:{"vuecal__today-btn--highlighted":"today"===t.highlightedControl},attrs:{type:"button","aria-label":"Today"},on:{click:t.goToToday,dragenter:function(e){t.editEvents.drag&&t.dnd&&t.dnd.viewSelectorDragEnter(e,"today",t.$data)},dragleave:function(e){t.editEvents.drag&&t.dnd&&t.dnd.viewSelectorDragLeave(e,"today",t.$data)}}},[t._t("today-button")],2):t._e(),n("button",{staticClass:"vuecal__arrow vuecal__arrow--next",class:{"vuecal__arrow--highlighted":"next"===t.highlightedControl},attrs:{type:"button","aria-label":"Next "+t.view.id},on:{click:t.next,dragenter:function(e){t.editEvents.drag&&t.dnd&&t.dnd.viewSelectorDragEnter(e,"next",t.$data)},dragleave:function(e){t.editEvents.drag&&t.dnd&&t.dnd.viewSelectorDragLeave(e,"next",t.$data)}}},[t._t("arrow-next")],2)]),t.viewProps.weekDaysInHeader?n("weekdays-headings",{attrs:{"week-days":t.weekDays,"transition-direction":t.transitionDirection,"switch-to-narrower-view":t.switchToNarrowerView},scopedSlots:t._u([{key:"weekday-heading",fn:function(e){var n=e.heading,i=e.view;return[t._t("weekday-heading",null,{heading:n,view:i})]}},{key:"split-label",fn:function(e){var n=e.split;return[t._t("split-label",null,{split:n,view:t.view})]}}],null,!0)}):t._e(),n("transition",{attrs:{name:"slide-fade--"+t.transitionDirection}},[t.showDaySplits?n("div",{staticClass:"vuecal__flex vuecal__split-days-headers"},t._l(t.daySplits,(function(e,i){return n("div",{key:i,staticClass:"day-split-header",class:e.class||!1},[t._t("split-label",[t._v(t._s(e.label))],{split:e,view:t.view.id})],2)})),0):t._e()])],1)},H=[],L=function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("div",{staticClass:"vuecal__flex vuecal__weekdays-headings"},t._l(t.headings,(function(e,i){return e.hide?t._e():n("div",{key:i,staticClass:"vuecal__flex 
vuecal__heading",class:{today:e.today,clickable:t.cellHeadingsClickable},style:t.weekdayCellStyles,on:{click:function(n){"week"===t.view.id&&t.selectCell(e.date,n)},dblclick:function(e){"week"===t.view.id&&t.vuecal.dblclickToNavigate&&t.switchToNarrowerView()}}},[n("transition",{attrs:{name:"slide-fade--"+t.transitionDirection,appear:t.vuecal.transitions}},[n("div",{key:!!t.vuecal.transitions&&i+"-"+e.dayOfMonth,staticClass:"vuecal__flex",attrs:{column:""}},[n("div",{staticClass:"vuecal__flex weekday-label",attrs:{grow:""}},[t._t("weekday-heading",[n("span",{staticClass:"full"},[t._v(t._s(e.full))]),n("span",{staticClass:"small"},[t._v(t._s(e.small))]),n("span",{staticClass:"xsmall"},[t._v(t._s(e.xsmall))]),e.dayOfMonth?n("span",[t._v(" "+t._s(e.dayOfMonth))]):t._e()],{heading:t.cleanupHeading(e),view:t.view})],2),t.vuecal.hasSplits&&t.vuecal.stickySplitLabels?n("div",{staticClass:"vuecal__flex vuecal__split-days-headers",attrs:{grow:""}},t._l(t.vuecal.daySplits,(function(e,i){return n("div",{key:i,staticClass:"day-split-header",class:e.class||!1},[t._t("split-label",[t._v(t._s(e.label))],{split:e,view:t.view})],2)})),0):t._e()])])],1)})),0)},N=[],Y={inject:["vuecal","utils","view"],props:{transitionDirection:{type:String,default:"right"},weekDays:{type:Array,default:function(){return[]}},switchToNarrowerView:{type:Function,default:function(){}}},methods:{selectCell:function(t,e){t.getTime()!==this.view.selectedDate.getTime()&&(this.view.selectedDate=t),this.utils.cell.selectCell(!1,t,e)},cleanupHeading:function(t){return Object(E["a"])({label:t.full,date:t.date},t.today?{today:t.today}:{})}},computed:{headings:function(){var t=this;if(!["month","week"].includes(this.view.id))return[];var e=!1,n=this.weekDays.map((function(n,i){var r=t.utils.date.addDays(t.view.startDate,i);return 
Object(E["a"])({hide:n.hide,full:n.label,small:n.short||n.label.substr(0,3),xsmall:n.short||n.label.substr(0,1)},"week"===t.view.id?{dayOfMonth:r.getDate(),date:r,today:!e&&t.utils.date.isToday(r)&&!e++}:{})}));return n},cellWidth:function(){return 100/(7-this.weekDays.reduce((function(t,e){return t+e.hide}),0))},weekdayCellStyles:function(){return Object(E["a"])({},this.vuecal.hideWeekdays.length?{width:"".concat(this.cellWidth,"%")}:{})},cellHeadingsClickable:function(){return"week"===this.view.id&&(this.vuecal.clickToNavigate||this.vuecal.dblclickToNavigate)}}},F=Y;n("9735");function R(t,e,n,i,r,a,s,o){var l,u="function"===typeof t?t.options:t;if(e&&(u.render=e,u.staticRenderFns=n,u._compiled=!0),i&&(u.functional=!0),a&&(u._scopeId="data-v-"+a),s?(l=function(t){t=t||this.$vnode&&this.$vnode.ssrContext||this.parent&&this.parent.$vnode&&this.parent.$vnode.ssrContext,t||"undefined"===typeof __VUE_SSR_CONTEXT__||(t=__VUE_SSR_CONTEXT__),r&&r.call(this,t),t&&t._registeredComponents&&t._registeredComponents.add(s)},u._ssrRegister=l):r&&(l=o?function(){r.call(this,(u.functional?this.parent:this).$root.$options.shadowRoot)}:r),l)if(u.functional){u._injectStyles=l;var c=u.render;u.render=function(t,e){return l.call(e),c(t,e)}}else{var d=u.beforeCreate;u.beforeCreate=d?[].concat(d,l):[l]}return{exports:t,options:u}}var $=R(F,L,N,!1,null,null,null),B=$.exports,z={inject:["vuecal","previous","next","switchView","updateSelectedDate","modules","view"],components:{WeekdaysHeadings:B},props:{options:{type:Object,default:function(){return{}}},editEvents:{type:Object,required:!0},hasSplits:{type:[Boolean,Number],default:!1},daySplits:{type:Array,default:function(){return[]}},viewProps:{type:Object,default:function(){return{}}},weekDays:{type:Array,default:function(){return[]}},switchToNarrowerView:{type:Function,default:function(){}}},data:function(){return{highlightedControl:null}},methods:{goToToday:function(){this.updateSelectedDate(new Date((new 
Date).setHours(0,0,0,0)))},switchToBroaderView:function(){this.transitionDirection="left",this.broaderView&&this.switchView(this.broaderView)}},computed:{transitionDirection:{get:function(){return this.vuecal.transitionDirection},set:function(t){this.vuecal.transitionDirection=t}},broaderView:function(){var t=this.vuecal.enabledViews;return t[t.indexOf(this.view.id)-1]},showDaySplits:function(){return"day"===this.view.id&&this.hasSplits&&this.options.stickySplitLabels&&!this.options.minSplitWidth},dnd:function(){return this.modules.dnd}}},U=z,G=(n("dc34"),R(U,P,H,!1,null,null,null)),q=G.exports,X=function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("div",{staticClass:"vuecal__flex vuecal__all-day",style:t.cellOrSplitMinWidth&&{height:t.height}},[t.cellOrSplitMinWidth?t._e():n("div",{staticClass:"vuecal__all-day-text",staticStyle:{width:"3em"}},[n("span",[t._v(t._s(t.label))])]),n("div",{staticClass:"vuecal__flex vuecal__cells",class:t.view.id+"-view",style:t.cellOrSplitMinWidth?"min-width: "+t.cellOrSplitMinWidth+"px":"",attrs:{grow:""}},t._l(t.cells,(function(e,i){return n("vuecal-cell",{key:i,attrs:{options:t.options,"edit-events":t.editEvents,data:e,"all-day":!0,"cell-width":t.options.hideWeekdays.length&&(t.vuecal.isWeekView||t.vuecal.isMonthView)&&t.vuecal.cellWidth,"min-timestamp":t.options.minTimestamp,"max-timestamp":t.options.maxTimestamp,"cell-splits":t.daySplits},scopedSlots:t._u([{key:"event",fn:function(e){var n=e.event,i=e.view;return[t._t("event",null,{view:i,event:n})]}}],null,!0)})})),1)])},K=[],J=function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("transition-group",{staticClass:"vuecal__cell",class:t.cellClasses,style:t.cellStyles,attrs:{name:"slide-fade--"+t.transitionDirection,tag:"div",appear:t.options.transitions}},[t._l(t.splitsCount?t.splits:1,(function(e,i){return n("div",{key:t.options.transitions?t.view.id+"-"+t.data.content+"-"+i:i,staticClass:"vuecal__flex 
vuecal__cell-content",class:t.splitsCount&&t.splitClasses(e),attrs:{"data-split":!!t.splitsCount&&e.id,column:"",tabindex:"0","aria-label":t.data.content},on:{focus:function(e){return t.onCellFocus(e)},keypress:function(e){return!e.type.indexOf("key")&&t._k(e.keyCode,"enter",13,e.key,"Enter")?null:t.onCellkeyPressEnter(e)},touchstart:function(n){!t.isDisabled&&t.onCellTouchStart(n,t.splitsCount?e.id:null)},mousedown:function(n){!t.isDisabled&&t.onCellMouseDown(n,t.splitsCount?e.id:null)},click:function(e){!t.isDisabled&&t.onCellClick(e)},dblclick:function(e){!t.isDisabled&&t.onCellDblClick(e)},contextmenu:function(e){!t.isDisabled&&t.options.cellContextmenu&&t.onCellContextMenu(e)},dragenter:function(e){!t.isDisabled&&t.editEvents.drag&&t.dnd&&t.dnd.cellDragEnter(e,t.$data,t.data.startDate)},dragover:function(n){!t.isDisabled&&t.editEvents.drag&&t.dnd&&t.dnd.cellDragOver(n,t.$data,t.data.startDate,t.splitsCount?e.id:null)},dragleave:function(e){!t.isDisabled&&t.editEvents.drag&&t.dnd&&t.dnd.cellDragLeave(e,t.$data,t.data.startDate)},drop:function(n){!t.isDisabled&&t.editEvents.drag&&t.dnd&&t.dnd.cellDragDrop(n,t.$data,t.data.startDate,t.splitsCount?e.id:null)}}},[t.isWeekOrDayView&&!t.allDay&&t.specialHours.length?t._l(t.specialHours,(function(t,e){return n("div",{staticClass:"vuecal__special-hours",class:"vuecal__special-hours--day"+t.day+" "+t.class,style:"height: "+t.height+"px;top: "+t.top+"px"})})):t._e(),t._t("cell-content",null,{events:t.events,selectCell:function(e){return t.selectCell(e,!0)},split:!!t.splitsCount&&e}),t.eventsCount&&(t.isWeekOrDayView||"month"===t.view.id&&t.options.eventsOnMonthView)?n("div",{staticClass:"vuecal__cell-events"},t._l(t.splitsCount?e.events:t.events,(function(i,r){return 
n("event",{key:r,attrs:{"cell-formatted-date":t.data.formattedDate,event:i,"all-day":t.allDay,"cell-events":t.splitsCount?e.events:t.events,overlaps:((t.splitsCount?e.overlaps[i._eid]:t.cellOverlaps[i._eid])||[]).overlaps,"event-position":((t.splitsCount?e.overlaps[i._eid]:t.cellOverlaps[i._eid])||[]).position,"overlaps-streak":t.splitsCount?e.overlapsStreak:t.cellOverlapsStreak},scopedSlots:t._u([{key:"event",fn:function(e){var n=e.event,i=e.view;return[t._t("event",null,{view:i,event:n})]}}],null,!0)})})),1):t._e()],2)})),t.timelineVisible?n("div",{key:t.options.transitions?t.view.id+"-now-line":"now-line",staticClass:"vuecal__now-line",style:"top: "+t.todaysTimePosition+"px",attrs:{title:t.utils.date.formatTime(t.vuecal.now)}}):t._e()],2)},Q=[];function Z(t){if(Array.isArray(t))return t}function tt(t,e){if("undefined"!==typeof Symbol&&Symbol.iterator in Object(t)){var n=[],i=!0,r=!1,a=void 0;try{for(var s,o=t[Symbol.iterator]();!(i=(s=o.next()).done);i=!0)if(n.push(s.value),e&&n.length===e)break}catch(l){r=!0,a=l}finally{try{i||null==o["return"]||o["return"]()}finally{if(r)throw a}}return n}}function et(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function nt(t,e){return Z(t)||tt(t,e)||f(t,e)||et()}var it=function(){var t=this,e=t.$createElement,n=t._self._c||e;return n("div",{staticClass:"vuecal__event",class:t.eventClasses,style:t.eventStyles,attrs:{tabindex:"0",draggable:t.draggable},on:{focus:t.focusEvent,keypress:function(e){return!e.type.indexOf("key")&&t._k(e.keyCode,"enter",13,e.key,"Enter")?null:(e.stopPropagation(),t.onEnterKeypress(e))},mouseenter:t.onMouseEnter,mouseleave:t.onMouseLeave,touchstart:function(e){return 
e.stopPropagation(),t.onTouchStart(e)},mousedown:function(e){t.onMouseDown(e)},mouseup:t.onMouseUp,touchend:t.onMouseUp,touchmove:t.onTouchMove,dblclick:t.onDblClick,dragstart:function(e){t.draggable&&t.onDragStart(e)},dragend:function(e){t.draggable&&t.onDragEnd()}}},[t.vuecal.editEvents.delete&&t.event.deletable?n("div",{staticClass:"vuecal__event-delete",on:{click:function(e){return e.stopPropagation(),t.deleteEvent(e)},touchstart:function(e){return e.stopPropagation(),t.touchDeleteEvent(e)}}},[t._v(t._s(t.vuecal.texts.deleteEvent))]):t._e(),t._t("event",null,{event:t.event,view:t.view.id}),t.resizable?n("div",{staticClass:"vuecal__event-resize-handle",attrs:{contenteditable:"false"},on:{mousedown:function(e){return e.stopPropagation(),e.preventDefault(),t.onResizeHandleMouseDown(e)},touchstart:function(e){return e.stopPropagation(),e.preventDefault(),t.onResizeHandleMouseDown(e)}}}):t._e()],2)},rt=[],at={inject:["vuecal","utils","modules","view","domEvents","editEvents"],props:{cellFormattedDate:{type:String,default:""},event:{type:Object,default:function(){return{}}},cellEvents:{type:Array,default:function(){return[]}},overlaps:{type:Array,default:function(){return[]}},eventPosition:{type:Number,default:0},overlapsStreak:{type:Number,default:0},allDay:{type:Boolean,default:!1}},data:function(){return{touch:{dragThreshold:30,startX:0,startY:0,dragged:!1}}},methods:{onMouseDown:function(t){var e=this,n=arguments.length>1&&void 0!==arguments[1]&&arguments[1];if("ontouchstart"in window&&!n)return!1;var 
i=this.domEvents,r=i.clickHoldAnEvent,a=i.focusAnEvent,s=i.resizeAnEvent,o=i.dragAnEvent;if(a._eid===this.event._eid&&r._eid===this.event._eid)return!0;this.focusEvent(),r._eid=null,this.vuecal.editEvents.delete&&this.event.deletable&&(r.timeoutId=setTimeout((function(){s._eid||o._eid||(r._eid=e.event._eid,e.event.deleting=!0)}),r.timeout))},onMouseUp:function(t){this.domEvents.focusAnEvent._eid!==this.event._eid||this.touch.dragged||(this.domEvents.focusAnEvent.mousedUp=!0),this.touch.dragged=!1},onMouseEnter:function(t){t.preventDefault(),this.vuecal.emitWithEvent("event-mouse-enter",this.event)},onMouseLeave:function(t){t.preventDefault(),this.vuecal.emitWithEvent("event-mouse-leave",this.event)},onTouchMove:function(t){if("function"===typeof this.vuecal.onEventClick){var e=t.touches[0],n=e.clientX,i=e.clientY,r=this.touch,a=r.startX,s=r.startY,o=r.dragThreshold;(Math.abs(n-a)>o||Math.abs(i-s)>o)&&(this.touch.dragged=!0)}},onTouchStart:function(t){this.touch.startX=t.touches[0].clientX,this.touch.startY=t.touches[0].clientY,this.onMouseDown(t,!0)},onEnterKeypress:function(t){if("function"===typeof this.vuecal.onEventClick)return this.vuecal.onEventClick(this.event,t)},onDblClick:function(t){if("function"===typeof this.vuecal.onEventDblclick)return this.vuecal.onEventDblclick(this.event,t)},onDragStart:function(t){this.dnd&&this.dnd.eventDragStart(t,this.event)},onDragEnd:function(){this.dnd&&this.dnd.eventDragEnd(this.event)},onResizeHandleMouseDown:function(){this.focusEvent(),this.domEvents.dragAnEvent._eid=null,this.domEvents.resizeAnEvent=Object.assign(this.domEvents.resizeAnEvent,{_eid:this.event._eid,start:(this.segment||this.event).start,split:this.event.split||null,segment:!!this.segment&&this.utils.date.formatDateLite(this.segment.start),originalEnd:new Date((this.segment||this.event).end),originalEndTimeMinutes:this.event.endTimeMinutes}),this.event.resizing=!0},deleteEvent:function(){var t=arguments.length>0&&void 
0!==arguments[0]&&arguments[0];if("ontouchstart"in window&&!t)return!1;this.utils.event.deleteAnEvent(this.event)},touchDeleteEvent:function(t){this.deleteEvent(!0)},cancelDeleteEvent:function(){this.event.deleting=!1},focusEvent:function(){var t=this.domEvents.focusAnEvent,e=t._eid;if(e!==this.event._eid){if(e){var n=this.view.events.find((function(t){return t._eid===e}));n&&(n.focused=!1)}this.vuecal.cancelDelete(),this.vuecal.emitWithEvent("event-focus",this.event),t._eid=this.event._eid,this.event.focused=!0}}},computed:{eventDimensions:function(){var t=this.segment||this.event,e=t.startTimeMinutes,n=t.endTimeMinutes,i=e-this.vuecal.timeFrom,r=Math.max(Math.round(i*this.vuecal.timeCellHeight/this.vuecal.timeStep),0);i=Math.min(n,this.vuecal.timeTo)-this.vuecal.timeFrom;var a=Math.round(i*this.vuecal.timeCellHeight/this.vuecal.timeStep),s=Math.max(a-r,5);return{top:r,height:s}},eventStyles:function(){if(this.event.allDay||!this.vuecal.time||!this.event.endTimeMinutes||"month"===this.view.id||this.allDay)return{};var t=100/Math.min(this.overlaps.length+1,this.overlapsStreak),e=100/(this.overlaps.length+1)*this.eventPosition;this.vuecal.minEventWidth&&t<this.vuecal.minEventWidth&&(t=this.vuecal.minEventWidth,e=(100-this.vuecal.minEventWidth)/this.overlaps.length*this.eventPosition);var n=this.eventDimensions,i=n.top,r=n.height;return{top:"".concat(i,"px"),height:"".concat(r,"px"),width:"".concat(t,"%"),left:this.event.left&&"".concat(this.event.left,"px")||"".concat(e,"%")}},eventClasses:function(){var t,e=this.segment||{},n=e.isFirstDay,i=e.isLastDay;return 
t={},Object(l["a"])(t,this.event.class,!!this.event.class),Object(l["a"])(t,"vuecal__event--focus",this.event.focused),Object(l["a"])(t,"vuecal__event--resizing",this.event.resizing),Object(l["a"])(t,"vuecal__event--background",this.event.background),Object(l["a"])(t,"vuecal__event--deletable",this.event.deleting),Object(l["a"])(t,"vuecal__event--all-day",this.event.allDay),Object(l["a"])(t,"vuecal__event--dragging",!this.event.draggingStatic&&this.event.dragging),Object(l["a"])(t,"vuecal__event--static",this.event.dragging&&this.event.draggingStatic),Object(l["a"])(t,"vuecal__event--multiple-days",!!this.segment),Object(l["a"])(t,"event-start",this.segment&&n&&!i),Object(l["a"])(t,"event-middle",this.segment&&!n&&!i),Object(l["a"])(t,"event-end",this.segment&&i&&!n),t},segment:function(){return this.event.segments&&this.event.segments[this.cellFormattedDate]||null},draggable:function(){var t=this.event,e=t.draggable,n=t.background,i=t.daysCount;return this.vuecal.editEvents.drag&&e&&!n&&1===i},resizable:function(){var t=this.vuecal,e=t.editEvents,n=t.time;return e.resize&&this.event.resizable&&n&&!this.allDay&&(!this.segment||this.segment&&this.segment.isLastDay)&&"month"!==this.view.id},dnd:function(){return this.modules.dnd}}},st=at,ot=(n("61f2"),R(st,it,rt,!1,null,null,null)),lt=ot.exports,ut={inject:["vuecal","utils","modules","view","domEvents"],components:{Event:lt},props:{options:{type:Object,default:function(){return{}}},editEvents:{type:Object,required:!0},data:{type:Object,required:!0},cellSplits:{type:Array,default:function(){return[]}},minTimestamp:{type:[Number,null],default:null},maxTimestamp:{type:[Number,null],default:null},cellWidth:{type:[Number,Boolean],default:!1},allDay:{type:Boolean,default:!1}},data:function(){return{cellOverlaps:{},cellOverlapsStreak:1,timeAtCursor:null,highlighted:!1,highlightedSplit:null}},methods:{getSplitAtCursor:function(t){var 
e=t.target,n=e.classList.contains("vuecal__cell-split"),i=n?e:this.vuecal.findAncestor(e,"vuecal__cell-split");return i&&(i=i.attributes["data-split"].value,parseInt(i).toString()===i.toString()&&(i=parseInt(i))),i||null},splitClasses:function(t){return Object(l["a"])({"vuecal__cell-split":!0,"vuecal__cell-split--highlighted":this.highlightedSplit===t.id},t.class,!!t.class)},checkCellOverlappingEvents:function(){if(this.options.time&&this.eventsCount&&!this.splitsCount)if(1===this.eventsCount)this.cellOverlaps=[],this.cellOverlapsStreak=1;else{var t=this.utils.event.checkCellOverlappingEvents(this.events,this.options),e=nt(t,2);this.cellOverlaps=e[0],this.cellOverlapsStreak=e[1]}},isDOMElementAnEvent:function(t){return this.vuecal.isDOMElementAnEvent(t)},selectCell:function(t){var e=arguments.length>1&&void 0!==arguments[1]&&arguments[1],n=this.splitsCount?this.getSplitAtCursor(t):null;this.utils.cell.selectCell(e,this.timeAtCursor,n),this.timeAtCursor=null},onCellkeyPressEnter:function(t){this.isSelected||this.onCellFocus(t);var e=this.splitsCount?this.getSplitAtCursor(t):null;this.utils.cell.keyPressEnterCell(this.timeAtCursor,e),this.timeAtCursor=null},onCellFocus:function(t){if(!this.isSelected&&!this.isDisabled){this.isSelected=this.data.startDate;var e=this.splitsCount?this.getSplitAtCursor(t):null,n=this.timeAtCursor||this.data.startDate;this.vuecal.$emit("cell-focus",e?{date:n,split:e}:n)}},onCellMouseDown:function(t){var e=arguments.length>2&&void 0!==arguments[2]&&arguments[2];if("ontouchstart"in window&&!e)return!1;this.isSelected||this.onCellFocus(t);var n=this.domEvents,i=n.clickHoldACell,r=n.focusAnEvent;this.domEvents.cancelClickEventCreation=!1,i.eventCreated=!1,this.timeAtCursor=new Date(this.data.startDate);var a=this.vuecal.minutesAtCursor(t),s=a.minutes,o=a.cursorCoords.y;this.timeAtCursor.setMinutes(s);var l=this.isDOMElementAnEvent(t.target);!l&&r._eid&&((this.view.events.find((function(t){return 
t._eid===r._eid}))||{}).focused=!1),this.editEvents.create&&!l&&this.setUpEventCreation(t,o)},setUpEventCreation:function(t,e){if(this.options.dragToCreateEvent&&["week","day"].includes(this.view.id)){var n=this.domEvents.dragCreateAnEvent;if(n.startCursorY=e,n.split=this.splitsCount?this.getSplitAtCursor(t):null,n.start=this.timeAtCursor,this.options.snapToTime){var i=60*this.timeAtCursor.getHours()+this.timeAtCursor.getMinutes(),r=i+this.options.snapToTime/2;i=r-r%this.options.snapToTime,n.start.setHours(0,i,0,0)}}else this.options.cellClickHold&&["month","week","day"].includes(this.view.id)&&this.setUpCellHoldTimer(t)},setUpCellHoldTimer:function(t){var e=this,n=this.domEvents.clickHoldACell;n.cellId="".concat(this.vuecal._uid,"_").concat(this.data.formattedDate),n.split=this.splitsCount?this.getSplitAtCursor(t):null,n.timeoutId=setTimeout((function(){if(n.cellId&&!e.domEvents.cancelClickEventCreation){var t=e.utils.event.createAnEvent(e.timeAtCursor,null,n.split?{split:n.split}:{}),i=t._eid;n.eventCreated=i}}),n.timeout)},onCellTouchStart:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null;this.onCellMouseDown(t,e,!0)},onCellClick:function(t){this.isDOMElementAnEvent(t.target)||this.selectCell(t)},onCellDblClick:function(t){var e=new Date(this.data.startDate);e.setMinutes(this.vuecal.minutesAtCursor(t).minutes);var n=this.splitsCount?this.getSplitAtCursor(t):null;this.vuecal.$emit("cell-dblclick",n?{date:e,split:n}:e),this.options.dblclickToNavigate&&this.vuecal.switchToNarrowerView()},onCellContextMenu:function(t){t.stopPropagation(),t.preventDefault();var e=new Date(this.data.startDate),n=this.vuecal.minutesAtCursor(t),i=n.cursorCoords,r=n.minutes;e.setMinutes(r);var a=this.splitsCount?this.getSplitAtCursor(t):null;this.vuecal.$emit("cell-contextmenu",Object(E["a"])(Object(E["a"])(Object(E["a"])({date:e},i),a||{}),{},{e:t}))}},computed:{dnd:function(){return this.modules.dnd},nowInMinutes:function(){return 
this.utils.date.dateToMinutes(this.vuecal.now)},isBeforeMinDate:function(){return null!==this.minTimestamp&&this.minTimestamp>this.data.endDate.getTime()},isAfterMaxDate:function(){return this.maxTimestamp&&this.maxTimestamp<this.data.startDate.getTime()},isDisabled:function(){var t=this.options.disableDays,e=this.vuecal.isYearsOrYearView;return!(!t.length||!t.includes(this.data.formattedDate)||e)||(this.isBeforeMinDate||this.isAfterMaxDate)},isSelected:{get:function(){var t=!1,e=this.view.selectedDate;return t="years"===this.view.id?e.getFullYear()===this.data.startDate.getFullYear():"year"===this.view.id?e.getFullYear()===this.data.startDate.getFullYear()&&e.getMonth()===this.data.startDate.getMonth():e.getTime()===this.data.startDate.getTime(),t},set:function(t){this.view.selectedDate=t}},isWeekOrDayView:function(){return["week","day"].includes(this.view.id)},transitionDirection:function(){return this.vuecal.transitionDirection},specialHours:function(){var t=this;return this.data.specialHours.map((function(e){var n=e.from,i=e.to;return n=Math.max(n,t.options.timeFrom),i=Math.min(i,t.options.timeTo),Object(E["a"])(Object(E["a"])({},e),{},{height:(i-n)*t.timeScale,top:(n-t.options.timeFrom)*t.timeScale})}))},events:function(){var t=this,e=this.data,n=e.startDate,i=e.endDate,r=[];if(!["years","year"].includes(this.view.id)||this.options.eventsCountOnYearView){var a;if(r=this.view.events.slice(0),"month"===this.view.id)(a=r).push.apply(a,h(this.view.outOfScopeEvents));if(r=r.filter((function(e){return t.utils.event.eventInRange(e,n,i)})),this.options.showAllDayEvents&&"month"!==this.view.id&&(r=r.filter((function(e){return!!e.allDay===t.allDay}))),this.options.time&&this.isWeekOrDayView&&!this.allDay){var s=this.options,o=s.timeFrom,l=s.timeTo;r=r.filter((function(e){var n=e.daysCount>1&&e.segments[t.data.formattedDate]||{},i=1===e.daysCount&&e.startTimeMinutes<l&&e.endTimeMinutes>o,r=e.daysCount>1&&n.startTimeMinutes<l&&n.endTimeMinutes>o,a=!1;return 
e.allDay||i||r||a}))}!this.options.time||!this.isWeekOrDayView||this.options.showAllDayEvents&&this.allDay||r.sort((function(t,e){return t.start<e.start?-1:1})),this.cellSplits.length||this.$nextTick(this.checkCellOverlappingEvents)}return r},eventsCount:function(){return this.events.length},splits:function(){var t=this;return this.cellSplits.map((function(e,n){var i=t.events.filter((function(t){return t.split===e.id})),r=t.utils.event.checkCellOverlappingEvents(i.filter((function(t){return!t.background&&!t.allDay})),t.options),a=nt(r,2),s=a[0],o=a[1];return Object(E["a"])(Object(E["a"])({},e),{},{overlaps:s,overlapsStreak:o,events:i})}))},splitsCount:function(){return this.splits.length},cellClasses:function(){var t;return t={},Object(l["a"])(t,this.data.class,!!this.data.class),Object(l["a"])(t,"vuecal__cell--current",this.data.current),Object(l["a"])(t,"vuecal__cell--today",this.data.today),Object(l["a"])(t,"vuecal__cell--out-of-scope",this.data.outOfScope),Object(l["a"])(t,"vuecal__cell--before-min",this.isDisabled&&this.isBeforeMinDate),Object(l["a"])(t,"vuecal__cell--after-max",this.isDisabled&&this.isAfterMaxDate),Object(l["a"])(t,"vuecal__cell--disabled",this.isDisabled),Object(l["a"])(t,"vuecal__cell--selected",this.isSelected),Object(l["a"])(t,"vuecal__cell--highlighted",this.highlighted),Object(l["a"])(t,"vuecal__cell--has-splits",this.splitsCount),Object(l["a"])(t,"vuecal__cell--has-events",this.eventsCount),t},cellStyles:function(){return Object(E["a"])({},this.cellWidth?{width:"".concat(this.cellWidth,"%")}:{})},timelineVisible:function(){var t=this.options,e=t.time,n=t.timeTo;return this.data.today&&this.isWeekOrDayView&&e&&!this.allDay&&this.nowInMinutes<=n},todaysTimePosition:function(){if(this.data.today&&this.options.time){var t=this.nowInMinutes-this.options.timeFrom;return Math.round(t*this.timeScale)}},timeScale:function(){return 
this.options.timeCellHeight/this.options.timeStep}}},ct=ut,dt=(n("95dd"),R(ct,J,Q,!1,null,null,null)),ft=dt.exports,vt={inject:["vuecal","view","editEvents"],components:{"vuecal-cell":ft},props:{options:{type:Object,required:!0},cells:{type:Array,required:!0},label:{type:String,required:!0},daySplits:{type:Array,default:function(){return[]}},shortEvents:{type:Boolean,default:!0},height:{type:String,default:""},cellOrSplitMinWidth:{type:Number,default:null}},computed:{hasCellOrSplitWidth:function(){return!!(this.options.minCellWidth||this.daySplits.length&&this.options.minSplitWidth)}}},ht=vt,pt=R(ht,X,K,!1,null,null,null),yt=pt.exports,mt=(n("1332"),1440),gt={weekDays:Array(7).fill(""),weekDaysShort:[],months:Array(12).fill(""),years:"",year:"",month:"",week:"",day:"",today:"",noEvent:"",allDay:"",deleteEvent:"",createEvent:"",dateFormat:"dddd MMMM D, YYYY",am:"am",pm:"pm"},bt=["years","year","month","week","day"],wt=new C(gt),Dt={name:"vue-cal",components:{"vuecal-cell":ft,"vuecal-header":q,WeekdaysHeadings:B,AllDayBar:yt},provide:function(){return{vuecal:this,utils:this.utils,modules:this.modules,previous:this.previous,next:this.next,switchView:this.switchView,updateSelectedDate:this.updateSelectedDate,editEvents:this.editEvents,view:this.view,domEvents:this.domEvents}},props:{activeView:{type:String,default:"week"},allDayBarHeight:{type:[String,Number],default:"25px"},cellClickHold:{type:Boolean,default:!0},cellContextmenu:{type:Boolean,default:!1},clickToNavigate:{type:Boolean,default:!1},dblclickToNavigate:{type:Boolean,default:!0},disableDatePrototypes:{type:Boolean,default:!1},disableDays:{type:Array,default:function(){return[]}},disableViews:{type:Array,default:function(){return[]}},dragToCreateEvent:{type:Boolean,default:!0},dragToCreateThreshold:{type:Number,default:15},editableEvents:{type:[Boolean,Object],default:!1},events:{type:Array,default:function(){return[]}},eventsCountOnYearView:{type:Boolean,default:!1},eventsOnMonthView:{type:[Boolean,String],d
efault:!1},hideBody:{type:Boolean,default:!1},hideTitleBar:{type:Boolean,default:!1},hideViewSelector:{type:Boolean,default:!1},hideWeekdays:{type:Array,default:function(){return[]}},hideWeekends:{type:Boolean,default:!1},locale:{type:[String,Object],default:"en"},maxDate:{type:[String,Date],default:""},minCellWidth:{type:Number,default:0},minDate:{type:[String,Date],default:""},minEventWidth:{type:Number,default:0},minSplitWidth:{type:Number,default:0},onEventClick:{type:[Function,null],default:null},onEventCreate:{type:[Function,null],default:null},onEventDblclick:{type:[Function,null],default:null},overlapsPerTimeStep:{type:Boolean,default:!1},resizeX:{type:Boolean,default:!1},selectedDate:{type:[String,Date],default:""},showAllDayEvents:{type:[Boolean,String],default:!1},showWeekNumbers:{type:[Boolean,String],default:!1},snapToTime:{type:Number,default:0},small:{type:Boolean,default:!1},specialHours:{type:Object,default:function(){return{}}},splitDays:{type:Array,default:function(){return[]}},startWeekOnSunday:{type:Boolean,default:!1},stickySplitLabels:{type:Boolean,default:!1},time:{type:Boolean,default:!0},timeCellHeight:{type:Number,default:40},timeFormat:{type:String,default:""},timeFrom:{type:Number,default:0},timeStep:{type:Number,default:60},timeTo:{type:Number,default:mt},todayButton:{type:Boolean,default:!1},transitions:{type:Boolean,default:!0},twelveHour:{type:Boolean,default:!1},watchRealTime:{type:Boolean,default:!1},xsmall:{type:Boolean,default:!1}},data:function(){return{ready:!1,texts:Object(E["a"])({},gt),utils:{date:!!this.disableDatePrototypes&&wt.removePrototypes()||wt,cell:null,event:null},modules:{dnd:null},view:{id:"",title:"",startDate:null,endDate:null,firstCellDate:null,lastCellDate:null,selectedDate:null,events:[]},eventIdIncrement:1,now:new 
Date,timeTickerIds:[null,null],domEvents:{resizeAnEvent:{_eid:null,start:null,split:null,segment:null,originalEndTimeMinutes:0,originalEnd:null,end:null,startCell:null,endCell:null},dragAnEvent:{_eid:null},dragCreateAnEvent:{startCursorY:null,start:null,split:null,event:null},focusAnEvent:{_eid:null,mousedUp:!1},clickHoldAnEvent:{_eid:null,timeout:1200,timeoutId:null},dblTapACell:{taps:0,timeout:500},clickHoldACell:{cellId:null,split:null,timeout:1200,timeoutId:null,eventCreated:!1},cancelClickEventCreation:!1},mutableEvents:[],transitionDirection:"right"}},methods:{loadLocale:function(t){var e=this;if("object"===p(this.locale))return this.texts=Object.assign({},gt,t),void this.utils.date.updateTexts(this.texts);"en"===this.locale?this.texts=Object.assign({},gt,n("0a96")):n("4a53")("./"+t).then((function(t){e.texts=Object.assign({},gt,t.default),e.utils.date.updateTexts(e.texts)}))},loadDragAndDrop:function(){var t=this;n.e(39).then(n.bind(null,"a691f")).then((function(e){var n=e.DragAndDrop;t.modules.dnd=new n(t)})).catch((function(){return console.warn("Vue Cal: Missing drag & drop module.")}))},validateView:function(t){return bt.includes(t)||(console.error('Vue Cal: invalid active-view parameter provided: "'.concat(t,'".\nA valid view must be one of: ').concat(bt.join(", "),".")),t="week"),this.enabledViews.includes(t)||(console.warn('Vue Cal: the provided active-view "'.concat(t,'" is disabled. 
Using the "').concat(this.enabledViews[0],'" view instead.')),t=this.enabledViews[0]),t},switchToNarrowerView:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:null;this.transitionDirection="right";var e=this.enabledViews[this.enabledViews.indexOf(this.view.id)+1];e&&this.switchView(e,t)},switchView:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null,n=arguments.length>2&&void 0!==arguments[2]&&arguments[2];t=this.validateView(t);var i=this.utils.date,r=this.view.startDate&&this.view.startDate.getTime();if(this.transitions&&n){if(this.view.id===t)return;var a=this.enabledViews;this.transitionDirection=a.indexOf(this.view.id)>a.indexOf(t)?"left":"right"}var s=this.view.id;switch(this.view.events=[],this.view.id=t,this.view.firstCellDate=null,this.view.lastCellDate=null,e||(e=this.view.selectedDate||this.view.startDate),t){case"years":this.view.startDate=new Date(25*Math.floor(e.getFullYear()/25)||2e3,0,1),this.view.endDate=new Date(this.view.startDate.getFullYear()+25,0,1),this.view.endDate.setSeconds(-1);break;case"year":this.view.startDate=new Date(e.getFullYear(),0,1),this.view.endDate=new Date(e.getFullYear()+1,0,1),this.view.endDate.setSeconds(-1);break;case"month":this.view.startDate=new Date(e.getFullYear(),e.getMonth(),1),this.view.endDate=new Date(e.getFullYear(),e.getMonth()+1,1),this.view.endDate.setSeconds(-1);var o=new Date(this.view.startDate);if(o.getDay()!==(this.startWeekOnSunday?0:1)&&(o=i.getPreviousFirstDayOfWeek(o,this.startWeekOnSunday)),this.view.firstCellDate=o,this.view.lastCellDate=i.addDays(o,41),this.view.lastCellDate.setHours(23,59,59,0),this.hideWeekends){if([0,6].includes(this.view.firstCellDate.getDay())){var l=6!==this.view.firstCellDate.getDay()||this.startWeekOnSunday?1:2;this.view.firstCellDate=i.addDays(this.view.firstCellDate,l)}if([0,6].includes(this.view.startDate.getDay())){var 
u=6===this.view.startDate.getDay()?2:1;this.view.startDate=i.addDays(this.view.startDate,u)}if([0,6].includes(this.view.lastCellDate.getDay())){var c=0!==this.view.lastCellDate.getDay()||this.startWeekOnSunday?1:2;this.view.lastCellDate=i.subtractDays(this.view.lastCellDate,c)}if([0,6].includes(this.view.endDate.getDay())){var d=0===this.view.endDate.getDay()?2:1;this.view.endDate=i.subtractDays(this.view.endDate,d)}}break;case"week":e=i.getPreviousFirstDayOfWeek(e,this.startWeekOnSunday);var f=this.hideWeekends?5:7;this.view.startDate=this.hideWeekends&&this.startWeekOnSunday?i.addDays(e,1):e,this.view.startDate.setHours(0,0,0,0),this.view.endDate=i.addDays(e,f),this.view.endDate.setSeconds(-1);break;case"day":this.view.startDate=e,this.view.startDate.setHours(0,0,0,0),this.view.endDate=new Date(e),this.view.endDate.setHours(23,59,59,0);break}this.addEventsToView();var v=this.view.startDate&&this.view.startDate.getTime();if((s!==t||v!==r)&&(this.$emit("update:activeView",t),this.ready)){var h=this.view.startDate,p=Object(E["a"])(Object(E["a"])({view:t,startDate:h,endDate:this.view.endDate},this.isMonthView?{firstCellDate:this.view.firstCellDate,lastCellDate:this.view.lastCellDate,outOfScopeEvents:this.view.outOfScopeEvents.map(this.cleanupEvent)}:{}),{},{events:this.view.events.map(this.cleanupEvent)},this.isWeekView?{week:i.getWeek(this.startWeekOnSunday?i.addDays(h,1):h)}:{});this.$emit("view-change",p)}},previous:function(){this.previousNext(!1)},next:function(){this.previousNext()},previousNext:function(){var t=!(arguments.length>0&&void 0!==arguments[0])||arguments[0],e=this.utils.date;this.transitionDirection=t?"right":"left";var n=t?1:-1,i=null,r=this.view,a=r.startDate,s=r.id;switch(s){case"years":i=new Date(a.getFullYear()+25*n,0,1);break;case"year":i=new Date(a.getFullYear()+1*n,1,1);break;case"month":i=new 
Date(a.getFullYear(),a.getMonth()+1*n,1);break;case"week":i=e[t?"addDays":"subtractDays"](e.getPreviousFirstDayOfWeek(a,this.startWeekOnSunday),7);break;case"day":i=e[t?"addDays":"subtractDays"](a,1);break}i&&this.switchView(s,i)},addEventsToView:function(){var t,e=this,n=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[],i=this.utils.event,r=this.view,a=r.startDate,s=r.endDate,o=r.firstCellDate,l=r.lastCellDate;if(n.length||(this.view.events=[]),n=n.length?n:h(this.mutableEvents),n&&(!this.isYearsOrYearView||this.eventsCountOnYearView)){var u=n.filter((function(t){return i.eventInRange(t,a,s)}));this.isYearsOrYearView||this.isMonthView&&!this.eventsOnMonthView||(u=u.map((function(t){return t.daysCount>1?i.createEventSegments(t,o||a,l||s):t}))),(t=this.view.events).push.apply(t,h(u)),this.isMonthView&&(this.view.outOfScopeEvents=[],n.forEach((function(t){(i.eventInRange(t,o,a)||i.eventInRange(t,s,l))&&(e.view.events.some((function(e){return e._eid===t._eid}))||e.view.outOfScopeEvents.push(t))})))}},findAncestor:function(t,e){while((t=t.parentElement)&&!t.classList.contains(e));return t},isDOMElementAnEvent:function(t){return t.classList.contains("vuecal__event")||this.findAncestor(t,"vuecal__event")},onMouseMove:function(t){var e=this.domEvents,n=e.resizeAnEvent,i=e.dragAnEvent,r=e.dragCreateAnEvent;(null!==n._eid||null!==i._eid||r.start)&&(t.preventDefault(),n._eid?this.eventResizing(t):this.dragToCreateEvent&&r.start&&this.eventDragCreation(t))},onMouseUp:function(t){var e=this.domEvents,n=e.focusAnEvent,i=e.resizeAnEvent,r=e.clickHoldAnEvent,a=e.clickHoldACell,s=e.dragCreateAnEvent,o=r._eid,l=i._eid,u=!1,c=s.event,d=s.start,f=this.isDOMElementAnEvent(t.target),v=n.mousedUp;if(n.mousedUp=!1,f&&(this.domEvents.cancelClickEventCreation=!0),!a.eventCreated){if(l){var h=i.originalEnd,p=i.originalEndTimeMinutes,y=i.endTimeMinutes,m=this.view.events.find((function(t){return t._eid===i._eid}));if(u=y&&y!==p,m&&m.end.getTime()!==h.getTime()){var 
g=this.mutableEvents.find((function(t){return t._eid===i._eid}));g.endTimeMinutes=m.endTimeMinutes,g.end=m.end;var b=this.cleanupEvent(m),w=Object(E["a"])(Object(E["a"])({},this.cleanupEvent(m)),{},{end:h,endTimeMinutes:m.originalEndTimeMinutes});this.$emit("event-duration-change",{event:b,oldDate:i.originalEnd,originalEvent:w}),this.$emit("event-change",{event:b,originalEvent:w})}m&&(m.resizing=!1),i._eid=null,i.start=null,i.split=null,i.segment=null,i.originalEndTimeMinutes=null,i.originalEnd=null,i.endTimeMinutes=null,i.startCell=null,i.endCell=null}else d&&(c&&(this.emitWithEvent("event-drag-create",c),s.event.resizing=!1),s.start=null,s.split=null,s.event=null);f||l||this.unfocusEvent(),r.timeoutId&&!o&&(clearTimeout(r.timeoutId),r.timeoutId=null),a.timeoutId&&(clearTimeout(a.timeoutId),a.timeoutId=null);var D="function"===typeof this.onEventClick;if(v&&!u&&!o&&!c&&D){var _=this.view.events.find((function(t){return t._eid===n._eid}));return!_&&this.isMonthView&&(_=this.view.outOfScopeEvents.find((function(t){return t._eid===n._eid}))),_&&this.onEventClick(_,t)}}},onKeyUp:function(t){27===t.keyCode&&this.cancelDelete()},eventResizing:function(t){var e=this.domEvents.resizeAnEvent,n=this.view.events.find((function(t){return t._eid===e._eid}))||{segments:{}},i=this.minutesAtCursor(t),r=i.minutes,a=i.cursorCoords,s=n.segments&&n.segments[e.segment],o=this.utils,l=o.date,u=o.event,c=Math.max(r,this.timeFrom+1,(s||n).startTimeMinutes+1);if(n.endTimeMinutes=e.endTimeMinutes=c,this.snapToTime){var d=n.endTimeMinutes+this.snapToTime/2;n.endTimeMinutes=d-d%this.snapToTime}if(s&&(s.endTimeMinutes=n.endTimeMinutes),n.end.setHours(0,n.endTimeMinutes,n.endTimeMinutes===mt?-1:0,0),this.resizeX&&this.isWeekView){n.daysCount=l.countDays(n.start,n.end);var f=this.$refs.cells,v=f.offsetWidth/f.childElementCount,h=Math.floor(a.x/v);if(null===e.startCell&&(e.startCell=h-(n.daysCount-1)),e.endCell!==h){e.endCell=h;var 
p=l.addDays(n.start,h-e.startCell),y=Math.max(l.countDays(n.start,p),1);if(y!==n.daysCount){var m=null;m=y>n.daysCount?u.addEventSegment(n):u.removeEventSegment(n),e.segment=m,n.endTimeMinutes+=.001}}}this.$emit("event-resizing",{_eid:n._eid,end:n.end,endTimeMinutes:n.endTimeMinutes})},eventDragCreation:function(t){var e=this.domEvents.dragCreateAnEvent,n=e.start,i=e.startCursorY,r=e.split,a=new Date(n),s=this.minutesAtCursor(t),o=s.minutes,l=s.cursorCoords.y;if(e.event||!(Math.abs(i-l)<this.dragToCreateThreshold))if(e.event){if(a.setHours(0,o,o===mt?-1:0,0),this.snapToTime){var u=60*a.getHours()+a.getMinutes(),c=u+this.snapToTime/2;u=c-c%this.snapToTime,a.setHours(0,u,0,0)}var d=n<a,f=e.event;f.start=d?n:a,f.end=d?a:n,f.startTimeMinutes=60*f.start.getHours()+f.start.getMinutes(),f.endTimeMinutes=60*f.end.getHours()+f.end.getMinutes()}else{if(e.event=this.utils.event.createAnEvent(n,1,{split:r}),!e.event)return e.start=null,e.split=null,void(e.event=null);e.event.resizing=!0}},unfocusEvent:function(){var t=this.domEvents,e=t.focusAnEvent,n=t.clickHoldAnEvent,i=this.view.events.find((function(t){return t._eid===(e._eid||n._eid)}));e._eid=null,n._eid=null,i&&(i.focused=!1,i.deleting=!1)},cancelDelete:function(){var t=this.domEvents.clickHoldAnEvent;if(t._eid){var e=this.view.events.find((function(e){return e._eid===t._eid}));e&&(e.deleting=!1),t._eid=null,t.timeoutId=null}},onEventTitleBlur:function(t,e){if(e.title!==t.target.innerHTML){var n=e.title;e.title=t.target.innerHTML;var i=this.cleanupEvent(e);this.$emit("event-title-change",{event:i,oldTitle:n}),this.$emit("event-change",{event:i,originalEvent:Object(E["a"])(Object(E["a"])({},i),{},{title:n})})}},updateMutableEvents:function(){var t=this,e=this.utils.date;this.mutableEvents=[],this.events.forEach((function(n){var i="string"===typeof n.start?e.stringToDate(n.start):n.start,r=e.formatDateLite(i),a=e.dateToMinutes(i),s=null;"string"===typeof n.end&&n.end.includes("24:00")?(s=new Date(n.end.replace(" 
24:00","")),s.setHours(23,59,59,0)):s="string"===typeof n.end?e.stringToDate(n.end):n.end;var o=e.formatDateLite(s),l=e.dateToMinutes(s);l&&l!==mt||(!t.time||"string"===typeof n.end&&10===n.end.length?s.setHours(23,59,59,0):s.setSeconds(s.getSeconds()-1),o=e.formatDateLite(s),l=mt);var u=r!==o;n=Object.assign(Object(E["a"])({},t.utils.event.eventDefaults),n,{_eid:"".concat(t._uid,"_").concat(t.eventIdIncrement++),segments:u?{}:null,start:i,startTimeMinutes:a,end:s,endTimeMinutes:l,daysCount:u?e.countDays(i,s):1,class:n.class}),t.mutableEvents.push(n)}))},minutesAtCursor:function(t){return this.utils.cell.minutesAtCursor(t)},createEvent:function(t,e){var n=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return this.utils.event.createAnEvent(t,e,n)},cleanupEvent:function(t){t=Object(E["a"])({},t);var e=["segments","deletable","deleting","titleEditable","resizable","resizing","draggable","dragging","draggingStatic","focused"];return e.forEach((function(e){e in t&&delete t[e]})),t.repeat||delete t.repeat,t},emitWithEvent:function(t,e){this.$emit(t,this.cleanupEvent(e))},updateSelectedDate:function(t){if(t=t&&"string"===typeof t?this.utils.date.stringToDate(t):new Date(t),t&&t instanceof Date){var e=this.view.selectedDate;e&&(this.transitionDirection=e.getTime()>t.getTime()?"left":"right"),t.setHours(0,0,0,0),e&&e.getTime()===t.getTime()||(this.view.selectedDate=t),this.switchView(this.view.id)}},getWeekNumber:function(t){var e=this.utils.date,n=this.firstCellDateWeekNumber,i=n+t,r=this.startWeekOnSunday?1:0;return i>52?e.getWeek(e.addDays(this.view.firstCellDate,7*t+r)):i},timeTick:function(){this.now=new Date,this.timeTickerIds[1]=setTimeout(this.timeTick,6e4)},updateDateTexts:function(){this.utils.date.updateTexts(this.texts)},alignWithScrollbar:function(){if(!document.getElementById("vuecal-align-with-scrollbar")){var t=this.$refs.vuecal.getElementsByClassName("vuecal__scrollbar-check")[0],e=t.offsetWidth-t.children[0].offsetWidth;if(e){var 
n=document.createElement("style");n.id="vuecal-align-with-scrollbar",n.type="text/css",n.innerHTML=".vuecal__weekdays-headings,.vuecal__all-day {padding-right: ".concat(e,"px}"),document.head.appendChild(n)}}},cellOrSplitHasEvents:function(t){var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:null;return t.length&&(!e&&t.length||e&&t.some((function(t){return t.split===e.id})))}},created:function(){this.utils.cell=new M(this),this.utils.event=new I(this,this.utils.date),this.loadLocale(this.locale),this.editEvents.drag&&this.loadDragAndDrop(),this.updateMutableEvents(this.events),this.view.id=this.currentView,this.selectedDate?this.updateSelectedDate(this.selectedDate):(this.view.selectedDate=new Date,this.switchView(this.currentView)),this.time&&this.watchRealTime&&(this.timeTickerIds[0]=setTimeout(this.timeTick,1e3*(60-this.now.getSeconds())))},mounted:function(){var t=this.utils.date,e="ontouchstart"in window,n=this.editEvents,i=n.resize,r=n.drag,a=n.create,s=n.delete,o=n.title,l=this.onEventClick&&"function"===typeof this.onEventClick;(i||r||a||s||o||l)&&window.addEventListener(e?"touchend":"mouseup",this.onMouseUp),(i||r||a&&this.dragToCreateEvent)&&window.addEventListener(e?"touchmove":"mousemove",this.onMouseMove,{passive:!1}),o&&window.addEventListener("keyup",this.onKeyUp),e&&(this.$refs.vuecal.oncontextmenu=function(t){t.preventDefault(),t.stopPropagation()}),this.hideBody||this.alignWithScrollbar();var u=this.view.startDate,c=Object(E["a"])(Object(E["a"])({view:this.view.id,startDate:u,endDate:this.view.endDate},this.isMonthView?{firstCellDate:this.view.firstCellDate,lastCellDate:this.view.lastCellDate}:{}),{},{events:this.view.events.map(this.cleanupEvent)},this.isWeekView?{week:t.getWeek(this.startWeekOnSunday?t.addDays(u,1):u)}:{});this.$emit("ready",c),this.ready=!0},beforeDestroy:function(){var t="ontouchstart"in 
window;window.removeEventListener(t?"touchmove":"mousemove",this.onMouseMove,{passive:!1}),window.removeEventListener(t?"touchend":"mouseup",this.onMouseUp),window.removeEventListener("keyup",this.onKeyUp),this.timeTickerIds[0]&&clearTimeout(this.timeTickerIds[0]),this.timeTickerIds[1]&&clearTimeout(this.timeTickerIds[1]),this.timeTickerIds=[null,null]},computed:{editEvents:function(){return this.editableEvents&&"object"===p(this.editableEvents)?{title:!!this.editableEvents.title,drag:!!this.editableEvents.drag,resize:!!this.editableEvents.resize,create:!!this.editableEvents.create,delete:!!this.editableEvents.delete}:{title:!!this.editableEvents,drag:!!this.editableEvents,resize:!!this.editableEvents,create:!!this.editableEvents,delete:!!this.editableEvents}},views:function(){return{years:{label:this.texts.years,enabled:!this.disableViews.includes("years")},year:{label:this.texts.year,enabled:!this.disableViews.includes("year")},month:{label:this.texts.month,enabled:!this.disableViews.includes("month")},week:{label:this.texts.week,enabled:!this.disableViews.includes("week")},day:{label:this.texts.day,enabled:!this.disableViews.includes("day")}}},currentView:function(){return this.validateView(this.activeView)},enabledViews:function(){var t=this;return Object.keys(this.views).filter((function(e){return t.views[e].enabled}))},hasTimeColumn:function(){return this.time&&this.isWeekOrDayView},isShortMonthView:function(){return this.isMonthView&&"short"===this.eventsOnMonthView},firstCellDateWeekNumber:function(){var t=this.utils.date,e=this.view.firstCellDate;return t.getWeek(this.startWeekOnSunday?t.addDays(e,1):e)},timeCells:function(){for(var t=[],e=this.timeFrom,n=this.timeTo;e<n;e+=this.timeStep)t.push({hours:Math.floor(e/60),minutes:e%60,label:this.utils.date.formatTime(e,this.TimeFormat),value:e});return t},TimeFormat:function(){return 
this.timeFormat||(this.twelveHour?"h:mm{am}":"HH:mm")},daySplits:function(){return(this.splitDays.filter((function(t){return!t.hide}))||[]).map((function(t,e){return Object(E["a"])(Object(E["a"])({},t),{},{id:t.id||e+1})}))},hasSplits:function(){return this.daySplits.length&&this.isWeekOrDayView},hasShortEvents:function(){return"short"===this.showAllDayEvents},cellOrSplitMinWidth:function(){var t=null;return this.hasSplits&&this.minSplitWidth?t=this.visibleDaysCount*this.minSplitWidth*this.daySplits.length:this.minCellWidth&&this.isWeekView&&(t=this.visibleDaysCount*this.minCellWidth),t},allDayBar:function(){var t=this.allDayBarHeight||null;return t&&!isNaN(t)&&(t+="px"),{cells:this.viewCells,options:this.$props,label:this.texts.allDay,shortEvents:this.hasShortEvents,daySplits:this.hasSplits&&this.daySplits||[],cellOrSplitMinWidth:this.cellOrSplitMinWidth,height:t}},minTimestamp:function(){var t=null;return this.minDate&&"string"===typeof this.minDate?t=this.utils.date.stringToDate(this.minDate):this.minDate&&this.minDate instanceof Date&&(t=this.minDate),t?t.getTime():null},maxTimestamp:function(){var t=null;return this.maxDate&&"string"===typeof this.maxDate?t=this.utils.date.stringToDate(this.maxDate):this.maxDate&&this.minDate instanceof Date&&(t=this.maxDate),t?t.getTime():null},weekDays:function(){var t=this,e=this.texts,n=e.weekDays,i=e.weekDaysShort,r=void 0===i?[]:i;return n=n.slice(0).map((function(e,n){return Object(E["a"])(Object(E["a"])({label:e},r.length?{short:r[n]}:{}),{},{hide:t.hideWeekends&&n>=5||t.hideWeekdays.length&&t.hideWeekdays.includes(n+1)})})),this.startWeekOnSunday&&n.unshift(n.pop()),n},weekDaysInHeader:function(){return this.isMonthView||this.isWeekView&&!this.minCellWidth&&!(this.hasSplits&&this.minSplitWidth)},months:function(){return this.texts.months.map((function(t){return{label:t}}))},specialDayHours:function(){var t=this;return this.specialHours&&Object.keys(this.specialHours).length?Array(7).fill("").map((function(e,n){var 
i=t.specialHours[n+1]||[];return Array.isArray(i)||(i=[i]),e=[],i.forEach((function(t,i){var r=t.from,a=t.to,s=t.class;e[i]={day:n+1,from:[null,void 0].includes(r)?null:1*r,to:[null,void 0].includes(a)?null:1*a,class:s||""}})),e})):{}},viewTitle:function(){var t=this.utils.date,e="",n=this.view.startDate,i=n.getFullYear(),r=n.getMonth();switch(this.view.id){case"years":e=this.texts.years;break;case"year":e=i;break;case"month":e="".concat(this.months[r].label," ").concat(i);break;case"week":var a=this.view.endDate,s=n.getFullYear(),o=this.texts.months[n.getMonth()];this.xsmall&&(o=o.substring(0,3));var l="".concat(o," ").concat(s);if(a.getMonth()!==n.getMonth()){var u=a.getFullYear(),c=this.texts.months[a.getMonth()];this.xsmall&&(c=c.substring(0,3)),l=s===u?"".concat(o," - ").concat(c," ").concat(s):this.small?"".concat(o.substring(0,3)," ").concat(s," - ").concat(c.substring(0,3)," ").concat(u):"".concat(o," ").concat(s," - ").concat(c," ").concat(u)}e="".concat(this.texts.week," ").concat(t.getWeek(this.startWeekOnSunday?t.addDays(n,1):n)," (").concat(l,")");break;case"day":e=this.utils.date.formatDate(n,this.texts.dateFormat,this.texts);break}return e},viewCells:function(){var t=this,e=this.utils.date,n=[],i=null,r=!1;this.watchRealTime||(this.now=new Date);var a=this.now;switch(this.view.id){case"years":i=this.view.startDate.getFullYear(),n=Array.apply(null,Array(25)).map((function(t,n){var r=new Date(i+n,0,1),s=new Date(i+n+1,0,1);return s.setSeconds(-1),{startDate:r,formattedDate:e.formatDateLite(r),endDate:s,content:i+n,current:i+n===a.getFullYear()}}));break;case"year":i=this.view.startDate.getFullYear(),n=Array.apply(null,Array(12)).map((function(n,r){var s=new Date(i,r,1),o=new Date(i,r+1,1);return o.setSeconds(-1),{startDate:s,formattedDate:e.formatDateLite(s),endDate:o,content:t.xsmall?t.months[r].label.substr(0,3):t.months[r].label,current:r===a.getMonth()&&i===a.getFullYear()}}));break;case"month":var s=this.view.startDate.getMonth(),o=new 
Date(this.view.firstCellDate);r=!1,n=Array.apply(null,Array(42)).map((function(t,n){var i=e.addDays(o,n),a=new Date(i);a.setHours(23,59,59,0);var l=!r&&e.isToday(i)&&!r++;return{startDate:i,formattedDate:e.formatDateLite(i),endDate:a,content:i.getDate(),today:l,outOfScope:i.getMonth()!==s,class:"vuecal__cell--day".concat(i.getDay()||7)}})),(this.hideWeekends||this.hideWeekdays.length)&&(n=n.filter((function(e){var n=e.startDate.getDay()||7;return!(t.hideWeekends&&n>=6||t.hideWeekdays.length&&t.hideWeekdays.includes(n))})));break;case"week":r=!1;var l=this.view.startDate,u=this.weekDays;n=u.map((function(n,i){var a=e.addDays(l,i),s=new Date(a);s.setHours(23,59,59,0);var o=(a.getDay()||7)-1;return{startDate:a,formattedDate:e.formatDateLite(a),endDate:s,today:!r&&e.isToday(a)&&!r++,specialHours:t.specialDayHours[o]||[]}})).filter((function(t,e){return!u[e].hide}));break;case"day":var c=this.view.startDate,d=new Date(this.view.startDate);d.setHours(23,59,59,0);var f=(c.getDay()||7)-1;n=[{startDate:c,formattedDate:e.formatDateLite(c),endDate:d,today:e.isToday(c),specialHours:this.specialDayHours[f]||[]}];break}return n},visibleDaysCount:function(){return this.isDayView?1:7-this.weekDays.reduce((function(t,e){return t+e.hide}),0)},cellWidth:function(){return 100/this.visibleDaysCount},cssClasses:function(){var t,e=this.domEvents,n=e.resizeAnEvent,i=e.dragAnEvent,r=e.dragCreateAnEvent;return 
t={},Object(l["a"])(t,"vuecal--".concat(this.view.id,"-view"),!0),Object(l["a"])(t,"vuecal--".concat(this.locale),this.locale),Object(l["a"])(t,"vuecal--no-time",!this.time),Object(l["a"])(t,"vuecal--view-with-time",this.hasTimeColumn),Object(l["a"])(t,"vuecal--week-numbers",this.showWeekNumbers&&this.isMonthView),Object(l["a"])(t,"vuecal--twelve-hour",this.twelveHour),Object(l["a"])(t,"vuecal--click-to-navigate",this.clickToNavigate),Object(l["a"])(t,"vuecal--hide-weekends",this.hideWeekends),Object(l["a"])(t,"vuecal--split-days",this.hasSplits),Object(l["a"])(t,"vuecal--sticky-split-labels",this.hasSplits&&this.stickySplitLabels),Object(l["a"])(t,"vuecal--overflow-x",this.minCellWidth&&this.isWeekView||this.hasSplits&&this.minSplitWidth),Object(l["a"])(t,"vuecal--small",this.small),Object(l["a"])(t,"vuecal--xsmall",this.xsmall),Object(l["a"])(t,"vuecal--resizing-event",n._eid),Object(l["a"])(t,"vuecal--drag-creating-event",r.event),Object(l["a"])(t,"vuecal--dragging-event",i._eid),Object(l["a"])(t,"vuecal--events-on-month-view",this.eventsOnMonthView),Object(l["a"])(t,"vuecal--short-events",this.isMonthView&&"short"===this.eventsOnMonthView),Object(l["a"])(t,"vuecal--has-touch","undefined"!==typeof window&&"ontouchstart"in window),t},isYearsOrYearView:function(){return["years","year"].includes(this.view.id)},isYearsView:function(){return"years"===this.view.id},isYearView:function(){return"year"===this.view.id},isMonthView:function(){return"month"===this.view.id},isWeekOrDayView:function(){return["week","day"].includes(this.view.id)},isWeekView:function(){return"week"===this.view.id},isDayView:function(){return"day"===this.view.id}},watch:{events:{handler:function(t,e){this.updateMutableEvents(t),this.addEventsToView()},deep:!0},locale:function(t){this.loadLocale(t)},selectedDate:function(t){this.updateSelectedDate(t)},activeView:function(t){this.switchView(t)}}},_t=Dt,Et=R(_t,s,o,!1,null,null,null),kt=Et.exports;e["default"]=kt},fb6a:function(t,e,n){"use 
strict";var i=n("23e7"),r=n("861d"),a=n("e8b5"),s=n("23cb"),o=n("50c4"),l=n("fc6a"),u=n("8418"),c=n("b622"),d=n("1dde"),f=d("slice"),v=c("species"),h=[].slice,p=Math.max;i({target:"Array",proto:!0,forced:!f},{slice:function(t,e){var n,i,c,d=l(this),f=o(d.length),y=s(t,f),m=s(void 0===e?f:e,f);if(a(d)&&(n=d.constructor,"function"!=typeof n||n!==Array&&!a(n.prototype)?r(n)&&(n=n[v],null===n&&(n=void 0)):n=void 0,n===Array||void 0===n))return h.call(d,y,m);for(i=new(void 0===n?Array:n)(p(m-y,0)),c=0;y<m;y++,c++)y in d&&u(i,c,d[y]);return i.length=c,i}})},fc6a:function(t,e,n){var i=n("44ad"),r=n("1d80");t.exports=function(t){return i(r(t))}},fdbc:function(t,e){t.exports={CSSRuleList:0,CSSStyleDeclaration:0,CSSValueList:0,ClientRectList:0,DOMRectList:0,DOMStringList:0,DOMTokenList:1,DataTransferItemList:0,FileList:0,HTMLAllCollection:0,HTMLCollection:0,HTMLFormElement:0,HTMLSelectElement:0,MediaList:0,MimeTypeArray:0,NamedNodeMap:0,NodeList:1,PaintRequestList:0,Plugin:0,PluginArray:0,SVGLengthList:0,SVGNumberList:0,SVGPathSegList:0,SVGPointList:0,SVGStringList:0,SVGTransformList:0,SourceBufferList:0,StyleSheetList:0,TextTrackCueList:0,TextTrackList:0,TouchList:0}},fdbf:function(t,e,n){var i=n("4930");t.exports=i&&!Symbol.sham&&"symbol"==typeof Symbol.iterator},fea9:function(t,e,n){var i=n("da84");t.exports=i.Promise}})["default"]}));
|
w
|
dfe_ctrl_3.rs
|
#[doc = "Register `dfe_ctrl_3` reader"]
pub struct R(crate::R<DFE_CTRL_3_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<DFE_CTRL_3_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::convert::From<crate::R<DFE_CTRL_3_SPEC>> for R {
fn from(reader: crate::R<DFE_CTRL_3_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `dfe_ctrl_3` writer"]
pub struct W(crate::W<DFE_CTRL_3_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<DFE_CTRL_3_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl core::convert::From<crate::W<DFE_CTRL_3_SPEC>> for W {
fn from(writer: crate::W<DFE_CTRL_3_SPEC>) -> Self {
W(writer)
}
}
#[doc = "Field `rx_adc_4s_q_en` reader - "]
pub struct RX_ADC_4S_Q_EN_R(crate::FieldReader<bool, bool>);
impl RX_ADC_4S_Q_EN_R {
pub(crate) fn new(bits: bool) -> Self
|
}
impl core::ops::Deref for RX_ADC_4S_Q_EN_R {
    type Target = crate::FieldReader<bool, bool>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Writer proxy for `rx_adc_4s_q_en` (bit 26); consumes itself and hands
// back the register writer so calls can be chained.
#[doc = "Field `rx_adc_4s_q_en` writer - "]
pub struct RX_ADC_4S_Q_EN_W<'a> {
    w: &'a mut W,
}
impl<'a> RX_ADC_4S_Q_EN_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Clear bit 26 of the register image, then splice in the new value.
        self.w.bits = (self.w.bits & !(0x01 << 26)) | ((value as u32 & 0x01) << 26);
        self.w
    }
}
// Reader proxy for the 10-bit `rx_adc_4s_q_val` field (bits 16..=25).
#[doc = "Field `rx_adc_4s_q_val` reader - "]
pub struct RX_ADC_4S_Q_VAL_R(crate::FieldReader<u16, u16>);
impl RX_ADC_4S_Q_VAL_R {
    pub(crate) fn new(bits: u16) -> Self {
        RX_ADC_4S_Q_VAL_R(crate::FieldReader::new(bits))
    }
}
impl core::ops::Deref for RX_ADC_4S_Q_VAL_R {
    type Target = crate::FieldReader<u16, u16>;
    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
#[doc = "Field `rx_adc_4s_q_val` writer - "]
pub struct RX_ADC_4S_Q_VAL_W<'a> {
    w: &'a mut W,
}
impl<'a> RX_ADC_4S_Q_VAL_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub unsafe fn bits(self, value: u16) -> &'a mut W {
        // Only the low 10 bits of `value` are used (masked with 0x03ff).
        self.w.bits = (self.w.bits & !(0x03ff << 16)) | ((value as u32 & 0x03ff) << 16);
        self.w
    }
}
#[doc = "Field `rx_adc_4s_i_en` reader - "]
pub struct RX_ADC_4S_I_EN_R(crate::FieldReader<bool, bool>);
impl RX_ADC_4S_I_EN_R {
pub(crate) fn new(bits: bool) -> Self {
RX_ADC_4S_I_EN_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for RX_ADC_4S_I_EN_R {
type Target = crate::FieldReader<bool, bool>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `rx_adc_4s_i_en` writer - "]
pub struct RX_ADC_4S_I_EN_W<'a> {
w: &'a mut W,
}
impl<'a> RX_ADC_4S_I_EN_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | ((value as u32 & 0x01) << 10);
self.w
}
}
#[doc = "Field `rx_adc_4s_i_val` reader - "]
pub struct RX_ADC_4S_I_VAL_R(crate::FieldReader<u16, u16>);
impl RX_ADC_4S_I_VAL_R {
pub(crate) fn new(bits: u16) -> Self {
RX_ADC_4S_I_VAL_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for RX_ADC_4S_I_VAL_R {
type Target = crate::FieldReader<u16, u16>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `rx_adc_4s_i_val` writer - "]
pub struct RX_ADC_4S_I_VAL_W<'a> {
w: &'a mut W,
}
impl<'a> RX_ADC_4S_I_VAL_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u16) -> &'a mut W {
self.w.bits = (self.w.bits & !0x03ff) | (value as u32 & 0x03ff);
self.w
}
}
impl R {
    // Field accessors extracting values from the register snapshot (`self.bits`).
    #[doc = "Bit 26"]
    #[inline(always)]
    pub fn rx_adc_4s_q_en(&self) -> RX_ADC_4S_Q_EN_R {
        RX_ADC_4S_Q_EN_R::new(((self.bits >> 26) & 0x01) != 0)
    }
    #[doc = "Bits 16:25"]
    #[inline(always)]
    pub fn rx_adc_4s_q_val(&self) -> RX_ADC_4S_Q_VAL_R {
        RX_ADC_4S_Q_VAL_R::new(((self.bits >> 16) & 0x03ff) as u16)
    }
    #[doc = "Bit 10"]
    #[inline(always)]
    pub fn rx_adc_4s_i_en(&self) -> RX_ADC_4S_I_EN_R {
        RX_ADC_4S_I_EN_R::new(((self.bits >> 10) & 0x01) != 0)
    }
    #[doc = "Bits 0:9"]
    #[inline(always)]
    pub fn rx_adc_4s_i_val(&self) -> RX_ADC_4S_I_VAL_R {
        RX_ADC_4S_I_VAL_R::new((self.bits & 0x03ff) as u16)
    }
}
impl W {
    // Field writer factories; each returns a proxy that mutates `self.bits`.
    #[doc = "Bit 26"]
    #[inline(always)]
    pub fn rx_adc_4s_q_en(&mut self) -> RX_ADC_4S_Q_EN_W {
        RX_ADC_4S_Q_EN_W { w: self }
    }
    #[doc = "Bits 16:25"]
    #[inline(always)]
    pub fn rx_adc_4s_q_val(&mut self) -> RX_ADC_4S_Q_VAL_W {
        RX_ADC_4S_Q_VAL_W { w: self }
    }
    #[doc = "Bit 10"]
    #[inline(always)]
    pub fn rx_adc_4s_i_en(&mut self) -> RX_ADC_4S_I_EN_W {
        RX_ADC_4S_I_EN_W { w: self }
    }
    #[doc = "Bits 0:9"]
    #[inline(always)]
    pub fn rx_adc_4s_i_val(&mut self) -> RX_ADC_4S_I_VAL_W {
        RX_ADC_4S_I_VAL_W { w: self }
    }
    #[doc = "Writes raw bits to the register."]
    pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
        // Bypasses the per-field writers; caller is responsible for validity.
        self.0.bits(bits);
        self
    }
}
#[doc = "dfe_ctrl_3.\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [dfe_ctrl_3](index.html) module"]
pub struct DFE_CTRL_3_SPEC;
impl crate::RegisterSpec for DFE_CTRL_3_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [dfe_ctrl_3::R](R) reader structure"]
impl crate::Readable for DFE_CTRL_3_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [dfe_ctrl_3::W](W) writer structure"]
impl crate::Writable for DFE_CTRL_3_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets dfe_ctrl_3 to value 0"]
impl crate::Resettable for DFE_CTRL_3_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
|
{
RX_ADC_4S_Q_EN_R(crate::FieldReader::new(bits))
}
|
PluginManager.py
|
import sys
from functools import partial
import pluginmanager
import six
import plugin
from utilities.GeneralUtilities import warning, error, executable_exists
class PluginManager(object):
"""
Frontend for pluginmanager
https://github.com/benhoff/pluginmanager
Also handles plugin.PluginComposed
"""
    def __init__(self):
        """Create the pluginmanager backend and reset all caches."""
        self._backend = pluginmanager.PluginInterface()
        self._plugin_dependency = PluginDependency()
        # Lazily-built plugin.PluginStorage; None means "needs (re)loading".
        self._cache = None
        self._plugins_loaded = 0
        # Replaced by a {name: [reasons]} dict once _load() has run.
        self._cache_disabled = []
        # blacklist files
        def __ends_with_py(s):
            # Only python source files are considered plugin candidates.
            return [x for x in s if x.endswith(".py")]
        self._backend.set_file_filters(__ends_with_py)
        self._backend.add_blacklisted_directories("jarviscli/packages/aiml")
        self._backend.add_blacklisted_directories("jarviscli/packages/memory")
    def add_directory(self, path):
        """Add directory to search path for plugins.

        Invalidates the cache so the next access re-discovers plugins.
        """
        self._backend.add_plugin_directories(path)
        self._cache = None
    def add_plugin(self, plugin):
        """Add single plugin-instance directly to the backend.

        NOTE(review): unlike add_directory(), this does not invalidate
        self._cache — confirm callers add plugins before first _load().
        """
        self._backend.add_plugins(plugin)
    def _load(self):
        """Lazy load: discover, validate and index plugins (no-op when cached)."""
        if self._cache is not None:
            # cache clean!
            return
        self._cache = plugin.PluginStorage()
        self._backend.collect_plugins()
        (enabled, disabled) = self._validate_plugins(self._backend.get_plugins())
        for plugin_to_add in enabled:
            self._load_plugin(plugin_to_add, self._cache)
        # Drop disable-reasons for plugins that are enabled under some
        # other name or alias.
        self._cache_disabled = self._filter_duplicated_disabled(
            enabled, disabled)
        self._plugins_loaded = len(enabled)
def _validate_plugins(self, plugins):
def partition(plugins):
plugins_valid = []
plugins_incompatible = []
for plugin_to_validate in plugins:
if not is_plugin(plugin_to_validate):
continue
compability_check_result = self._plugin_dependency.check(
plugin_to_validate)
if compability_check_result is True:
plugins_valid.append(plugin_to_validate)
else:
item = (
plugin_to_validate.get_name(),
compability_check_result)
plugins_incompatible.append(item)
return (plugins_valid, plugins_incompatible)
def is_plugin(plugin_to_validate):
if not isinstance(plugin_to_validate, pluginmanager.IPlugin):
return False
if plugin_to_validate.get_name() == "plugin":
return False
return True
return partition(plugins)
    def _load_plugin(self, plugin_to_add, plugin_storage):
        """Index *plugin_to_add* under its name and every alias.

        Multi-word names (e.g. "check ram") are stored as nested composite
        plugins: a placeholder for "check" containing "ram".
        """
        def handle_aliases(plugin_to_add):
            # Primary name first, then each alias (lower-cased).
            add_plugin(
                plugin_to_add.get_name().split(' '),
                plugin_to_add,
                plugin_storage)
            for name in plugin_to_add.alias():
                add_plugin(
                    name.lower().split(' '),
                    plugin_to_add,
                    plugin_storage)
        def add_plugin(name, plugin_to_add, parent):
            # *name* is a list of words; recurse until a single word remains.
            if len(name) == 1:
                add_plugin_single(name[0], plugin_to_add, parent)
            else:
                add_plugin_compose(name[0], name[1:], plugin_to_add, parent)
        def add_plugin_single(name, plugin_to_add, parent):
            plugin_existing = parent.get_plugins(name)
            if plugin_existing is None:
                parent.add_plugin(name, plugin_to_add)
            else:
                # A composite placeholder may be upgraded to a real plugin;
                # two callable plugins with the same name are an error.
                if not plugin_existing.is_callable_plugin():
                    parent.update_plugin(name, plugin_to_add)
                else:
                    error("Duplicated plugin {}!".format(name))
        def add_plugin_compose(
                name_first,
                name_remaining,
                plugin_to_add,
                parent):
            plugin_existing = parent.get_plugins(name_first)
            if plugin_existing is None:
                # Create a non-callable placeholder for the leading word.
                plugin_existing = plugin.Plugin()
                plugin_existing._name = name_first
                plugin_existing.__doc__ = ''
                parent.add_plugin(name_first, plugin_existing)
            add_plugin(name_remaining, plugin_to_add, plugin_existing)
        return handle_aliases(plugin_to_add)
def _filter_duplicated_disabled(self, enabled_list, disabled_list):
enabled_names = []
for plugin_enabled in enabled_list:
enabled_names.append(plugin_enabled.get_name())
enabled_names.extend(plugin_enabled.alias())
disabled_unique = {}
for plugin_name, disable_reason in disabled_list:
if plugin_name in enabled_names:
continue
if plugin_name in disabled_unique:
disabled_unique[plugin_name].append(disable_reason)
else:
disabled_unique[plugin_name] = [disable_reason]
return disabled_unique
    def get_plugins(self):
        """
        Returns all loaded plugins as dictionary
        Key: name
        Value: plugin instance
        """
        self._load()
        return self._cache.get_plugins()
    def get_disabled(self):
        """
        Returns all disabled plugins names as dictionary
        Key: name
        Value: List of reasons why disabled
        """
        self._load()
        return self._cache_disabled
def get_number_plugins_loaded(self):
|
class PluginDependency(object):
    """Checks whether a plugin's declared requirements are fulfilled.

    Plugins may declare requirements via ``require()`` (see the plugin
    documentation). ``check()`` returns True when everything is satisfied,
    otherwise a human-readable reason string.
    """

    def __init__(self):
        # Environment the running system provides; plugin requirements
        # must be compatible with these.
        self._requirement_has_network = True
        if six.PY2:
            self._requirement_python = plugin.PYTHON2
        else:
            self._requirement_python = plugin.PYTHON3
        if sys.platform == "darwin":
            self._requirement_platform = plugin.MACOS
        else:
            # Anything that is not macOS is treated as Linux.
            self._requirement_platform = plugin.LINUX

    def _plugin_get_requirements(self, requirements_iter):
        """Normalize require() entries into {category: [values]}."""
        plugin_requirements = {
            "platform": [],
            "python": [],
            "network": [],
            "native": []
        }
        # parse requirements
        for requirement in requirements_iter:
            key = requirement[0]
            values = requirement[1]
            if isinstance(values, str) or isinstance(values, bool):
                values = [values]
            if key in plugin_requirements:
                plugin_requirements[key].extend(values)
            else:
                # fix: message previously read "No supportet requirement"
                warning("{}={}: No supported requirement".format(key, values))
        return plugin_requirements

    def check(self, plugin):
        """Validate *plugin*'s requirements (see plugin.Plugin docs).

        Returns True if all requirements are met, otherwise a string
        describing the first unmet requirement.
        """
        plugin_requirements = self._plugin_get_requirements(plugin.require())
        if not self._check_platform(plugin_requirements["platform"]):
            required_platform = ", ".join(plugin_requirements["platform"])
            return "Requires os {}".format(required_platform)
        if not self._check_python(plugin_requirements["python"]):
            required_python = ", ".join(plugin_requirements["python"])
            return "Requires Python {}".format(required_python)
        if not self._check_network(plugin_requirements["network"], plugin):
            return "Requires networking"
        natives_ok = self._check_native(plugin_requirements["native"], plugin)
        if natives_ok is not True:
            return natives_ok
        return True

    def _check_platform(self, values):
        # No declared platform requirement means "any platform".
        if not values:
            return True
        return self._requirement_platform in values

    def _check_python(self, values):
        # No declared python requirement means "any python".
        if not values:
            return True
        return self._requirement_python in values

    def _check_network(self, values, plugin):
        if True in values:
            if not self._requirement_has_network:
                return False
            self._plugin_patch_network_error_message(plugin)
            return True
        return True

    def _check_native(self, values, plugin):
        """Return True, or a message listing missing native executables."""
        missing = ""
        for native in values:
            if not executable_exists(native):
                missing += native
                missing += " "
        if not missing:
            return True
        message = "Missing native executables {}"
        return message.format(missing)

    def _plugin_patch_network_error_message(self, plugin):
        # NOTE(review): this key is never inserted into plugin.__dict__, so
        # the guard always passes and run() could be wrapped repeatedly if
        # check() ran twice for the same plugin — confirm intended behavior.
        if "plugin._network_error_patched" not in plugin.__dict__:
            plugin.run = partial(
                plugin._plugin_run_with_network_error, plugin.run)
|
self._load()
return self._plugins_loaded
|
reader.rs
|
use std::borrow::Cow;
/// Cursor over a borrowed byte slice; every successful read advances `index`.
pub(crate) struct SliceReader<'s> {
    // Underlying bytes, borrowed for 's.
    buffer: &'s [u8],
    // Offset of the next unread byte; only advanced after a bounds check.
    index: usize,
}
#[allow(dead_code)]
impl<'s> SliceReader<'s> {
pub(crate) fn new(buffer: &'s [u8]) -> Self {
Self { buffer, index: 0 }
}
#[inline]
pub(crate) fn read_u8(&mut self) -> Option<u8> {
if self.index < self.buffer.len() {
let byte = self.buffer[self.index];
self.index += 1;
Some(byte)
} else {
None
}
}
pub(crate) fn read_f64(&mut self) -> Option<f64> {
if self.index + 7 < self.buffer.len() {
let mut buf = [0; 8];
buf.copy_from_slice(&self.buffer[self.index..self.index + 8]);
self.index += 8;
Some(f64::from_be_bytes(buf))
} else {
None
}
}
pub(crate) fn read_int(&mut self, bytes: usize) -> Option<u64> {
if (bytes > 0 && bytes <= 8) && (self.index + bytes - 1 < self.buffer.len()) {
let mut buf = [0; 8];
buf[8 - bytes..].copy_from_slice(&self.buffer[self.index..self.index + bytes]);
self.index += bytes;
Some(u64::from_be_bytes(buf))
} else {
None
}
}
pub(crate) fn read_bytes(&mut self, len: usize) -> Option<&'s [u8]> {
if self.index + len - 1 < self.buffer.len() {
let bytes = &self.buffer[self.index..self.index + len];
self.index += len;
Some(bytes)
} else {
None
}
}
pub(crate) fn read_string(&mut self, len: usize) -> Option<Cow<'s, str>> {
if self.index + len - 1 < self.buffer.len()
|
else {
None
}
}
}
|
{
let s = String::from_utf8_lossy(&self.buffer[self.index..self.index + len]);
self.index += len;
Some(s)
}
|
init.go
|
package airdrop
import (
"flag"
"fmt"
"github.com/fioprotocol/fio-go"
"log"
"os"
"strconv"
"time"
)
// maxRetries bounds how often an operation is retried.
const maxRetries = 3

// init configures the shared logger: timestamp + file/line, tagged with
// an "[airdrop]" prefix.
func init() {
	log.SetFlags(log.LstdFlags | log.Lshortfile | log.Lmsgprefix)
	log.SetPrefix(" [airdrop] ")
}
func Setup() (acc *fio.Account, api *fio.API, tokens float64, reportFile *os.File, dryRun bool) {
var err error
var nodeos, privKey, tpid, file string
flag.StringVar(&nodeos, "u", "", "nodoes URL to connect to, env var: NODEOS")
flag.StringVar(&privKey, "k", "", "WIF key to use, env var: WIF")
flag.StringVar(&tpid, "t", "", "TPID for transactions, env var: TPID")
flag.StringVar(&file, "out", "", "filename for saving CSV of results, default stdout, env var: OUT")
flag.Float64Var(&tokens, "amount", 50.0, "amount to send in airdrop, env var: AMOUNT")
flag.BoolVar(&dryRun, "dry-run", false, "do not send tokens, only show what would be done")
flag.Parse()
if os.Getenv("NODOES") != "" {
nodeos = os.Getenv("NODEOS")
}
if os.Getenv("OUT") != "" {
file = os.Getenv("OUT")
}
if os.Getenv("WIF") != "" {
privKey = os.Getenv("WIF")
}
if os.Getenv("TPID") != "" {
tpid = os.Getenv("TPID")
}
if os.Getenv("AMOUNT") != "" {
var t float64
t, err = strconv.ParseFloat(os.Getenv("AMOUNT"), 64)
if err != nil {
log.Fatal("invalid value for AMOUNT: " + err.Error())
}
tokens = t
}
if tokens == 0
|
if file != "" && !dryRun {
reportFile, err = os.OpenFile(file, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0644)
if err != nil {
log.Fatal(err)
}
}
acc, api, _, err = fio.NewWifConnect(privKey, nodeos)
if err != nil {
log.Fatal(err)
}
if !api.HasHistory() && !dryRun {
log.Fatal("This tool requires a v1 history node to confirm transactions after finality")
}
log.Println("v1 history API is available")
if tpid != "" && fio.SetTpid(tpid) {
log.Println("set TPID to " + tpid)
}
gi, err := api.GetInfo()
if err != nil {
log.Fatal(err)
}
if gi.HeadBlockTime.Time.Before(time.Now().Add(-3 * time.Minute)) {
log.Fatal("head block time is more than 3 minutes behind actual time, is this node syncing?")
}
log.Println("node appears to be synced, starting airdrop.")
if gi.ChainID.String() == fio.ChainIdMainnet {
fmt.Println("\n***************** WARNING ***************** ")
fmt.Println(" Mainnet ChainID detected!")
fmt.Println(" this will spend real tokens")
fmt.Println("sleeping 10 seconds, press CTRL-C to abort.")
fmt.Println("***************** WARNING ***************** ")
fmt.Println("")
time.Sleep(10 * time.Second)
}
return
}
|
{
log.Fatal("amount for airdrop must be non-zero")
}
|
classdef.rs
|
// (C) 2020 Srimanta Barua <[email protected]>
use fnv::FnvHashMap;
use crate::common::GlyphID;
use crate::error::*;
use crate::types::get_u16;
// One format-2 record: an inclusive glyph-ID range sharing a class value.
#[derive(Debug)]
pub(crate) struct ClassRangeRecord {
    start_glyph: u16,
    end_glyph: u16,
    class: u16,
}
/// Wrapper around class definition table
#[derive(Debug)]
pub(crate) enum ClassDef {
    // Format 1: consecutive glyphs starting at `start_glyph`, one class each.
    Fmt1 {
        start_glyph: u16,
        class_values: Vec<u16>,
    },
    // Format 2: single-glyph ranges collapsed into `map` for O(1) lookup;
    // multi-glyph ranges kept in file order in `ranges`.
    Fmt2 {
        map: FnvHashMap<u16, u16>,
        ranges: Vec<ClassRangeRecord>,
    },
}
impl ClassDef {
pub(crate) fn load(data: &[u8]) -> Result<ClassDef> {
match get_u16(data, 0)? {
1 => {
let start_glyph = get_u16(data, 2)?;
let glyph_count = get_u16(data, 4)? as usize;
let mut class_values = Vec::new();
for off in (6..6 + glyph_count * 2).step_by(2) {
class_values.push(get_u16(data, off)?);
}
Ok(ClassDef::Fmt1 {
start_glyph,
class_values,
})
}
2 => {
let range_count = get_u16(data, 2)? as usize;
let mut map = FnvHashMap::default();
let mut ranges = Vec::new();
for off in (4..4 + range_count * 6).step_by(6) {
let start_glyph = get_u16(data, off)?;
let end_glyph = get_u16(data, off + 2)?;
let class = get_u16(data, off + 4)?;
if start_glyph == end_glyph {
map.insert(start_glyph, class);
} else
|
}
Ok(ClassDef::Fmt2 { map, ranges })
}
_ => Err(Error::Invalid),
}
}
pub(crate) fn glyph_class(&self, glyph: GlyphID) -> Option<u32> {
match self {
ClassDef::Fmt1 {
start_glyph,
class_values,
} => {
if glyph.0 < *start_glyph as u32
|| glyph.0 >= *start_glyph as u32 + class_values.len() as u32
{
None
} else {
Some(class_values[(glyph.0 - *start_glyph as u32) as usize] as u32)
}
}
ClassDef::Fmt2 { map, ranges } => {
if let Some(g) = map.get(&(glyph.0 as u16)) {
return Some(*g as u32);
}
for range in ranges {
if (range.end_glyph as u32) < glyph.0 {
break;
}
if (range.start_glyph as u32) > glyph.0 {
continue;
}
return Some((glyph.0 - range.start_glyph as u32) + range.class as u32);
}
None
}
}
}
}
|
{
ranges.push(ClassRangeRecord {
start_glyph,
end_glyph,
class,
});
}
|
trainer.py
|
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
import pandas as pd
import sqlalchemy
from config.config import symbol,backward_steps
import joblib
from df_functions import *
def prepare_single_dataset(df, remove_from_heads: int, remove_from_tails: int, label: int):
    """Build one time-shifted copy of *df* for lag-feature construction.

    Works on a copy: drops *remove_from_tails* rows from the end and
    *remove_from_heads* rows from the start so shifted frames align
    row-for-row, re-numbers rows via add_id(), converts timestamps to
    integers, and suffixes the time/price/quantity columns with *label*.
    The 'symbol' column is dropped (constant per dataset).
    """
    df_copy = df.copy()
    for _ in range(remove_from_tails):
        remove_row_from_tail(df_copy)
    for _ in range(remove_from_heads):
        remove_row_from_head(df_copy)
    add_id(df_copy)
    # pandas Timestamp -> integer nanoseconds so the model gets numbers.
    df_copy.time = df_copy.time.apply(lambda x: x.value)
    df_copy.rename(columns={"time": "time{}".format(label),
                            "price": "price{}".format(label),
                            "quantity": "quantity{}".format(label)},
                   inplace=True)
    df_copy.drop(columns=['symbol'], inplace=True)
    return df_copy
def prepare_dataset(df,steps:int):
    """Build (features, target) from *steps* time-shifted views of *df*.

    Frames 1..steps-1 become lagged feature columns merged on "ID";
    the frame shifted furthest forward supplies the target prices.

    NOTE(review): assumes steps >= 2 — with steps == 1 `datasets` is empty
    and `datasets.pop()` raises IndexError; confirm against config.
    """
    datasets = []
    for i in range(1,steps):
        datasets.append(prepare_single_dataset(df,steps-i,i-1,i))
    df_target = prepare_single_dataset(df,0,steps-1,steps)
    # Join all lagged frames column-wise on the shared row ID.
    result = datasets.pop()
    while len(datasets)>0:
        result = pd.merge(result, datasets.pop(), on="ID")
    target = df_target['price{}'.format(steps)]
    return result,target
def main():
    """Train and persist a LinearRegression price model from streamed trades."""
    # open database
    engine = sqlalchemy.create_engine('sqlite:///data/{}_stream.db'.format(symbol))
    df = pd.read_sql(symbol,engine)
    # prepare dataset
    source,target = prepare_dataset(df,backward_steps)
    # train model
    model = LinearRegression()
    X_train,X_test,y_train,y_test = train_test_split(source,target,test_size=0.33)
    model.fit(X_train,y_train)
    # evaluate model
    score = model.score(X_test,y_test)
    print('score: ',score)
    # save model
    # NOTE(review): embedding the score in the filename writes a new file per
    # run; confirm that is intended rather than a fixed model path.
    filename = 'models/model_{}.sav'.format(score)
    joblib.dump(model, filename)
    #model = joblib.load(filename)
if __name__=='__main__':
    main()
|
df_copy = df.copy()
for _ in range(remove_from_tails):
remove_row_from_tail(df_copy)
|
tables.py
|
# Copyright 2013, Big Switch Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.template import defaultfilters as filters
from django.utils import http
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy
from horizon import exceptions
from horizon import tables
from openstack_dashboard import api
from openstack_dashboard import policy
class AddPoolLink(tables.LinkAction):
    """Table action opening the "Add Pool" modal."""
    name = "addpool"
    verbose_name = _("Add Pool")
    url = "horizon:project:loadbalancers:addpool"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_pool"),)
class AddVipLink(tables.LinkAction):
    """Row action opening the "Add VIP" modal for a pool without a VIP."""
    name = "addvip"
    verbose_name = _("Add VIP")
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_vip"),)

    def get_link_url(self, pool):
        return reverse("horizon:project:loadbalancers:addvip",
                       kwargs={'pool_id': pool.id})

    def allowed(self, request, datum=None):
        # A pool may have only one VIP: hide the action once one exists.
        return not (datum and datum.vip_id)
class AddMemberLink(tables.LinkAction):
    """Table action opening the "Add Member" modal."""
    name = "addmember"
    verbose_name = _("Add Member")
    url = "horizon:project:loadbalancers:addmember"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_member"),)
|
class AddMonitorLink(tables.LinkAction):
    """Table action opening the "Add Monitor" modal."""
    name = "addmonitor"
    verbose_name = _("Add Monitor")
    url = "horizon:project:loadbalancers:addmonitor"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_health_monitor"),)
class DeleteVipLink(policy.PolicyTargetMixin, tables.DeleteAction):
    """Delete action for a pool's VIP; shown only when the pool has one."""
    name = "deletevip"
    policy_rules = (("network", "delete_vip"),)

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Delete VIP",
            u"Delete VIPs",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Scheduled deletion of VIP",
            u"Scheduled deletion of VIPs",
            count
        )

    def allowed(self, request, datum=None):
        # Only meaningful when a VIP actually exists on this pool.
        if datum and not datum.vip_id:
            return False
        return True
class DeletePoolLink(policy.PolicyTargetMixin, tables.DeleteAction):
    """Delete action for a pool; blocked while the pool still has a VIP."""
    name = "deletepool"
    policy_rules = (("network", "delete_pool"),)

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Delete Pool",
            u"Delete Pools",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Scheduled deletion of Pool",
            u"Scheduled deletion of Pools",
            count
        )

    def allowed(self, request, datum=None):
        # The VIP must be deleted first.
        if datum and datum.vip_id:
            return False
        return True
class DeleteMonitorLink(policy.PolicyTargetMixin,
                        tables.DeleteAction):
    """Delete action for a health monitor."""
    name = "deletemonitor"
    policy_rules = (("network", "delete_health_monitor"),)

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Delete Monitor",
            u"Delete Monitors",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Scheduled deletion of Monitor",
            u"Scheduled deletion of Monitors",
            count
        )
class DeleteMemberLink(policy.PolicyTargetMixin, tables.DeleteAction):
    """Delete action for a pool member."""
    name = "deletemember"
    policy_rules = (("network", "delete_member"),)

    @staticmethod
    def action_present(count):
        return ungettext_lazy(
            u"Delete Member",
            u"Delete Members",
            count
        )

    @staticmethod
    def action_past(count):
        return ungettext_lazy(
            u"Scheduled deletion of Member",
            u"Scheduled deletion of Members",
            count
        )
class UpdatePoolLink(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action opening the "Edit Pool" modal."""
    name = "updatepool"
    verbose_name = _("Edit Pool")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "update_pool"),)

    def get_link_url(self, pool):
        base_url = reverse("horizon:project:loadbalancers:updatepool",
                           kwargs={'pool_id': pool.id})
        return base_url
class UpdateVipLink(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action opening the "Edit VIP" modal; hidden without a VIP."""
    name = "updatevip"
    verbose_name = _("Edit VIP")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "update_vip"),)

    def get_link_url(self, pool):
        base_url = reverse("horizon:project:loadbalancers:updatevip",
                           kwargs={'vip_id': pool.vip_id})
        return base_url

    def allowed(self, request, datum=None):
        # Nothing to edit when the pool has no VIP.
        if datum and not datum.vip_id:
            return False
        return True
class UpdateMemberLink(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action opening the "Edit Member" modal."""
    name = "updatemember"
    verbose_name = _("Edit Member")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "update_member"),)

    def get_link_url(self, member):
        base_url = reverse("horizon:project:loadbalancers:updatemember",
                           kwargs={'member_id': member.id})
        return base_url
class UpdateMonitorLink(policy.PolicyTargetMixin, tables.LinkAction):
    """Row action linking to the modal form that edits a health monitor."""
    name = "updatemonitor"
    verbose_name = _("Edit Monitor")
    classes = ("ajax-modal", "btn-update",)
    policy_rules = (("network", "update_health_monitor"),)
    def get_link_url(self, monitor):
        # Build the update-monitor URL directly from this row's monitor id.
        return reverse("horizon:project:loadbalancers:updatemonitor",
                       kwargs={'monitor_id': monitor.id})
def get_vip_link(pool):
    """Return the VIP detail-page URL for ``pool``, or None when it has no VIP."""
    if not pool.vip_id:
        return None
    return reverse("horizon:project:loadbalancers:vipdetails",
                   args=(http.urlquote(pool.vip_id),))
class AddPMAssociationLink(policy.PolicyTargetMixin,
                           tables.LinkAction):
    # Row action opening the modal that associates a health monitor with a pool.
    name = "addassociation"
    verbose_name = _("Associate Monitor")
    url = "horizon:project:loadbalancers:addassociation"
    classes = ("ajax-modal",)
    icon = "plus"
    policy_rules = (("network", "create_pool_health_monitor"),)
    def allowed(self, request, datum=None):
        # Shown only when at least one of the tenant's monitors is not yet
        # associated with this pool (datum is the pool row's data).
        try:
            tenant_id = request.user.tenant_id
            monitors = api.lbaas.pool_health_monitor_list(request,
                                                          tenant_id=tenant_id)
            for m in monitors:
                if m.id not in datum['health_monitors']:
                    return True
        except Exception:
            # NOTE(review): if datum is None the subscript above raises
            # TypeError, which is swallowed here and misreported as a
            # retrieval failure -- confirm callers always pass a datum.
            exceptions.handle(request,
                              _('Failed to retrieve health monitors.'))
        return False
class DeletePMAssociationLink(policy.PolicyTargetMixin,
                              tables.LinkAction):
    """Row action opening the modal that disassociates a health monitor."""
    name = "deleteassociation"
    verbose_name = _("Disassociate Monitor")
    url = "horizon:project:loadbalancers:deleteassociation"
    classes = ("ajax-modal", "btn-danger")
    icon = "remove"
    policy_rules = (("network", "delete_pool_health_monitor"),)
    def allowed(self, request, datum=None):
        # Hidden only when the pool is known to have no associated monitors.
        return not (datum and not datum['health_monitors'])
class PoolsTable(tables.DataTable):
    # Columns of the project-level load-balancer Pools table.
    name = tables.Column("name_or_id",
                         verbose_name=_("Name"),
                         link="horizon:project:loadbalancers:pooldetails")
    description = tables.Column('description', verbose_name=_("Description"))
    # Render 'N/A' when the backend reports no provider for the pool.
    provider = tables.Column('provider', verbose_name=_("Provider"),
                             filters=(lambda v: filters.default(v, _('N/A')),))
    subnet_name = tables.Column('subnet_name', verbose_name=_("Subnet"))
    protocol = tables.Column('protocol', verbose_name=_("Protocol"))
    status = tables.Column('status', verbose_name=_("Status"))
    # VIP column links to the VIP detail page only when the pool has a VIP.
    vip_name = tables.Column('vip_name', verbose_name=_("VIP"),
                             link=get_vip_link)
    class Meta(object):
        name = "poolstable"
        verbose_name = _("Pools")
        table_actions = (AddPoolLink, DeletePoolLink)
        row_actions = (UpdatePoolLink, AddVipLink, UpdateVipLink,
                       DeleteVipLink, AddPMAssociationLink,
                       DeletePMAssociationLink, DeletePoolLink)
def get_pool_link(member):
    """URL of the detail page for the pool that ``member`` belongs to."""
    quoted_id = http.urlquote(member.pool_id)
    return reverse("horizon:project:loadbalancers:pooldetails",
                   args=(quoted_id,))
def get_member_link(member):
    """URL of the detail page for ``member`` itself."""
    quoted_id = http.urlquote(member.id)
    return reverse("horizon:project:loadbalancers:memberdetails",
                   args=(quoted_id,))
class MembersTable(tables.DataTable):
    # Columns of the pool-members table; the address links to member details.
    address = tables.Column('address',
                            verbose_name=_("IP Address"),
                            link=get_member_link,
                            attrs={'data-type': "ip"})
    protocol_port = tables.Column('protocol_port',
                                  verbose_name=_("Protocol Port"))
    weight = tables.Column('weight',
                           verbose_name=_("Weight"))
    # Pool column links back to the owning pool's detail page.
    pool_name = tables.Column('pool_name',
                              verbose_name=_("Pool"), link=get_pool_link)
    status = tables.Column('status', verbose_name=_("Status"))
    class Meta(object):
        name = "memberstable"
        verbose_name = _("Members")
        table_actions = (AddMemberLink, DeleteMemberLink)
        row_actions = (UpdateMemberLink, DeleteMemberLink)
def get_monitor_details(monitor):
    """Summarize an HTTP(S) monitor as 'METHOD path => codes'; '-' otherwise."""
    if monitor.type not in ('HTTP', 'HTTPS'):
        return _("-")
    details = {'http_method': monitor.http_method,
               'url_path': monitor.url_path,
               'codes': monitor.expected_codes}
    return "%(http_method)s %(url_path)s => %(codes)s" % details
class MonitorsTable(tables.DataTable):
    # Columns of the health-monitors table.
    monitor_type = tables.Column(
        "type", verbose_name=_("Monitor Type"),
        link="horizon:project:loadbalancers:monitordetails")
    delay = tables.Column("delay", verbose_name=_("Delay"))
    timeout = tables.Column("timeout", verbose_name=_("Timeout"))
    max_retries = tables.Column("max_retries", verbose_name=_("Max Retries"))
    # Computed column: HTTP(S) monitors show 'METHOD path => codes'.
    details = tables.Column(get_monitor_details, verbose_name=_("Details"))
    class Meta(object):
        name = "monitorstable"
        verbose_name = _("Monitors")
        table_actions = (AddMonitorLink, DeleteMonitorLink)
        row_actions = (UpdateMonitorLink, DeleteMonitorLink)
| |
sysujwxt.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
import urllib2
import cookielib
import re
import json
import sys
LOGIN_TIMEOUT = 15  # seconds to wait for the login POST
REQUEST_TIMEOUT = 25  # seconds to wait for data-retrieval POSTs
# ----------------
# Basic functions
# ----------------
def format_to_json(unformated_json):
    """Quote bare object keys so the unieap response parses as strict JSON.

    The backend returns JavaScript-style object literals whose keys are not
    quoted (e.g. ``{header: {...}}``), which ``json.loads`` rejects.  A key
    is a run of word characters that follows ``,``, ``{`` or ``[`` (possibly
    after whitespace) and is followed by ``:``.
    """
    # Group 1 keeps any leading whitespace OUTSIDE the quotes; the previous
    # pattern (sub = r'"\1"') folded that whitespace into the key itself,
    # corrupting keys whenever the input contained newlines or spaces.
    pat = r'((?<=[,{\[])\s*)(\w+)(?=:)'
    sub = r'\1"\2"'
    return re.sub(pat, sub, unformated_json)
def retrive_data(url,cookie, request_json):
    # POST ``request_json`` to ``url`` authenticated with the JSESSIONID
    # ``cookie`` obtained from login().  Returns (True, body) on success or
    # (False, 'timeout') on failure.
    # NOTE(review): keeps the historical 'retrive' spelling -- callers in
    # this module depend on the name.
    cookie = 'JSESSIONID='+cookie
    # Set up the opener and attach the session cookie (Python 2 urllib2 API).
    #cj = cookielib.LWPCookieJar()
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    opener.addheaders.append(('Cookie',cookie))
    request = urllib2.Request(url,request_json)
    # The unieap backend expects these exact headers.
    request.add_header('Content-Type','multipart/form-data')
    request.add_header('render','unieap')
    try:
        response = urllib2.urlopen(request,timeout=REQUEST_TIMEOUT)
    except:
        # Bare except: any failure (not only timeouts) is reported as 'timeout'.
        return (False, 'timeout')
    ret_code = response.getcode()
    ret_body = response.read()
    return (True, ret_body)
def login(username, passward):
    """Log in to the SYSU jwxt portal.

    Returns (True, jsessionid) on success, (False, 'timeout') when the
    request fails, and (False, 'errorpass') when no session cookie was
    issued (wrong username or password).
    """
    url = 'http://uems.sysu.edu.cn/jwxt/j_unieap_security_check.do'
    # Set up a cookie jar so the JSESSIONID issued by the server is captured.
    cj = cookielib.LWPCookieJar()
    #cj = cookielib.CookieJar()
    cookie_support = urllib2.HTTPCookieProcessor(cj)
    opener = urllib2.build_opener(cookie_support, urllib2.HTTPHandler)
    urllib2.install_opener(opener)
    postData = urllib.urlencode({'j_username': username, 'j_password': passward})
    request = urllib2.Request(url, postData)
    try:
        response = urllib2.urlopen(request, timeout=LOGIN_TIMEOUT)
    except:
        # Fix: the except clause had no body (syntax error); mirror
        # retrive_data() and report the failure as a timeout.
        return (False, 'timeout')
    ret_code = response.getcode()
    ret_body = response.read()
    cookie = ""
    for item in cj:
        if item.name == 'JSESSIONID':
            cookie = item.value
            break
    else:
        # for/else: no JSESSIONID found in the jar.
        cookie = "error"
    if cookie == 'error':
        return (False, 'errorpass')
    return (True, cookie)
def get_course_result(cookie,year,term):
    # Query the raw course-selection results for ``year``/``term``.
    # Returns whatever retrive_data() returns: (True, body) or (False, 'timeout').
    # The template below is the unieap dataStore request; the two %s slots are
    # filled with the academic year and the term number.
    url = 'http://uems.sysu.edu.cn/jwxt/xstk/xstk.action?method=getXsxkjgxxlistByxh '
    query_json = '''
    {
        "header": {
            "code": -100,
            "message": {
                "title": "",
                "detail": ""
            }
        },
        "body": {
            "dataStores": {
                "xsxkjgStore": {
                    "rowSet": {
                        "primary": [ ],
                        "filter": [ ],
                        "delete": [ ]
                    },
                    "name": "xsxkjgStore",
                    "pageNumber": 1,
                    "pageSize": 20,
                    "recordCount": 62,
                    "rowSetName": "pojo_com.neusoft.education.sysu.xk.xkjg.entity.XkjgxxEntity",
                    "order": "xkjg.xnd desc,xkjg.xq desc, xkjg.jxbh"
                }
            },
            "parameters": {
                "xsxkjgStore-params": [
                    {
                        "name": "Filter_xkjg.xnd_0.9842467070587848",
                        "type": "String",
                        "value": "'%s'",
                        "condition": " = ",
                        "property": "xkjg.xnd"
                    },
                    {
                        "name": "Filter_xkjg.xq_0.30827901561365295",
                        "type": "String",
                        "value": "'%s'",
                        "condition": " = ",
                        "property": "xkjg.xq"
                    }
                ],
                "args": [ ]
            }
        }
    }
    '''%(year,term)
    return retrive_data(url, cookie, query_json)
# --------------------
# Personal info Query
# --------------------
def get_info(cookie):
    # Query the logged-in student's personal-information dataStore.
    # Returns whatever retrive_data() returns: (True, body) or (False, 'timeout').
    url = "http://uems.sysu.edu.cn/jwxt/WhzdAction/WhzdAction.action?method=getGrwhxxList"
    query_json = """
    {
        header: {
            "code": -100,
            "message": {
                "title": "",
                "detail": ""
            }
        },
        body: {
            dataStores: {
                xsxxStore: {
                    rowSet: {
                        "primary": [],
                        "filter": [],
                        "delete": []
                    },
                    name: "xsxxStore",
                    pageNumber: 1,
                    pageSize: 10,
                    recordCount: 0,
                    rowSetName: "pojo_com.neusoft.education.sysu.xj.grwh.model.Xsgrwhxx"
                }
            },
            parameters: {
                "args": [""]
            }
        }
    }
    """
    return retrive_data(url, cookie, query_json)
def get_course(cookie, year, term):
    """Fetch the student's selected courses for ``year``/``term``.

    Returns (True, [course_dict, ...]) on success or (False, None) on
    failure.  Each course dict has keys 'cou_id', 'course_name',
    'teacher' and 'time'.
    """
    # The backend expects plain ASCII byte strings (callers pass unicode).
    ok, body = get_course_result(cookie.encode('ascii'),
                                 year.encode('ascii'),
                                 term.encode('ascii'))
    if not ok:
        return (False, None)
    data = json.loads(format_to_json(body))
    rows = data['body']['dataStores']['xsxkjgStore']['rowSet']['primary']
    course = []
    for item in rows:
        course.append({
            'cou_id': item['jxbh'],               # teaching-class number
            'course_name': item['kcmc'],          # course name
            'teacher': item['xm'].split(',')[0],  # first listed teacher
            'time': item['sksjdd'],               # time and place
        })
    return (True, course)
def get_student_info(cookie):
    """Fetch the logged-in student's profile: (True, info_dict) or (False, None)."""
    result = get_info(cookie)
    if result[0] != True:
        return (False, None)
    record = json.loads(format_to_json(result[1]))
    record = record['body']['dataStores']['xsxxStore']['rowSet']['primary'][0]
    info = {
        'stuID': record['xh'],      # student number
        'name': record['xm'],       # name
        'sex': record['xbm'],       # probably the sex code
        'school': record['xymc'],   # school / college
        'major': record['zyfxmc'],  # major
        'grade': record['njmc'],    # grade / year
    }
    return (True, info)
# Manual smoke test: run with `python sysujwxt.py <username> <password>`.
if __name__ == "__main__":
    username = sys.argv[1]  # user name
    password = sys.argv[2]  # password
    res, cookie = login(username, password)
    if res:
        year = '2013-2014'  # academic year
        term = '2'  # which term: 1, 2 or 3
        result = get_course(cookie,year,term)
        if result[0] == True:
            result = result[1];
            for course in result:
                print 'The course ID : %s' % course['cou_id']
                print 'The course name: %s' % course['course_name']
                print 'The Teacher is : %s' % course['teacher']
                print 'The time is : %s ' % course['time']
                print '\n'
        else:
            print('course error')
        result = get_student_info(cookie)
        if result[0] == True:
            result = result[1]
            print result['stuID']
            print result['name']
            print result['grade']
            print result['school']
            print result['major']
        else:
            print 'get info error'
    else:
        # On failure `cookie` holds the error token ('timeout'/'errorpass').
        print('login error')
        print cookie
|
return (False, 'timeout')
|
typeck_type_placeholder_mismatch.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test checks that genuine type errors with partial
// type hints are understandable.
// Generic marker structs used only to provoke the type-mismatch errors below.
struct Foo<T>;
struct Bar<U>;
// Restored: the function name was lost from this span; the test file
// declares an empty entry point.
pub fn main() {
}
// Deliberate error: `Foo<_>` can never unify with `Bar<uint>`.
// (The `//~^` annotations are relative to the preceding line -- do not
// insert lines inside the body.)
fn test1() {
    let x: Foo<_> = Bar::<uint>;
    //~^ ERROR mismatched types: expected `Foo<<generic #0>>` but found `Bar<uint>`
    let y: Foo<uint> = x;
}
// Same deliberate mismatch as test1, but without a later use of `x`.
fn test2() {
    let x: Foo<_> = Bar::<uint>;
    //~^ ERROR mismatched types: expected `Foo<<generic #0>>` but found `Bar<uint>`
}
|
main
|
metrics.py
|
from collections import Counter
from itertools import groupby
from math import log2
import numpy as np
def segments_start(array):
    """Indices at which a new run of equal consecutive values begins."""
    starts = []
    for index in range(len(array)):
        if index == 0 or array[index] != array[index - 1]:
            starts.append(index)
    return starts
def split_sequences(array, start):
    """Cut ``array`` into chunks beginning at each index in ``start``."""
    pieces = []
    for i, s in enumerate(start):
        e = start[i + 1] if i + 1 < len(start) else len(array)
        pieces.append(array[s:e])
    return pieces
def coverage_top_1(labels, codes):
    """Fraction of each label segment covered by its most frequent code.

    For every run of identical values in ``labels``, take the aligned slice
    of ``codes`` and report count(most common code) / segment length.
    """
    boundaries = segments_start(labels)
    coverages = []
    for segment in split_sequences(codes, boundaries):
        top_count = max(Counter(segment).values())
        coverages.append(top_count / len(segment))
    return coverages
def compute_joint_probability(x, y):
    """Empirical joint distribution of the paired sequences ``x`` and ``y``.

    Returns ``(labels_x, labels_y, p_xy)`` where ``p_xy[i, j]`` is the
    fraction of positions at which ``x`` equals ``labels_x[i]`` and ``y``
    equals ``labels_y[j]``.  (Body restored: it was missing here.)
    """
    labels_x = np.unique(x)
    idx_x = {v: i for i, v in enumerate(labels_x)}
    labels_y = np.unique(y)
    idx_y = {v: i for i, v in enumerate(labels_y)}
    counts_xy = np.zeros([len(labels_x), len(labels_y)])
    for xi, yi in zip(x, y):
        counts_xy[idx_x[xi], idx_y[yi]] += 1
    return labels_x, labels_y, counts_xy / len(x)
def conditional_entropy(x, y):
    """Conditional entropy H(X|Y) in bits, from the empirical joint distribution."""
    labels_x, labels_y, p_xy = compute_joint_probability(x, y)
    p_y = np.sum(p_xy, axis=0)
    h_x_y = 0
    # ndenumerate walks p_xy in C order -- the same accumulation order as the
    # original nested loops, so the floating-point result is identical.
    for (i_x, i_y), p in np.ndenumerate(p_xy):
        if p > 0:
            h_x_y -= p * log2(p / p_y[i_y])
    return h_x_y
def count_repetitions(array):
    """Lengths of the maximal runs of equal consecutive values."""
    run_lengths = []
    for _value, run in groupby(array):
        run_lengths.append(sum(1 for _ in run))
    return run_lengths
|
labels_x = np.unique(x)
idx_x = {v: i for i, v in enumerate(labels_x)}
labels_y = np.unique(y)
idx_y = {v: i for i, v in enumerate(labels_y)}
counts_xy = np.zeros([len(labels_x), len(labels_y)])
for xi, yi in zip(x, y):
counts_xy[idx_x[xi], idx_y[yi]] += 1
return labels_x, labels_y, counts_xy / len(x)
|
input.rs
|
// Code generated by software.amazon.smithy.rust.codegen.smithy-rs. DO NOT EDIT.
use std::fmt::Write;
/// See [`DescribeDimensionKeysInput`](crate::input::DescribeDimensionKeysInput)
pub mod describe_dimension_keys_input {
    /// A builder for [`DescribeDimensionKeysInput`](crate::input::DescribeDimensionKeysInput)
    // Generated builder: every field is Option-typed and defaults to None.
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) service_type: std::option::Option<crate::model::ServiceType>,
        pub(crate) identifier: std::option::Option<std::string::String>,
        pub(crate) start_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) end_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) metric: std::option::Option<std::string::String>,
        pub(crate) period_in_seconds: std::option::Option<i32>,
        pub(crate) group_by: std::option::Option<crate::model::DimensionGroup>,
        pub(crate) additional_metrics: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) partition_by: std::option::Option<crate::model::DimensionGroup>,
        pub(crate) filter: std::option::Option<
            std::collections::HashMap<std::string::String, std::string::String>,
        >,
        pub(crate) max_results: std::option::Option<i32>,
        pub(crate) next_token: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The Amazon Web Services service for which Performance Insights will return metrics. The only valid value for <i>ServiceType</i> is <code>RDS</code>. </p>
        pub fn service_type(mut self, input: crate::model::ServiceType) -> Self {
            self.service_type = Some(input);
            self
        }
        /// <p>The Amazon Web Services service for which Performance Insights will return metrics. The only valid value for <i>ServiceType</i> is <code>RDS</code>. </p>
        pub fn set_service_type(
            mut self,
            input: std::option::Option<crate::model::ServiceType>,
        ) -> Self {
            self.service_type = input;
            self
        }
        /// <p>An immutable, Amazon Web Services Region-unique identifier for a data source. Performance Insights gathers metrics from this data source.</p>
        /// <p>To use an Amazon RDS instance as a data source, you specify its <code>DbiResourceId</code> value. For example, specify <code>db-FAIHNTYBKTGAUSUZQYPDS2GW4A</code>. </p>
        pub fn identifier(mut self, input: impl Into<std::string::String>) -> Self {
            self.identifier = Some(input.into());
            self
        }
        /// <p>An immutable, Amazon Web Services Region-unique identifier for a data source. Performance Insights gathers metrics from this data source.</p>
        /// <p>To use an Amazon RDS instance as a data source, you specify its <code>DbiResourceId</code> value. For example, specify <code>db-FAIHNTYBKTGAUSUZQYPDS2GW4A</code>. </p>
        pub fn set_identifier(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.identifier = input;
            self
        }
        /// <p>The date and time specifying the beginning of the requested time series data. You must specify a <code>StartTime</code> within the past 7 days. The value specified is <i>inclusive</i>, which means that data points equal to or greater than <code>StartTime</code> are returned. </p>
        /// <p>The value for <code>StartTime</code> must be earlier than the value for <code>EndTime</code>. </p>
        pub fn start_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            self.start_time = Some(input);
            self
        }
        /// <p>The date and time specifying the beginning of the requested time series data. You must specify a <code>StartTime</code> within the past 7 days. The value specified is <i>inclusive</i>, which means that data points equal to or greater than <code>StartTime</code> are returned. </p>
        /// <p>The value for <code>StartTime</code> must be earlier than the value for <code>EndTime</code>. </p>
        pub fn set_start_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.start_time = input;
            self
        }
        /// <p>The date and time specifying the end of the requested time series data. The value specified is <i>exclusive</i>, which means that data points less than (but not equal to) <code>EndTime</code> are returned.</p>
        /// <p>The value for <code>EndTime</code> must be later than the value for <code>StartTime</code>.</p>
        pub fn end_time(mut self, input: aws_smithy_types::DateTime) -> Self {
            self.end_time = Some(input);
            self
        }
        /// <p>The date and time specifying the end of the requested time series data. The value specified is <i>exclusive</i>, which means that data points less than (but not equal to) <code>EndTime</code> are returned.</p>
        /// <p>The value for <code>EndTime</code> must be later than the value for <code>StartTime</code>.</p>
        pub fn set_end_time(
            mut self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            self.end_time = input;
            self
        }
        /// <p>The name of a Performance Insights metric to be measured.</p>
        /// <p>Valid values for <code>Metric</code> are:</p>
        /// <ul>
        /// <li> <p> <code>db.load.avg</code> - a scaled representation of the number of active sessions for the database engine. </p> </li>
        /// <li> <p> <code>db.sampledload.avg</code> - the raw number of active sessions for the database engine. </p> </li>
        /// </ul>
        /// <p>If the number of active sessions is less than an internal Performance Insights threshold, <code>db.load.avg</code> and <code>db.sampledload.avg</code> are the same value. If the number of active sessions is greater than the internal threshold, Performance Insights samples the active sessions, with <code>db.load.avg</code> showing the scaled values, <code>db.sampledload.avg</code> showing the raw values, and <code>db.sampledload.avg</code> less than <code>db.load.avg</code>. For most use cases, you can query <code>db.load.avg</code> only. </p>
        pub fn metric(mut self, input: impl Into<std::string::String>) -> Self {
            self.metric = Some(input.into());
            self
        }
        /// <p>The name of a Performance Insights metric to be measured.</p>
        /// <p>Valid values for <code>Metric</code> are:</p>
        /// <ul>
        /// <li> <p> <code>db.load.avg</code> - a scaled representation of the number of active sessions for the database engine. </p> </li>
        /// <li> <p> <code>db.sampledload.avg</code> - the raw number of active sessions for the database engine. </p> </li>
        /// </ul>
        /// <p>If the number of active sessions is less than an internal Performance Insights threshold, <code>db.load.avg</code> and <code>db.sampledload.avg</code> are the same value. If the number of active sessions is greater than the internal threshold, Performance Insights samples the active sessions, with <code>db.load.avg</code> showing the scaled values, <code>db.sampledload.avg</code> showing the raw values, and <code>db.sampledload.avg</code> less than <code>db.load.avg</code>. For most use cases, you can query <code>db.load.avg</code> only. </p>
        pub fn set_metric(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.metric = input;
            self
        }
        /// <p>The granularity, in seconds, of the data points returned from Performance Insights. A period can be as short as one second, or as long as one day (86400 seconds). Valid values are: </p>
        /// <ul>
        /// <li> <p> <code>1</code> (one second)</p> </li>
        /// <li> <p> <code>60</code> (one minute)</p> </li>
        /// <li> <p> <code>300</code> (five minutes)</p> </li>
        /// <li> <p> <code>3600</code> (one hour)</p> </li>
        /// <li> <p> <code>86400</code> (twenty-four hours)</p> </li>
        /// </ul>
        /// <p>If you don't specify <code>PeriodInSeconds</code>, then Performance Insights chooses a value for you, with a goal of returning roughly 100-200 data points in the response. </p>
        pub fn period_in_seconds(mut self, input: i32) -> Self {
            self.period_in_seconds = Some(input);
            self
        }
        /// <p>The granularity, in seconds, of the data points returned from Performance Insights. A period can be as short as one second, or as long as one day (86400 seconds). Valid values are: </p>
        /// <ul>
        /// <li> <p> <code>1</code> (one second)</p> </li>
        /// <li> <p> <code>60</code> (one minute)</p> </li>
        /// <li> <p> <code>300</code> (five minutes)</p> </li>
        /// <li> <p> <code>3600</code> (one hour)</p> </li>
        /// <li> <p> <code>86400</code> (twenty-four hours)</p> </li>
        /// </ul>
        /// <p>If you don't specify <code>PeriodInSeconds</code>, then Performance Insights chooses a value for you, with a goal of returning roughly 100-200 data points in the response. </p>
        pub fn set_period_in_seconds(mut self, input: std::option::Option<i32>) -> Self {
            self.period_in_seconds = input;
            self
        }
        /// <p>A specification for how to aggregate the data points from a query result. You must specify a valid dimension group. Performance Insights returns all dimensions within this group, unless you provide the names of specific dimensions within this group. You can also request that Performance Insights return a limited number of values for a dimension. </p>
        pub fn group_by(mut self, input: crate::model::DimensionGroup) -> Self {
            self.group_by = Some(input);
            self
        }
        /// <p>A specification for how to aggregate the data points from a query result. You must specify a valid dimension group. Performance Insights returns all dimensions within this group, unless you provide the names of specific dimensions within this group. You can also request that Performance Insights return a limited number of values for a dimension. </p>
        pub fn set_group_by(
            mut self,
            input: std::option::Option<crate::model::DimensionGroup>,
        ) -> Self {
            self.group_by = input;
            self
        }
        /// Appends an item to `additional_metrics`.
        ///
        /// To override the contents of this collection use [`set_additional_metrics`](Self::set_additional_metrics).
        ///
        /// <p>Additional metrics for the top <code>N</code> dimension keys. If the specified dimension group in the <code>GroupBy</code> parameter is <code>db.sql_tokenized</code>, you can specify per-SQL metrics to get the values for the top <code>N</code> SQL digests. The response syntax is <code>"AdditionalMetrics" : { "<i>string</i>" : "<i>string</i>" }</code>. </p>
        /// <p></p>
        pub fn additional_metrics(mut self, input: impl Into<std::string::String>) -> Self {
            let mut v = self.additional_metrics.unwrap_or_default();
            v.push(input.into());
            self.additional_metrics = Some(v);
            self
        }
        /// <p>Additional metrics for the top <code>N</code> dimension keys. If the specified dimension group in the <code>GroupBy</code> parameter is <code>db.sql_tokenized</code>, you can specify per-SQL metrics to get the values for the top <code>N</code> SQL digests. The response syntax is <code>"AdditionalMetrics" : { "<i>string</i>" : "<i>string</i>" }</code>. </p>
        /// <p></p>
        pub fn set_additional_metrics(
            mut self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            self.additional_metrics = input;
            self
        }
        /// <p>For each dimension specified in <code>GroupBy</code>, specify a secondary dimension to further subdivide the partition keys in the response. </p>
        pub fn partition_by(mut self, input: crate::model::DimensionGroup) -> Self {
            self.partition_by = Some(input);
            self
        }
        /// <p>For each dimension specified in <code>GroupBy</code>, specify a secondary dimension to further subdivide the partition keys in the response. </p>
        pub fn set_partition_by(
            mut self,
            input: std::option::Option<crate::model::DimensionGroup>,
        ) -> Self {
            self.partition_by = input;
            self
        }
        /// Adds a key-value pair to `filter`.
        ///
        /// To override the contents of this collection use [`set_filter`](Self::set_filter).
        ///
        /// <p>One or more filters to apply in the request. Restrictions:</p>
        /// <ul>
        /// <li> <p>Any number of filters by the same dimension, as specified in the <code>GroupBy</code> or <code>Partition</code> parameters.</p> </li>
        /// <li> <p>A single filter for any other dimension in this dimension group.</p> </li>
        /// </ul>
        pub fn filter(
            mut self,
            k: impl Into<std::string::String>,
            v: impl Into<std::string::String>,
        ) -> Self {
            let mut hash_map = self.filter.unwrap_or_default();
            hash_map.insert(k.into(), v.into());
            self.filter = Some(hash_map);
            self
        }
        /// <p>One or more filters to apply in the request. Restrictions:</p>
        /// <ul>
        /// <li> <p>Any number of filters by the same dimension, as specified in the <code>GroupBy</code> or <code>Partition</code> parameters.</p> </li>
        /// <li> <p>A single filter for any other dimension in this dimension group.</p> </li>
        /// </ul>
        pub fn set_filter(
            mut self,
            input: std::option::Option<
                std::collections::HashMap<std::string::String, std::string::String>,
            >,
        ) -> Self {
            self.filter = input;
            self
        }
        /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved. </p>
        pub fn max_results(mut self, input: i32) -> Self {
            self.max_results = Some(input);
            self
        }
        /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved. </p>
        pub fn set_max_results(mut self, input: std::option::Option<i32>) -> Self {
            self.max_results = input;
            self
        }
        /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
        pub fn next_token(mut self, input: impl Into<std::string::String>) -> Self {
            self.next_token = Some(input.into());
            self
        }
        /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
        pub fn set_next_token(mut self, input: std::option::Option<std::string::String>) -> Self {
            self.next_token = input;
            self
        }
        // No required-field validation is generated here: every field passes
        // through as-is, so this build() never returns Err in practice.
        /// Consumes the builder and constructs a [`DescribeDimensionKeysInput`](crate::input::DescribeDimensionKeysInput)
        pub fn build(
            self,
        ) -> std::result::Result<
            crate::input::DescribeDimensionKeysInput,
            aws_smithy_http::operation::BuildError,
        > {
            Ok(crate::input::DescribeDimensionKeysInput {
                service_type: self.service_type,
                identifier: self.identifier,
                start_time: self.start_time,
                end_time: self.end_time,
                metric: self.metric,
                period_in_seconds: self.period_in_seconds,
                group_by: self.group_by,
                additional_metrics: self.additional_metrics,
                partition_by: self.partition_by,
                filter: self.filter,
                max_results: self.max_results,
                next_token: self.next_token,
            })
        }
    }
}
// Aliases naming this operation's type and retry policy; hidden from docs
// and presumably referenced by generated client plumbing elsewhere.
#[doc(hidden)]
pub type DescribeDimensionKeysInputOperationOutputAlias = crate::operation::DescribeDimensionKeys;
#[doc(hidden)]
pub type DescribeDimensionKeysInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl DescribeDimensionKeysInput {
    /// Consumes the builder and constructs an Operation<[`DescribeDimensionKeys`](crate::operation::DescribeDimensionKeys)>
    #[allow(unused_mut)]
    #[allow(clippy::let_and_return)]
    #[allow(clippy::needless_borrow)]
    pub async fn make_operation(
        &self,
        _config: &crate::config::Config,
    ) -> std::result::Result<
        aws_smithy_http::operation::Operation<
            crate::operation::DescribeDimensionKeys,
            aws_http::retry::AwsErrorRetryPolicy,
        >,
        aws_smithy_http::operation::BuildError,
    > {
        // Build the HTTP request skeleton: POST to "/" with the JSON-1.1
        // content type and the x-amz-target header naming the operation.
        let mut request = {
            fn uri_base(
                _input: &crate::input::DescribeDimensionKeysInput,
                output: &mut String,
            ) -> Result<(), aws_smithy_http::operation::BuildError> {
                write!(output, "/").expect("formatting should succeed");
                Ok(())
            }
            #[allow(clippy::unnecessary_wraps)]
            fn update_http_builder(
                input: &crate::input::DescribeDimensionKeysInput,
                builder: http::request::Builder,
            ) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
            {
                let mut uri = String::new();
                uri_base(input, &mut uri)?;
                Ok(builder.method("POST").uri(uri))
            }
            let mut builder = update_http_builder(&self, http::request::Builder::new())?;
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::CONTENT_TYPE,
                "application/x-amz-json-1.1",
            );
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::HeaderName::from_static("x-amz-target"),
                "PerformanceInsightsv20180227.DescribeDimensionKeys",
            );
            builder
        };
        let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
        // Serialize self as the JSON body and set Content-Length when known.
        #[allow(clippy::useless_conversion)]
        let body = aws_smithy_http::body::SdkBody::from(
            crate::operation_ser::serialize_operation_crate_operation_describe_dimension_keys(
                &self,
            )?,
        );
        if let Some(content_length) = body.content_length() {
            request = aws_smithy_http::header::set_request_header_if_absent(
                request,
                http::header::CONTENT_LENGTH,
                content_length,
            );
        }
        let request = request.body(body).expect("should be valid request");
        // Attach runtime metadata (user agent, signing, endpoint, region,
        // credentials) to the request's property bag.
        let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
        let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
            aws_types::os_shim_internal::Env::real(),
            crate::API_METADATA.clone(),
        );
        if let Some(app_name) = _config.app_name() {
            user_agent = user_agent.with_app_name(app_name.clone());
        }
        request.properties_mut().insert(user_agent);
        let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
        request.properties_mut().insert(signing_config);
        request
            .properties_mut()
            .insert(aws_types::SigningService::from_static(
                _config.signing_service(),
            ));
        aws_endpoint::set_endpoint_resolver(
            &mut request.properties_mut(),
            _config.endpoint_resolver.clone(),
        );
        if let Some(region) = &_config.region {
            request.properties_mut().insert(region.clone());
        }
        aws_http::auth::set_provider(
            &mut request.properties_mut(),
            _config.credentials_provider.clone(),
        );
        // Wrap everything into an Operation with metadata and retry policy.
        let op = aws_smithy_http::operation::Operation::new(
            request,
            crate::operation::DescribeDimensionKeys::new(),
        )
        .with_metadata(aws_smithy_http::operation::Metadata::new(
            "DescribeDimensionKeys",
            "pi",
        ));
        let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
        Ok(op)
    }
    /// Creates a new builder-style object to manufacture [`DescribeDimensionKeysInput`](crate::input::DescribeDimensionKeysInput)
    pub fn builder() -> crate::input::describe_dimension_keys_input::Builder {
        crate::input::describe_dimension_keys_input::Builder::default()
    }
}
/// See [`GetDimensionKeyDetailsInput`](crate::input::GetDimensionKeyDetailsInput)
pub mod get_dimension_key_details_input {
/// A builder for [`GetDimensionKeyDetailsInput`](crate::input::GetDimensionKeyDetailsInput)
#[non_exhaustive]
#[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
pub struct Builder {
pub(crate) service_type: std::option::Option<crate::model::ServiceType>,
pub(crate) identifier: std::option::Option<std::string::String>,
pub(crate) group: std::option::Option<std::string::String>,
pub(crate) group_identifier: std::option::Option<std::string::String>,
pub(crate) requested_dimensions: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl Builder {
/// <p>The Amazon Web Services service for which Performance Insights returns data. The only valid value is <code>RDS</code>.</p>
pub fn service_type(mut self, input: crate::model::ServiceType) -> Self {
self.service_type = Some(input);
self
}
/// <p>The Amazon Web Services service for which Performance Insights returns data. The only valid value is <code>RDS</code>.</p>
pub fn set_service_type(
mut self,
input: std::option::Option<crate::model::ServiceType>,
) -> Self {
self.service_type = input;
self
}
/// <p>The ID for a data source from which to gather dimension data. This ID must be immutable and unique within an Amazon Web Services Region. When a DB instance is the data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>. </p>
pub fn identifier(mut self, input: impl Into<std::string::String>) -> Self {
self.identifier = Some(input.into());
self
}
/// <p>The ID for a data source from which to gather dimension data. This ID must be immutable and unique within an Amazon Web Services Region. When a DB instance is the data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>. </p>
pub fn set_identifier(mut self, input: std::option::Option<std::string::String>) -> Self {
self.identifier = input;
self
}
/// <p>The name of the dimension group. The only valid value is <code>db.sql</code>. Performance Insights searches the specified group for the dimension group ID.</p>
pub fn group(mut self, input: impl Into<std::string::String>) -> Self {
self.group = Some(input.into());
self
}
/// <p>The name of the dimension group. The only valid value is <code>db.sql</code>. Performance Insights searches the specified group for the dimension group ID.</p>
pub fn set_group(mut self, input: std::option::Option<std::string::String>) -> Self {
self.group = input;
self
}
/// <p>The ID of the dimension group from which to retrieve dimension details. For dimension group <code>db.sql</code>, the group ID is <code>db.sql.id</code>.</p>
pub fn group_identifier(mut self, input: impl Into<std::string::String>) -> Self {
self.group_identifier = Some(input.into());
self
}
/// <p>The ID of the dimension group from which to retrieve dimension details. For dimension group <code>db.sql</code>, the group ID is <code>db.sql.id</code>.</p>
pub fn set_group_identifier(
mut self,
input: std::option::Option<std::string::String>,
) -> Self {
self.group_identifier = input;
self
}
/// Appends an item to `requested_dimensions`.
///
/// To override the contents of this collection use [`set_requested_dimensions`](Self::set_requested_dimensions).
///
/// <p>A list of dimensions to retrieve the detail data for within the given dimension group. For the dimension group <code>db.sql</code>, specify either the full dimension name <code>db.sql.statement</code> or the short dimension name <code>statement</code>. If you don't specify this parameter, Performance Insights returns all dimension data within the specified dimension group. </p>
pub fn requested_dimensions(mut self, input: impl Into<std::string::String>) -> Self {
let mut v = self.requested_dimensions.unwrap_or_default();
v.push(input.into());
self.requested_dimensions = Some(v);
self
}
/// <p>A list of dimensions to retrieve the detail data for within the given dimension group. For the dimension group <code>db.sql</code>, specify either the full dimension name <code>db.sql.statement</code> or the short dimension name <code>statement</code>. If you don't specify this parameter, Performance Insights returns all dimension data within the specified dimension group. </p>
pub fn set_requested_dimensions(
mut self,
input: std::option::Option<std::vec::Vec<std::string::String>>,
) -> Self {
self.requested_dimensions = input;
self
}
/// Consumes the builder and constructs a [`GetDimensionKeyDetailsInput`](crate::input::GetDimensionKeyDetailsInput)
pub fn build(
self,
) -> std::result::Result<
crate::input::GetDimensionKeyDetailsInput,
aws_smithy_http::operation::BuildError,
> {
Ok(crate::input::GetDimensionKeyDetailsInput {
service_type: self.service_type,
identifier: self.identifier,
group: self.group,
group_identifier: self.group_identifier,
requested_dimensions: self.requested_dimensions,
})
}
}
}
// Hidden aliases pairing this input with its operation type and retry policy.
// NOTE(review): presumably consumed by the generated client/middleware glue — confirm in the client module.
#[doc(hidden)]
pub type GetDimensionKeyDetailsInputOperationOutputAlias = crate::operation::GetDimensionKeyDetails;
#[doc(hidden)]
pub type GetDimensionKeyDetailsInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl GetDimensionKeyDetailsInput {
    /// Consumes the builder and constructs an Operation<[`GetDimensionKeyDetails`](crate::operation::GetDimensionKeyDetails)>
    #[allow(unused_mut)]
    #[allow(clippy::let_and_return)]
    #[allow(clippy::needless_borrow)]
    pub async fn make_operation(
        &self,
        _config: &crate::config::Config,
    ) -> std::result::Result<
        aws_smithy_http::operation::Operation<
            crate::operation::GetDimensionKeyDetails,
            aws_http::retry::AwsErrorRetryPolicy,
        >,
        aws_smithy_http::operation::BuildError,
    > {
        // Step 1: build the HTTP request skeleton — a POST to "/" carrying the
        // AWS JSON 1.1 content type and an X-Amz-Target header naming the
        // target operation.
        let mut request = {
            fn uri_base(
                _input: &crate::input::GetDimensionKeyDetailsInput,
                output: &mut String,
            ) -> Result<(), aws_smithy_http::operation::BuildError> {
                write!(output, "/").expect("formatting should succeed");
                Ok(())
            }
            #[allow(clippy::unnecessary_wraps)]
            fn update_http_builder(
                input: &crate::input::GetDimensionKeyDetailsInput,
                builder: http::request::Builder,
            ) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
            {
                let mut uri = String::new();
                uri_base(input, &mut uri)?;
                Ok(builder.method("POST").uri(uri))
            }
            let mut builder = update_http_builder(&self, http::request::Builder::new())?;
            // Headers are only set when absent, so caller-supplied values win.
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::CONTENT_TYPE,
                "application/x-amz-json-1.1",
            );
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::HeaderName::from_static("x-amz-target"),
                "PerformanceInsightsv20180227.GetDimensionKeyDetails",
            );
            builder
        };
        let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
        // Step 2: serialize this input as the JSON request body.
        #[allow(clippy::useless_conversion)]
        let body = aws_smithy_http::body::SdkBody::from(
            crate::operation_ser::serialize_operation_crate_operation_get_dimension_key_details(
                &self,
            )?,
        );
        // Advertise Content-Length when the body size is known up front.
        if let Some(content_length) = body.content_length() {
            request = aws_smithy_http::header::set_request_header_if_absent(
                request,
                http::header::CONTENT_LENGTH,
                content_length,
            );
        }
        let request = request.body(body).expect("should be valid request");
        // Step 3: stash per-request metadata (user agent, signing config,
        // signing service, endpoint resolver, region, credentials) in the
        // property bag for downstream middleware to use.
        let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
        let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
            aws_types::os_shim_internal::Env::real(),
            crate::API_METADATA.clone(),
        );
        if let Some(app_name) = _config.app_name() {
            user_agent = user_agent.with_app_name(app_name.clone());
        }
        request.properties_mut().insert(user_agent);
        let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
        request.properties_mut().insert(signing_config);
        request
            .properties_mut()
            .insert(aws_types::SigningService::from_static(
                _config.signing_service(),
            ));
        aws_endpoint::set_endpoint_resolver(
            &mut request.properties_mut(),
            _config.endpoint_resolver.clone(),
        );
        if let Some(region) = &_config.region {
            request.properties_mut().insert(region.clone());
        }
        aws_http::auth::set_provider(
            &mut request.properties_mut(),
            _config.credentials_provider.clone(),
        );
        // Step 4: wrap everything into an Operation with the standard AWS retry policy.
        let op = aws_smithy_http::operation::Operation::new(
            request,
            crate::operation::GetDimensionKeyDetails::new(),
        )
        .with_metadata(aws_smithy_http::operation::Metadata::new(
            "GetDimensionKeyDetails",
            "pi",
        ));
        let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
        Ok(op)
    }
    /// Creates a new builder-style object to manufacture [`GetDimensionKeyDetailsInput`](crate::input::GetDimensionKeyDetailsInput)
    pub fn builder() -> crate::input::get_dimension_key_details_input::Builder {
        crate::input::get_dimension_key_details_input::Builder::default()
    }
}
/// See [`GetResourceMetadataInput`](crate::input::GetResourceMetadataInput)
pub mod get_resource_metadata_input {
    /// A builder for [`GetResourceMetadataInput`](crate::input::GetResourceMetadataInput)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) service_type: std::option::Option<crate::model::ServiceType>,
        pub(crate) identifier: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
        pub fn service_type(self, input: crate::model::ServiceType) -> Self {
            // Delegate to the Option-based setter so both paths stay in sync.
            self.set_service_type(Some(input))
        }
        /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
        pub fn set_service_type(
            self,
            input: std::option::Option<crate::model::ServiceType>,
        ) -> Self {
            Self { service_type: input, ..self }
        }
        /// <p>An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use a DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>. </p>
        pub fn identifier(self, input: impl Into<std::string::String>) -> Self {
            self.set_identifier(Some(input.into()))
        }
        /// <p>An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use a DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>. </p>
        pub fn set_identifier(self, input: std::option::Option<std::string::String>) -> Self {
            Self { identifier: input, ..self }
        }
        /// Consumes the builder and constructs a [`GetResourceMetadataInput`](crate::input::GetResourceMetadataInput)
        pub fn build(
            self,
        ) -> std::result::Result<
            crate::input::GetResourceMetadataInput,
            aws_smithy_http::operation::BuildError,
        > {
            // Move both fields across unchanged; nothing is required at build time.
            let Self {
                service_type,
                identifier,
            } = self;
            Ok(crate::input::GetResourceMetadataInput {
                service_type,
                identifier,
            })
        }
    }
}
// Hidden aliases pairing this input with its operation type and retry policy.
// NOTE(review): presumably consumed by the generated client/middleware glue — confirm in the client module.
#[doc(hidden)]
pub type GetResourceMetadataInputOperationOutputAlias = crate::operation::GetResourceMetadata;
#[doc(hidden)]
pub type GetResourceMetadataInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl GetResourceMetadataInput {
    /// Consumes the builder and constructs an Operation<[`GetResourceMetadata`](crate::operation::GetResourceMetadata)>
    #[allow(unused_mut)]
    #[allow(clippy::let_and_return)]
    #[allow(clippy::needless_borrow)]
    pub async fn make_operation(
        &self,
        _config: &crate::config::Config,
    ) -> std::result::Result<
        aws_smithy_http::operation::Operation<
            crate::operation::GetResourceMetadata,
            aws_http::retry::AwsErrorRetryPolicy,
        >,
        aws_smithy_http::operation::BuildError,
    > {
        // Step 1: build the HTTP request skeleton — a POST to "/" carrying the
        // AWS JSON 1.1 content type and an X-Amz-Target header naming the
        // target operation.
        let mut request = {
            fn uri_base(
                _input: &crate::input::GetResourceMetadataInput,
                output: &mut String,
            ) -> Result<(), aws_smithy_http::operation::BuildError> {
                write!(output, "/").expect("formatting should succeed");
                Ok(())
            }
            #[allow(clippy::unnecessary_wraps)]
            fn update_http_builder(
                input: &crate::input::GetResourceMetadataInput,
                builder: http::request::Builder,
            ) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
            {
                let mut uri = String::new();
                uri_base(input, &mut uri)?;
                Ok(builder.method("POST").uri(uri))
            }
            let mut builder = update_http_builder(&self, http::request::Builder::new())?;
            // Headers are only set when absent, so caller-supplied values win.
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::CONTENT_TYPE,
                "application/x-amz-json-1.1",
            );
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::HeaderName::from_static("x-amz-target"),
                "PerformanceInsightsv20180227.GetResourceMetadata",
            );
            builder
        };
        let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
        // Step 2: serialize this input as the JSON request body.
        #[allow(clippy::useless_conversion)]
        let body = aws_smithy_http::body::SdkBody::from(
            crate::operation_ser::serialize_operation_crate_operation_get_resource_metadata(&self)?,
        );
        // Advertise Content-Length when the body size is known up front.
        if let Some(content_length) = body.content_length() {
            request = aws_smithy_http::header::set_request_header_if_absent(
                request,
                http::header::CONTENT_LENGTH,
                content_length,
            );
        }
        let request = request.body(body).expect("should be valid request");
        // Step 3: stash per-request metadata (user agent, signing config,
        // signing service, endpoint resolver, region, credentials) in the
        // property bag for downstream middleware to use.
        let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
        let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
            aws_types::os_shim_internal::Env::real(),
            crate::API_METADATA.clone(),
        );
        if let Some(app_name) = _config.app_name() {
            user_agent = user_agent.with_app_name(app_name.clone());
        }
        request.properties_mut().insert(user_agent);
        let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
        request.properties_mut().insert(signing_config);
        request
            .properties_mut()
            .insert(aws_types::SigningService::from_static(
                _config.signing_service(),
            ));
        aws_endpoint::set_endpoint_resolver(
            &mut request.properties_mut(),
            _config.endpoint_resolver.clone(),
        );
        if let Some(region) = &_config.region {
            request.properties_mut().insert(region.clone());
        }
        aws_http::auth::set_provider(
            &mut request.properties_mut(),
            _config.credentials_provider.clone(),
        );
        // Step 4: wrap everything into an Operation with the standard AWS retry policy.
        let op = aws_smithy_http::operation::Operation::new(
            request,
            crate::operation::GetResourceMetadata::new(),
        )
        .with_metadata(aws_smithy_http::operation::Metadata::new(
            "GetResourceMetadata",
            "pi",
        ));
        let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
        Ok(op)
    }
    /// Creates a new builder-style object to manufacture [`GetResourceMetadataInput`](crate::input::GetResourceMetadataInput)
    pub fn builder() -> crate::input::get_resource_metadata_input::Builder {
        crate::input::get_resource_metadata_input::Builder::default()
    }
}
/// See [`GetResourceMetricsInput`](crate::input::GetResourceMetricsInput)
pub mod get_resource_metrics_input {
    /// A builder for [`GetResourceMetricsInput`](crate::input::GetResourceMetricsInput)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) service_type: std::option::Option<crate::model::ServiceType>,
        pub(crate) identifier: std::option::Option<std::string::String>,
        pub(crate) metric_queries: std::option::Option<std::vec::Vec<crate::model::MetricQuery>>,
        pub(crate) start_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) end_time: std::option::Option<aws_smithy_types::DateTime>,
        pub(crate) period_in_seconds: std::option::Option<i32>,
        pub(crate) max_results: std::option::Option<i32>,
        pub(crate) next_token: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The Amazon Web Services service for which Performance Insights returns metrics. The only valid value for <i>ServiceType</i> is <code>RDS</code>.</p>
        pub fn service_type(self, input: crate::model::ServiceType) -> Self {
            // Delegate to the Option-based setter so both paths stay in sync.
            self.set_service_type(Some(input))
        }
        /// <p>The Amazon Web Services service for which Performance Insights returns metrics. The only valid value for <i>ServiceType</i> is <code>RDS</code>.</p>
        pub fn set_service_type(
            self,
            input: std::option::Option<crate::model::ServiceType>,
        ) -> Self {
            Self { service_type: input, ..self }
        }
        /// <p>An immutable, Amazon Web Services Region-unique identifier for a data source. Performance Insights gathers metrics from this data source.</p>
        /// <p>To use a DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-FAIHNTYBKTGAUSUZQYPDS2GW4A</code>.</p>
        pub fn identifier(self, input: impl Into<std::string::String>) -> Self {
            self.set_identifier(Some(input.into()))
        }
        /// <p>An immutable, Amazon Web Services Region-unique identifier for a data source. Performance Insights gathers metrics from this data source.</p>
        /// <p>To use a DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-FAIHNTYBKTGAUSUZQYPDS2GW4A</code>.</p>
        pub fn set_identifier(self, input: std::option::Option<std::string::String>) -> Self {
            Self { identifier: input, ..self }
        }
        /// Appends an item to `metric_queries`.
        ///
        /// To override the contents of this collection use [`set_metric_queries`](Self::set_metric_queries).
        ///
        /// <p>An array of one or more queries to perform. Each query must specify a Performance Insights metric, and can optionally specify aggregation and filtering criteria.</p>
        pub fn metric_queries(mut self, input: crate::model::MetricQuery) -> Self {
            // Append to the existing list, creating it on first use.
            self.metric_queries.get_or_insert_with(Vec::new).push(input);
            self
        }
        /// <p>An array of one or more queries to perform. Each query must specify a Performance Insights metric, and can optionally specify aggregation and filtering criteria.</p>
        pub fn set_metric_queries(
            self,
            input: std::option::Option<std::vec::Vec<crate::model::MetricQuery>>,
        ) -> Self {
            Self { metric_queries: input, ..self }
        }
        /// <p>The date and time specifying the beginning of the requested time series data. You can't specify a <code>StartTime</code> that's earlier than 7 days ago. The value specified is <i>inclusive</i> - data points equal to or greater than <code>StartTime</code> will be returned.</p>
        /// <p>The value for <code>StartTime</code> must be earlier than the value for <code>EndTime</code>.</p>
        pub fn start_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_start_time(Some(input))
        }
        /// <p>The date and time specifying the beginning of the requested time series data. You can't specify a <code>StartTime</code> that's earlier than 7 days ago. The value specified is <i>inclusive</i> - data points equal to or greater than <code>StartTime</code> will be returned.</p>
        /// <p>The value for <code>StartTime</code> must be earlier than the value for <code>EndTime</code>.</p>
        pub fn set_start_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { start_time: input, ..self }
        }
        /// <p>The date and time specifying the end of the requested time series data. The value specified is <i>exclusive</i> - data points less than (but not equal to) <code>EndTime</code> will be returned.</p>
        /// <p>The value for <code>EndTime</code> must be later than the value for <code>StartTime</code>.</p>
        pub fn end_time(self, input: aws_smithy_types::DateTime) -> Self {
            self.set_end_time(Some(input))
        }
        /// <p>The date and time specifying the end of the requested time series data. The value specified is <i>exclusive</i> - data points less than (but not equal to) <code>EndTime</code> will be returned.</p>
        /// <p>The value for <code>EndTime</code> must be later than the value for <code>StartTime</code>.</p>
        pub fn set_end_time(
            self,
            input: std::option::Option<aws_smithy_types::DateTime>,
        ) -> Self {
            Self { end_time: input, ..self }
        }
        /// <p>The granularity, in seconds, of the data points returned from Performance Insights. A period can be as short as one second, or as long as one day (86400 seconds). Valid values are:</p>
        /// <ul>
        /// <li> <p> <code>1</code> (one second)</p> </li>
        /// <li> <p> <code>60</code> (one minute)</p> </li>
        /// <li> <p> <code>300</code> (five minutes)</p> </li>
        /// <li> <p> <code>3600</code> (one hour)</p> </li>
        /// <li> <p> <code>86400</code> (twenty-four hours)</p> </li>
        /// </ul>
        /// <p>If you don't specify <code>PeriodInSeconds</code>, then Performance Insights will choose a value for you, with a goal of returning roughly 100-200 data points in the response.</p>
        pub fn period_in_seconds(self, input: i32) -> Self {
            self.set_period_in_seconds(Some(input))
        }
        /// <p>The granularity, in seconds, of the data points returned from Performance Insights. A period can be as short as one second, or as long as one day (86400 seconds). Valid values are:</p>
        /// <ul>
        /// <li> <p> <code>1</code> (one second)</p> </li>
        /// <li> <p> <code>60</code> (one minute)</p> </li>
        /// <li> <p> <code>300</code> (five minutes)</p> </li>
        /// <li> <p> <code>3600</code> (one hour)</p> </li>
        /// <li> <p> <code>86400</code> (twenty-four hours)</p> </li>
        /// </ul>
        /// <p>If you don't specify <code>PeriodInSeconds</code>, then Performance Insights will choose a value for you, with a goal of returning roughly 100-200 data points in the response.</p>
        pub fn set_period_in_seconds(self, input: std::option::Option<i32>) -> Self {
            Self { period_in_seconds: input, ..self }
        }
        /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved. </p>
        pub fn max_results(self, input: i32) -> Self {
            self.set_max_results(Some(input))
        }
        /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved. </p>
        pub fn set_max_results(self, input: std::option::Option<i32>) -> Self {
            Self { max_results: input, ..self }
        }
        /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>.</p>
        pub fn next_token(self, input: impl Into<std::string::String>) -> Self {
            self.set_next_token(Some(input.into()))
        }
        /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>.</p>
        pub fn set_next_token(self, input: std::option::Option<std::string::String>) -> Self {
            Self { next_token: input, ..self }
        }
        /// Consumes the builder and constructs a [`GetResourceMetricsInput`](crate::input::GetResourceMetricsInput)
        pub fn build(
            self,
        ) -> std::result::Result<
            crate::input::GetResourceMetricsInput,
            aws_smithy_http::operation::BuildError,
        > {
            // Move every field across unchanged; no field is required at build time.
            let Self {
                service_type,
                identifier,
                metric_queries,
                start_time,
                end_time,
                period_in_seconds,
                max_results,
                next_token,
            } = self;
            Ok(crate::input::GetResourceMetricsInput {
                service_type,
                identifier,
                metric_queries,
                start_time,
                end_time,
                period_in_seconds,
                max_results,
                next_token,
            })
        }
    }
}
// Hidden aliases pairing this input with its operation type and retry policy.
// NOTE(review): presumably consumed by the generated client/middleware glue — confirm in the client module.
#[doc(hidden)]
pub type GetResourceMetricsInputOperationOutputAlias = crate::operation::GetResourceMetrics;
#[doc(hidden)]
pub type GetResourceMetricsInputOperationRetryAlias = aws_http::retry::AwsErrorRetryPolicy;
impl GetResourceMetricsInput {
    /// Consumes the builder and constructs an Operation<[`GetResourceMetrics`](crate::operation::GetResourceMetrics)>
    #[allow(unused_mut)]
    #[allow(clippy::let_and_return)]
    #[allow(clippy::needless_borrow)]
    pub async fn make_operation(
        &self,
        _config: &crate::config::Config,
    ) -> std::result::Result<
        aws_smithy_http::operation::Operation<
            crate::operation::GetResourceMetrics,
            aws_http::retry::AwsErrorRetryPolicy,
        >,
        aws_smithy_http::operation::BuildError,
    > {
        // Step 1: build the HTTP request skeleton — a POST to "/" carrying the
        // AWS JSON 1.1 content type and an X-Amz-Target header naming the
        // target operation.
        let mut request = {
            fn uri_base(
                _input: &crate::input::GetResourceMetricsInput,
                output: &mut String,
            ) -> Result<(), aws_smithy_http::operation::BuildError> {
                write!(output, "/").expect("formatting should succeed");
                Ok(())
            }
            #[allow(clippy::unnecessary_wraps)]
            fn update_http_builder(
                input: &crate::input::GetResourceMetricsInput,
                builder: http::request::Builder,
            ) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
            {
                let mut uri = String::new();
                uri_base(input, &mut uri)?;
                Ok(builder.method("POST").uri(uri))
            }
            let mut builder = update_http_builder(&self, http::request::Builder::new())?;
            // Headers are only set when absent, so caller-supplied values win.
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::CONTENT_TYPE,
                "application/x-amz-json-1.1",
            );
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::HeaderName::from_static("x-amz-target"),
                "PerformanceInsightsv20180227.GetResourceMetrics",
            );
            builder
        };
        let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
        // Step 2: serialize this input as the JSON request body.
        #[allow(clippy::useless_conversion)]
        let body = aws_smithy_http::body::SdkBody::from(
            crate::operation_ser::serialize_operation_crate_operation_get_resource_metrics(&self)?,
        );
        // Advertise Content-Length when the body size is known up front.
        if let Some(content_length) = body.content_length() {
            request = aws_smithy_http::header::set_request_header_if_absent(
                request,
                http::header::CONTENT_LENGTH,
                content_length,
            );
        }
        let request = request.body(body).expect("should be valid request");
        // Step 3: stash per-request metadata (user agent, signing config,
        // signing service, endpoint resolver, region, credentials) in the
        // property bag for downstream middleware to use.
        let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
        let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
            aws_types::os_shim_internal::Env::real(),
            crate::API_METADATA.clone(),
        );
        if let Some(app_name) = _config.app_name() {
            user_agent = user_agent.with_app_name(app_name.clone());
        }
        request.properties_mut().insert(user_agent);
        let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
        request.properties_mut().insert(signing_config);
        request
            .properties_mut()
            .insert(aws_types::SigningService::from_static(
                _config.signing_service(),
            ));
        aws_endpoint::set_endpoint_resolver(
            &mut request.properties_mut(),
            _config.endpoint_resolver.clone(),
        );
        if let Some(region) = &_config.region {
            request.properties_mut().insert(region.clone());
        }
        aws_http::auth::set_provider(
            &mut request.properties_mut(),
            _config.credentials_provider.clone(),
        );
        // Step 4: wrap everything into an Operation with the standard AWS retry policy.
        let op = aws_smithy_http::operation::Operation::new(
            request,
            crate::operation::GetResourceMetrics::new(),
        )
        .with_metadata(aws_smithy_http::operation::Metadata::new(
            "GetResourceMetrics",
            "pi",
        ));
        let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
        Ok(op)
    }
    /// Creates a new builder-style object to manufacture [`GetResourceMetricsInput`](crate::input::GetResourceMetricsInput)
    pub fn builder() -> crate::input::get_resource_metrics_input::Builder {
        crate::input::get_resource_metrics_input::Builder::default()
    }
}
/// See [`ListAvailableResourceDimensionsInput`](crate::input::ListAvailableResourceDimensionsInput)
pub mod list_available_resource_dimensions_input {
    /// A builder for [`ListAvailableResourceDimensionsInput`](crate::input::ListAvailableResourceDimensionsInput)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) service_type: std::option::Option<crate::model::ServiceType>,
        pub(crate) identifier: std::option::Option<std::string::String>,
        pub(crate) metrics: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) max_results: std::option::Option<i32>,
        pub(crate) next_token: std::option::Option<std::string::String>,
    }
    impl Builder {
        /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
        pub fn service_type(self, input: crate::model::ServiceType) -> Self {
            // Delegate to the Option-based setter so both paths stay in sync.
            self.set_service_type(Some(input))
        }
        /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
        pub fn set_service_type(
            self,
            input: std::option::Option<crate::model::ServiceType>,
        ) -> Self {
            Self { service_type: input, ..self }
        }
        /// <p>An immutable identifier for a data source that is unique within an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use an Amazon RDS DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VWZ</code>. </p>
        pub fn identifier(self, input: impl Into<std::string::String>) -> Self {
            self.set_identifier(Some(input.into()))
        }
        /// <p>An immutable identifier for a data source that is unique within an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use an Amazon RDS DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VWZ</code>. </p>
        pub fn set_identifier(self, input: std::option::Option<std::string::String>) -> Self {
            Self { identifier: input, ..self }
        }
        /// Appends an item to `metrics`.
        ///
        /// To override the contents of this collection use [`set_metrics`](Self::set_metrics).
        ///
        /// <p>The types of metrics for which to retrieve dimensions. Valid values include <code>db.load</code>.</p>
        pub fn metrics(mut self, input: impl Into<std::string::String>) -> Self {
            // Append to the existing list, creating it on first use.
            self.metrics.get_or_insert_with(Vec::new).push(input.into());
            self
        }
        /// <p>The types of metrics for which to retrieve dimensions. Valid values include <code>db.load</code>.</p>
        pub fn set_metrics(
            self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            Self { metrics: input, ..self }
        }
        /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved.</p>
        pub fn max_results(self, input: i32) -> Self {
            self.set_max_results(Some(input))
        }
        /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved.</p>
        pub fn set_max_results(self, input: std::option::Option<i32>) -> Self {
            Self { max_results: input, ..self }
        }
        /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
        pub fn next_token(self, input: impl Into<std::string::String>) -> Self {
            self.set_next_token(Some(input.into()))
        }
        /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
        pub fn set_next_token(self, input: std::option::Option<std::string::String>) -> Self {
            Self { next_token: input, ..self }
        }
        /// Consumes the builder and constructs a [`ListAvailableResourceDimensionsInput`](crate::input::ListAvailableResourceDimensionsInput)
        pub fn build(
            self,
        ) -> std::result::Result<
            crate::input::ListAvailableResourceDimensionsInput,
            aws_smithy_http::operation::BuildError,
        > {
            // Move every field across unchanged; no field is required at build time.
            let Self {
                service_type,
                identifier,
                metrics,
                max_results,
                next_token,
            } = self;
            Ok(crate::input::ListAvailableResourceDimensionsInput {
                service_type,
                identifier,
                metrics,
                max_results,
                next_token,
            })
        }
    }
}
// Hidden aliases pairing this input with its operation type and retry policy.
// NOTE(review): presumably consumed by the generated client/middleware glue — confirm in the client module.
#[doc(hidden)]
pub type ListAvailableResourceDimensionsInputOperationOutputAlias =
    crate::operation::ListAvailableResourceDimensions;
#[doc(hidden)]
pub type ListAvailableResourceDimensionsInputOperationRetryAlias =
    aws_http::retry::AwsErrorRetryPolicy;
impl ListAvailableResourceDimensionsInput {
/// Consumes the builder and constructs an Operation<[`ListAvailableResourceDimensions`](crate::operation::ListAvailableResourceDimensions)>
#[allow(unused_mut)]
#[allow(clippy::let_and_return)]
#[allow(clippy::needless_borrow)]
pub async fn
|
(
&self,
_config: &crate::config::Config,
) -> std::result::Result<
aws_smithy_http::operation::Operation<
crate::operation::ListAvailableResourceDimensions,
aws_http::retry::AwsErrorRetryPolicy,
>,
aws_smithy_http::operation::BuildError,
> {
let mut request = {
fn uri_base(
_input: &crate::input::ListAvailableResourceDimensionsInput,
output: &mut String,
) -> Result<(), aws_smithy_http::operation::BuildError> {
write!(output, "/").expect("formatting should succeed");
Ok(())
}
#[allow(clippy::unnecessary_wraps)]
fn update_http_builder(
input: &crate::input::ListAvailableResourceDimensionsInput,
builder: http::request::Builder,
) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
{
let mut uri = String::new();
uri_base(input, &mut uri)?;
Ok(builder.method("POST").uri(uri))
}
let mut builder = update_http_builder(&self, http::request::Builder::new())?;
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::CONTENT_TYPE,
"application/x-amz-json-1.1",
);
builder = aws_smithy_http::header::set_request_header_if_absent(
builder,
http::header::HeaderName::from_static("x-amz-target"),
"PerformanceInsightsv20180227.ListAvailableResourceDimensions",
);
builder
};
let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
#[allow(clippy::useless_conversion)]let body = aws_smithy_http::body::SdkBody::from(
crate::operation_ser::serialize_operation_crate_operation_list_available_resource_dimensions(&self)?
);
if let Some(content_length) = body.content_length() {
request = aws_smithy_http::header::set_request_header_if_absent(
request,
http::header::CONTENT_LENGTH,
content_length,
);
}
let request = request.body(body).expect("should be valid request");
let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
aws_types::os_shim_internal::Env::real(),
crate::API_METADATA.clone(),
);
if let Some(app_name) = _config.app_name() {
user_agent = user_agent.with_app_name(app_name.clone());
}
request.properties_mut().insert(user_agent);
let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
request.properties_mut().insert(signing_config);
request
.properties_mut()
.insert(aws_types::SigningService::from_static(
_config.signing_service(),
));
aws_endpoint::set_endpoint_resolver(
&mut request.properties_mut(),
_config.endpoint_resolver.clone(),
);
if let Some(region) = &_config.region {
request.properties_mut().insert(region.clone());
}
aws_http::auth::set_provider(
&mut request.properties_mut(),
_config.credentials_provider.clone(),
);
let op = aws_smithy_http::operation::Operation::new(
request,
crate::operation::ListAvailableResourceDimensions::new(),
)
.with_metadata(aws_smithy_http::operation::Metadata::new(
"ListAvailableResourceDimensions",
"pi",
));
let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
Ok(op)
}
/// Creates a new builder-style object to manufacture [`ListAvailableResourceDimensionsInput`](crate::input::ListAvailableResourceDimensionsInput)
pub fn builder() -> crate::input::list_available_resource_dimensions_input::Builder {
crate::input::list_available_resource_dimensions_input::Builder::default()
}
}
/// See [`ListAvailableResourceMetricsInput`](crate::input::ListAvailableResourceMetricsInput)
pub mod list_available_resource_metrics_input {
    /// A builder for [`ListAvailableResourceMetricsInput`](crate::input::ListAvailableResourceMetricsInput)
    #[non_exhaustive]
    #[derive(std::default::Default, std::clone::Clone, std::cmp::PartialEq, std::fmt::Debug)]
    pub struct Builder {
        pub(crate) service_type: std::option::Option<crate::model::ServiceType>,
        pub(crate) identifier: std::option::Option<std::string::String>,
        pub(crate) metric_types: std::option::Option<std::vec::Vec<std::string::String>>,
        pub(crate) next_token: std::option::Option<std::string::String>,
        pub(crate) max_results: std::option::Option<i32>,
    }
    impl Builder {
        /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
        pub fn service_type(self, input: crate::model::ServiceType) -> Self {
            Self {
                service_type: Some(input),
                ..self
            }
        }
        /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
        pub fn set_service_type(
            self,
            input: std::option::Option<crate::model::ServiceType>,
        ) -> Self {
            Self {
                service_type: input,
                ..self
            }
        }
        /// <p>An immutable identifier for a data source that is unique within an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use an Amazon RDS DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VWZ</code>. </p>
        pub fn identifier(self, input: impl Into<std::string::String>) -> Self {
            Self {
                identifier: Some(input.into()),
                ..self
            }
        }
        /// <p>An immutable identifier for a data source that is unique within an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use an Amazon RDS DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VWZ</code>. </p>
        pub fn set_identifier(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                identifier: input,
                ..self
            }
        }
        /// Appends an item to `metric_types`.
        ///
        /// To override the contents of this collection use [`set_metric_types`](Self::set_metric_types).
        ///
        /// <p>The types of metrics to return in the response. Valid values in the array include the following:</p>
        /// <ul>
        /// <li> <p> <code>os</code> (OS counter metrics)</p> </li>
        /// <li> <p> <code>db</code> (DB load metrics)</p> </li>
        /// <li> <p> <code>db.sql.stats</code> (per-SQL metrics)</p> </li>
        /// <li> <p> <code>db.sql_tokenized.stats</code> (per-SQL digest metrics)</p> </li>
        /// </ul>
        pub fn metric_types(mut self, input: impl Into<std::string::String>) -> Self {
            // Lazily create the vector on first append, then push the new entry.
            let mut entries = self.metric_types.take().unwrap_or_default();
            entries.push(input.into());
            self.metric_types = Some(entries);
            self
        }
        /// <p>The types of metrics to return in the response. Valid values in the array include the following:</p>
        /// <ul>
        /// <li> <p> <code>os</code> (OS counter metrics)</p> </li>
        /// <li> <p> <code>db</code> (DB load metrics)</p> </li>
        /// <li> <p> <code>db.sql.stats</code> (per-SQL metrics)</p> </li>
        /// <li> <p> <code>db.sql_tokenized.stats</code> (per-SQL digest metrics)</p> </li>
        /// </ul>
        pub fn set_metric_types(
            self,
            input: std::option::Option<std::vec::Vec<std::string::String>>,
        ) -> Self {
            Self {
                metric_types: input,
                ..self
            }
        }
        /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
        pub fn next_token(self, input: impl Into<std::string::String>) -> Self {
            Self {
                next_token: Some(input.into()),
                ..self
            }
        }
        /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
        pub fn set_next_token(self, input: std::option::Option<std::string::String>) -> Self {
            Self {
                next_token: input,
                ..self
            }
        }
        /// <p>The maximum number of items to return. If the <code>MaxRecords</code> value is less than the number of existing items, the response includes a pagination token. </p>
        pub fn max_results(self, input: i32) -> Self {
            Self {
                max_results: Some(input),
                ..self
            }
        }
        /// <p>The maximum number of items to return. If the <code>MaxRecords</code> value is less than the number of existing items, the response includes a pagination token. </p>
        pub fn set_max_results(self, input: std::option::Option<i32>) -> Self {
            Self {
                max_results: input,
                ..self
            }
        }
        /// Consumes the builder and constructs a [`ListAvailableResourceMetricsInput`](crate::input::ListAvailableResourceMetricsInput)
        pub fn build(
            self,
        ) -> std::result::Result<
            crate::input::ListAvailableResourceMetricsInput,
            aws_smithy_http::operation::BuildError,
        > {
            // Move every field straight across; building this input cannot fail.
            let Self {
                service_type,
                identifier,
                metric_types,
                next_token,
                max_results,
            } = self;
            Ok(crate::input::ListAvailableResourceMetricsInput {
                service_type,
                identifier,
                metric_types,
                next_token,
                max_results,
            })
        }
    }
}
// Internal aliases used by the fluent client plumbing; hidden from rustdoc.
// Maps this input type to its operation marker type.
#[doc(hidden)]
pub type ListAvailableResourceMetricsInputOperationOutputAlias =
    crate::operation::ListAvailableResourceMetrics;
// The retry policy attached to the operation in `make_operation` below.
#[doc(hidden)]
pub type ListAvailableResourceMetricsInputOperationRetryAlias =
    aws_http::retry::AwsErrorRetryPolicy;
impl ListAvailableResourceMetricsInput {
    /// Consumes the builder and constructs an Operation<[`ListAvailableResourceMetrics`](crate::operation::ListAvailableResourceMetrics)>
    #[allow(unused_mut)]
    #[allow(clippy::let_and_return)]
    #[allow(clippy::needless_borrow)]
    pub async fn make_operation(
        &self,
        _config: &crate::config::Config,
    ) -> std::result::Result<
        aws_smithy_http::operation::Operation<
            crate::operation::ListAvailableResourceMetrics,
            aws_http::retry::AwsErrorRetryPolicy,
        >,
        aws_smithy_http::operation::BuildError,
    > {
        // Build the HTTP request skeleton: a POST to the service root with the
        // AWS JSON 1.1 content type and the x-amz-target header naming this
        // operation for the Performance Insights service.
        let mut request = {
            fn uri_base(
                _input: &crate::input::ListAvailableResourceMetricsInput,
                output: &mut String,
            ) -> Result<(), aws_smithy_http::operation::BuildError> {
                // AWS JSON protocol: always "/" — the input carries no URI parts.
                write!(output, "/").expect("formatting should succeed");
                Ok(())
            }
            #[allow(clippy::unnecessary_wraps)]
            fn update_http_builder(
                input: &crate::input::ListAvailableResourceMetricsInput,
                builder: http::request::Builder,
            ) -> std::result::Result<http::request::Builder, aws_smithy_http::operation::BuildError>
            {
                let mut uri = String::new();
                uri_base(input, &mut uri)?;
                Ok(builder.method("POST").uri(uri))
            }
            let mut builder = update_http_builder(&self, http::request::Builder::new())?;
            // "if_absent" so any header already supplied upstream wins.
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::CONTENT_TYPE,
                "application/x-amz-json-1.1",
            );
            builder = aws_smithy_http::header::set_request_header_if_absent(
                builder,
                http::header::HeaderName::from_static("x-amz-target"),
                "PerformanceInsightsv20180227.ListAvailableResourceMetrics",
            );
            builder
        };
        let mut properties = aws_smithy_http::property_bag::SharedPropertyBag::new();
        // Serialize this input into the JSON request body.
        #[allow(clippy::useless_conversion)]let body = aws_smithy_http::body::SdkBody::from(
            crate::operation_ser::serialize_operation_crate_operation_list_available_resource_metrics(&self)?
        );
        // Set Content-Length when the body size is known up front.
        if let Some(content_length) = body.content_length() {
            request = aws_smithy_http::header::set_request_header_if_absent(
                request,
                http::header::CONTENT_LENGTH,
                content_length,
            );
        }
        let request = request.body(body).expect("should be valid request");
        let mut request = aws_smithy_http::operation::Request::from_parts(request, properties);
        // Attach the user agent, optionally tagged with the configured app name.
        let mut user_agent = aws_http::user_agent::AwsUserAgent::new_from_environment(
            aws_types::os_shim_internal::Env::real(),
            crate::API_METADATA.clone(),
        );
        if let Some(app_name) = _config.app_name() {
            user_agent = user_agent.with_app_name(app_name.clone());
        }
        request.properties_mut().insert(user_agent);
        // Store signing configuration, signing service name, endpoint resolver,
        // region, and the credentials provider in the property bag; downstream
        // middleware reads these to sign and dispatch the request.
        let mut signing_config = aws_sig_auth::signer::OperationSigningConfig::default_config();
        request.properties_mut().insert(signing_config);
        request
            .properties_mut()
            .insert(aws_types::SigningService::from_static(
                _config.signing_service(),
            ));
        aws_endpoint::set_endpoint_resolver(
            &mut request.properties_mut(),
            _config.endpoint_resolver.clone(),
        );
        if let Some(region) = &_config.region {
            request.properties_mut().insert(region.clone());
        }
        aws_http::auth::set_provider(
            &mut request.properties_mut(),
            _config.credentials_provider.clone(),
        );
        // Wrap everything into an Operation with its metadata and the standard
        // AWS error-retry policy.
        let op = aws_smithy_http::operation::Operation::new(
            request,
            crate::operation::ListAvailableResourceMetrics::new(),
        )
        .with_metadata(aws_smithy_http::operation::Metadata::new(
            "ListAvailableResourceMetrics",
            "pi",
        ));
        let op = op.with_retry_policy(aws_http::retry::AwsErrorRetryPolicy::new());
        Ok(op)
    }
    /// Creates a new builder-style object to manufacture [`ListAvailableResourceMetricsInput`](crate::input::ListAvailableResourceMetricsInput)
    pub fn builder() -> crate::input::list_available_resource_metrics_input::Builder {
        crate::input::list_available_resource_metrics_input::Builder::default()
    }
}
// Input shape for the `ListAvailableResourceMetrics` operation; values are
// assembled via `list_available_resource_metrics_input::Builder` (see its
// `build()` above). All fields are `Option` at the type level.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ListAvailableResourceMetricsInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
    pub service_type: std::option::Option<crate::model::ServiceType>,
    /// <p>An immutable identifier for a data source that is unique within an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use an Amazon RDS DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VWZ</code>. </p>
    pub identifier: std::option::Option<std::string::String>,
    /// <p>The types of metrics to return in the response. Valid values in the array include the following:</p>
    /// <ul>
    /// <li> <p> <code>os</code> (OS counter metrics)</p> </li>
    /// <li> <p> <code>db</code> (DB load metrics)</p> </li>
    /// <li> <p> <code>db.sql.stats</code> (per-SQL metrics)</p> </li>
    /// <li> <p> <code>db.sql_tokenized.stats</code> (per-SQL digest metrics)</p> </li>
    /// </ul>
    pub metric_types: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
    pub next_token: std::option::Option<std::string::String>,
    /// <p>The maximum number of items to return. If the <code>MaxRecords</code> value is less than the number of existing items, the response includes a pagination token. </p>
    pub max_results: std::option::Option<i32>,
}
impl ListAvailableResourceMetricsInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
    pub fn service_type(&self) -> std::option::Option<&crate::model::ServiceType> {
        match self.service_type {
            Some(ref service_type) => Some(service_type),
            None => None,
        }
    }
    /// <p>An immutable identifier for a data source that is unique within an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use an Amazon RDS DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VWZ</code>. </p>
    pub fn identifier(&self) -> std::option::Option<&str> {
        self.identifier.as_ref().map(|value| value.as_str())
    }
    /// <p>The types of metrics to return in the response. Valid values in the array include the following:</p>
    /// <ul>
    /// <li> <p> <code>os</code> (OS counter metrics)</p> </li>
    /// <li> <p> <code>db</code> (DB load metrics)</p> </li>
    /// <li> <p> <code>db.sql.stats</code> (per-SQL metrics)</p> </li>
    /// <li> <p> <code>db.sql_tokenized.stats</code> (per-SQL digest metrics)</p> </li>
    /// </ul>
    pub fn metric_types(&self) -> std::option::Option<&[std::string::String]> {
        self.metric_types.as_ref().map(|values| values.as_slice())
    }
    /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
    pub fn next_token(&self) -> std::option::Option<&str> {
        self.next_token.as_ref().map(|value| value.as_str())
    }
    /// <p>The maximum number of items to return. If the <code>MaxRecords</code> value is less than the number of existing items, the response includes a pagination token. </p>
    pub fn max_results(&self) -> std::option::Option<i32> {
        // i32 is Copy, so the Option can be returned by value.
        self.max_results
    }
}
impl std::fmt::Debug for ListAvailableResourceMetricsInput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; field order mirrors the declaration.
        f.debug_struct("ListAvailableResourceMetricsInput")
            .field("service_type", &self.service_type)
            .field("identifier", &self.identifier)
            .field("metric_types", &self.metric_types)
            .field("next_token", &self.next_token)
            .field("max_results", &self.max_results)
            .finish()
    }
}
// Input shape for the `ListAvailableResourceDimensions` operation (see its
// `make_operation`/`builder` above). All fields are `Option` at the type level.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct ListAvailableResourceDimensionsInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
    pub service_type: std::option::Option<crate::model::ServiceType>,
    /// <p>An immutable identifier for a data source that is unique within an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use an Amazon RDS DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VWZ</code>. </p>
    pub identifier: std::option::Option<std::string::String>,
    /// <p>The types of metrics for which to retrieve dimensions. Valid values include <code>db.load</code>.</p>
    pub metrics: std::option::Option<std::vec::Vec<std::string::String>>,
    /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved.</p>
    pub max_results: std::option::Option<i32>,
    /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
    pub next_token: std::option::Option<std::string::String>,
}
impl ListAvailableResourceDimensionsInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
    pub fn service_type(&self) -> std::option::Option<&crate::model::ServiceType> {
        match self.service_type {
            Some(ref service_type) => Some(service_type),
            None => None,
        }
    }
    /// <p>An immutable identifier for a data source that is unique within an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use an Amazon RDS DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VWZ</code>. </p>
    pub fn identifier(&self) -> std::option::Option<&str> {
        self.identifier.as_ref().map(|value| value.as_str())
    }
    /// <p>The types of metrics for which to retrieve dimensions. Valid values include <code>db.load</code>.</p>
    pub fn metrics(&self) -> std::option::Option<&[std::string::String]> {
        self.metrics.as_ref().map(|values| values.as_slice())
    }
    /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved.</p>
    pub fn max_results(&self) -> std::option::Option<i32> {
        // i32 is Copy, so the Option can be returned by value.
        self.max_results
    }
    /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
    pub fn next_token(&self) -> std::option::Option<&str> {
        self.next_token.as_ref().map(|value| value.as_str())
    }
}
impl std::fmt::Debug for ListAvailableResourceDimensionsInput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; field order mirrors the declaration.
        f.debug_struct("ListAvailableResourceDimensionsInput")
            .field("service_type", &self.service_type)
            .field("identifier", &self.identifier)
            .field("metrics", &self.metrics)
            .field("max_results", &self.max_results)
            .field("next_token", &self.next_token)
            .finish()
    }
}
// Input shape for the `GetResourceMetrics` operation. Presumably constructed
// via a matching builder module (outside this view) like the other inputs —
// TODO(review): confirm. All fields are `Option` at the type level.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct GetResourceMetricsInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns metrics. The only valid value for <i>ServiceType</i> is <code>RDS</code>.</p>
    pub service_type: std::option::Option<crate::model::ServiceType>,
    /// <p>An immutable, Amazon Web Services Region-unique identifier for a data source. Performance Insights gathers metrics from this data source.</p>
    /// <p>To use a DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-FAIHNTYBKTGAUSUZQYPDS2GW4A</code>.</p>
    pub identifier: std::option::Option<std::string::String>,
    /// <p>An array of one or more queries to perform. Each query must specify a Performance Insights metric, and can optionally specify aggregation and filtering criteria.</p>
    pub metric_queries: std::option::Option<std::vec::Vec<crate::model::MetricQuery>>,
    /// <p>The date and time specifying the beginning of the requested time series data. You can't specify a <code>StartTime</code> that's earlier than 7 days ago. The value specified is <i>inclusive</i> - data points equal to or greater than <code>StartTime</code> will be returned.</p>
    /// <p>The value for <code>StartTime</code> must be earlier than the value for <code>EndTime</code>.</p>
    pub start_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The date and time specifying the end of the requested time series data. The value specified is <i>exclusive</i> - data points less than (but not equal to) <code>EndTime</code> will be returned.</p>
    /// <p>The value for <code>EndTime</code> must be later than the value for <code>StartTime</code>.</p>
    pub end_time: std::option::Option<aws_smithy_types::DateTime>,
    /// <p>The granularity, in seconds, of the data points returned from Performance Insights. A period can be as short as one second, or as long as one day (86400 seconds). Valid values are:</p>
    /// <ul>
    /// <li> <p> <code>1</code> (one second)</p> </li>
    /// <li> <p> <code>60</code> (one minute)</p> </li>
    /// <li> <p> <code>300</code> (five minutes)</p> </li>
    /// <li> <p> <code>3600</code> (one hour)</p> </li>
    /// <li> <p> <code>86400</code> (twenty-four hours)</p> </li>
    /// </ul>
    /// <p>If you don't specify <code>PeriodInSeconds</code>, then Performance Insights will choose a value for you, with a goal of returning roughly 100-200 data points in the response.</p>
    pub period_in_seconds: std::option::Option<i32>,
    /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved. </p>
    pub max_results: std::option::Option<i32>,
    /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>.</p>
    pub next_token: std::option::Option<std::string::String>,
}
impl GetResourceMetricsInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns metrics. The only valid value for <i>ServiceType</i> is <code>RDS</code>.</p>
    pub fn service_type(&self) -> std::option::Option<&crate::model::ServiceType> {
        match self.service_type {
            Some(ref service_type) => Some(service_type),
            None => None,
        }
    }
    /// <p>An immutable, Amazon Web Services Region-unique identifier for a data source. Performance Insights gathers metrics from this data source.</p>
    /// <p>To use a DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-FAIHNTYBKTGAUSUZQYPDS2GW4A</code>.</p>
    pub fn identifier(&self) -> std::option::Option<&str> {
        self.identifier.as_ref().map(|value| value.as_str())
    }
    /// <p>An array of one or more queries to perform. Each query must specify a Performance Insights metric, and can optionally specify aggregation and filtering criteria.</p>
    pub fn metric_queries(&self) -> std::option::Option<&[crate::model::MetricQuery]> {
        self.metric_queries.as_ref().map(|values| values.as_slice())
    }
    /// <p>The date and time specifying the beginning of the requested time series data. You can't specify a <code>StartTime</code> that's earlier than 7 days ago. The value specified is <i>inclusive</i> - data points equal to or greater than <code>StartTime</code> will be returned.</p>
    /// <p>The value for <code>StartTime</code> must be earlier than the value for <code>EndTime</code>.</p>
    pub fn start_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match self.start_time {
            Some(ref start_time) => Some(start_time),
            None => None,
        }
    }
    /// <p>The date and time specifying the end of the requested time series data. The value specified is <i>exclusive</i> - data points less than (but not equal to) <code>EndTime</code> will be returned.</p>
    /// <p>The value for <code>EndTime</code> must be later than the value for <code>StartTime</code>.</p>
    pub fn end_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
        match self.end_time {
            Some(ref end_time) => Some(end_time),
            None => None,
        }
    }
    /// <p>The granularity, in seconds, of the data points returned from Performance Insights. A period can be as short as one second, or as long as one day (86400 seconds). Valid values are:</p>
    /// <ul>
    /// <li> <p> <code>1</code> (one second)</p> </li>
    /// <li> <p> <code>60</code> (one minute)</p> </li>
    /// <li> <p> <code>300</code> (five minutes)</p> </li>
    /// <li> <p> <code>3600</code> (one hour)</p> </li>
    /// <li> <p> <code>86400</code> (twenty-four hours)</p> </li>
    /// </ul>
    /// <p>If you don't specify <code>PeriodInSeconds</code>, then Performance Insights will choose a value for you, with a goal of returning roughly 100-200 data points in the response.</p>
    pub fn period_in_seconds(&self) -> std::option::Option<i32> {
        // i32 is Copy, so the Option can be returned by value.
        self.period_in_seconds
    }
    /// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved. </p>
    pub fn max_results(&self) -> std::option::Option<i32> {
        self.max_results
    }
    /// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>.</p>
    pub fn next_token(&self) -> std::option::Option<&str> {
        self.next_token.as_ref().map(|value| value.as_str())
    }
}
impl std::fmt::Debug for GetResourceMetricsInput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; field order mirrors the declaration.
        f.debug_struct("GetResourceMetricsInput")
            .field("service_type", &self.service_type)
            .field("identifier", &self.identifier)
            .field("metric_queries", &self.metric_queries)
            .field("start_time", &self.start_time)
            .field("end_time", &self.end_time)
            .field("period_in_seconds", &self.period_in_seconds)
            .field("max_results", &self.max_results)
            .field("next_token", &self.next_token)
            .finish()
    }
}
// Input shape for the `GetResourceMetadata` operation. Presumably constructed
// via a matching builder module (outside this view) like the other inputs —
// TODO(review): confirm. All fields are `Option` at the type level.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct GetResourceMetadataInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
    pub service_type: std::option::Option<crate::model::ServiceType>,
    /// <p>An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use a DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>. </p>
    pub identifier: std::option::Option<std::string::String>,
}
impl GetResourceMetadataInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns metrics.</p>
    pub fn service_type(&self) -> std::option::Option<&crate::model::ServiceType> {
        match self.service_type {
            Some(ref service_type) => Some(service_type),
            None => None,
        }
    }
    /// <p>An immutable identifier for a data source that is unique for an Amazon Web Services Region. Performance Insights gathers metrics from this data source. To use a DB instance as a data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>. </p>
    pub fn identifier(&self) -> std::option::Option<&str> {
        self.identifier.as_ref().map(|value| value.as_str())
    }
}
impl std::fmt::Debug for GetResourceMetadataInput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; field order mirrors the declaration.
        f.debug_struct("GetResourceMetadataInput")
            .field("service_type", &self.service_type)
            .field("identifier", &self.identifier)
            .finish()
    }
}
// Input shape for the `GetDimensionKeyDetails` operation. Presumably
// constructed via a matching builder module (outside this view) like the other
// inputs — TODO(review): confirm. All fields are `Option` at the type level.
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct GetDimensionKeyDetailsInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns data. The only valid value is <code>RDS</code>.</p>
    pub service_type: std::option::Option<crate::model::ServiceType>,
    /// <p>The ID for a data source from which to gather dimension data. This ID must be immutable and unique within an Amazon Web Services Region. When a DB instance is the data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>. </p>
    pub identifier: std::option::Option<std::string::String>,
    /// <p>The name of the dimension group. The only valid value is <code>db.sql</code>. Performance Insights searches the specified group for the dimension group ID.</p>
    pub group: std::option::Option<std::string::String>,
    /// <p>The ID of the dimension group from which to retrieve dimension details. For dimension group <code>db.sql</code>, the group ID is <code>db.sql.id</code>.</p>
    pub group_identifier: std::option::Option<std::string::String>,
    /// <p>A list of dimensions to retrieve the detail data for within the given dimension group. For the dimension group <code>db.sql</code>, specify either the full dimension name <code>db.sql.statement</code> or the short dimension name <code>statement</code>. If you don't specify this parameter, Performance Insights returns all dimension data within the specified dimension group. </p>
    pub requested_dimensions: std::option::Option<std::vec::Vec<std::string::String>>,
}
impl GetDimensionKeyDetailsInput {
    /// <p>The Amazon Web Services service for which Performance Insights returns data. The only valid value is <code>RDS</code>.</p>
    pub fn service_type(&self) -> std::option::Option<&crate::model::ServiceType> {
        match self.service_type {
            Some(ref service_type) => Some(service_type),
            None => None,
        }
    }
    /// <p>The ID for a data source from which to gather dimension data. This ID must be immutable and unique within an Amazon Web Services Region. When a DB instance is the data source, specify its <code>DbiResourceId</code> value. For example, specify <code>db-ABCDEFGHIJKLMNOPQRSTU1VW2X</code>. </p>
    pub fn identifier(&self) -> std::option::Option<&str> {
        self.identifier.as_ref().map(|value| value.as_str())
    }
    /// <p>The name of the dimension group. The only valid value is <code>db.sql</code>. Performance Insights searches the specified group for the dimension group ID.</p>
    pub fn group(&self) -> std::option::Option<&str> {
        self.group.as_ref().map(|value| value.as_str())
    }
    /// <p>The ID of the dimension group from which to retrieve dimension details. For dimension group <code>db.sql</code>, the group ID is <code>db.sql.id</code>.</p>
    pub fn group_identifier(&self) -> std::option::Option<&str> {
        self.group_identifier.as_ref().map(|value| value.as_str())
    }
    /// <p>A list of dimensions to retrieve the detail data for within the given dimension group. For the dimension group <code>db.sql</code>, specify either the full dimension name <code>db.sql.statement</code> or the short dimension name <code>statement</code>. If you don't specify this parameter, Performance Insights returns all dimension data within the specified dimension group. </p>
    pub fn requested_dimensions(&self) -> std::option::Option<&[std::string::String]> {
        self.requested_dimensions
            .as_ref()
            .map(|values| values.as_slice())
    }
}
impl std::fmt::Debug for GetDimensionKeyDetailsInput {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Chained debug-struct builder; field order mirrors the declaration.
        f.debug_struct("GetDimensionKeyDetailsInput")
            .field("service_type", &self.service_type)
            .field("identifier", &self.identifier)
            .field("group", &self.group)
            .field("group_identifier", &self.group_identifier)
            .field("requested_dimensions", &self.requested_dimensions)
            .finish()
    }
}
#[allow(missing_docs)] // documentation missing in model
#[non_exhaustive]
#[derive(std::clone::Clone, std::cmp::PartialEq)]
pub struct DescribeDimensionKeysInput {
/// <p>The Amazon Web Services service for which Performance Insights will return metrics. The only valid value for <i>ServiceType</i> is <code>RDS</code>. </p>
pub service_type: std::option::Option<crate::model::ServiceType>,
/// <p>An immutable, Amazon Web Services Region-unique identifier for a data source. Performance Insights gathers metrics from this data source.</p>
/// <p>To use an Amazon RDS instance as a data source, you specify its <code>DbiResourceId</code> value. For example, specify <code>db-FAIHNTYBKTGAUSUZQYPDS2GW4A</code>. </p>
pub identifier: std::option::Option<std::string::String>,
/// <p>The date and time specifying the beginning of the requested time series data. You must specify a <code>StartTime</code> within the past 7 days. The value specified is <i>inclusive</i>, which means that data points equal to or greater than <code>StartTime</code> are returned. </p>
/// <p>The value for <code>StartTime</code> must be earlier than the value for <code>EndTime</code>. </p>
pub start_time: std::option::Option<aws_smithy_types::DateTime>,
/// <p>The date and time specifying the end of the requested time series data. The value specified is <i>exclusive</i>, which means that data points less than (but not equal to) <code>EndTime</code> are returned.</p>
/// <p>The value for <code>EndTime</code> must be later than the value for <code>StartTime</code>.</p>
pub end_time: std::option::Option<aws_smithy_types::DateTime>,
/// <p>The name of a Performance Insights metric to be measured.</p>
/// <p>Valid values for <code>Metric</code> are:</p>
/// <ul>
/// <li> <p> <code>db.load.avg</code> - a scaled representation of the number of active sessions for the database engine. </p> </li>
/// <li> <p> <code>db.sampledload.avg</code> - the raw number of active sessions for the database engine. </p> </li>
/// </ul>
/// <p>If the number of active sessions is less than an internal Performance Insights threshold, <code>db.load.avg</code> and <code>db.sampledload.avg</code> are the same value. If the number of active sessions is greater than the internal threshold, Performance Insights samples the active sessions, with <code>db.load.avg</code> showing the scaled values, <code>db.sampledload.avg</code> showing the raw values, and <code>db.sampledload.avg</code> less than <code>db.load.avg</code>. For most use cases, you can query <code>db.load.avg</code> only. </p>
pub metric: std::option::Option<std::string::String>,
/// <p>The granularity, in seconds, of the data points returned from Performance Insights. A period can be as short as one second, or as long as one day (86400 seconds). Valid values are: </p>
/// <ul>
/// <li> <p> <code>1</code> (one second)</p> </li>
/// <li> <p> <code>60</code> (one minute)</p> </li>
/// <li> <p> <code>300</code> (five minutes)</p> </li>
/// <li> <p> <code>3600</code> (one hour)</p> </li>
/// <li> <p> <code>86400</code> (twenty-four hours)</p> </li>
/// </ul>
/// <p>If you don't specify <code>PeriodInSeconds</code>, then Performance Insights chooses a value for you, with a goal of returning roughly 100-200 data points in the response. </p>
pub period_in_seconds: std::option::Option<i32>,
/// <p>A specification for how to aggregate the data points from a query result. You must specify a valid dimension group. Performance Insights returns all dimensions within this group, unless you provide the names of specific dimensions within this group. You can also request that Performance Insights return a limited number of values for a dimension. </p>
pub group_by: std::option::Option<crate::model::DimensionGroup>,
/// <p>Additional metrics for the top <code>N</code> dimension keys. If the specified dimension group in the <code>GroupBy</code> parameter is <code>db.sql_tokenized</code>, you can specify per-SQL metrics to get the values for the top <code>N</code> SQL digests. The response syntax is <code>"AdditionalMetrics" : { "<i>string</i>" : "<i>string</i>" }</code>. </p>
/// <p></p>
pub additional_metrics: std::option::Option<std::vec::Vec<std::string::String>>,
/// <p>For each dimension specified in <code>GroupBy</code>, specify a secondary dimension to further subdivide the partition keys in the response. </p>
pub partition_by: std::option::Option<crate::model::DimensionGroup>,
/// <p>One or more filters to apply in the request. Restrictions:</p>
/// <ul>
/// <li> <p>Any number of filters by the same dimension, as specified in the <code>GroupBy</code> or <code>Partition</code> parameters.</p> </li>
/// <li> <p>A single filter for any other dimension in this dimension group.</p> </li>
/// </ul>
pub filter:
std::option::Option<std::collections::HashMap<std::string::String, std::string::String>>,
/// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved. </p>
pub max_results: std::option::Option<i32>,
/// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
pub next_token: std::option::Option<std::string::String>,
}
impl DescribeDimensionKeysInput {
/// <p>The Amazon Web Services service for which Performance Insights will return metrics. The only valid value for <i>ServiceType</i> is <code>RDS</code>. </p>
pub fn service_type(&self) -> std::option::Option<&crate::model::ServiceType> {
self.service_type.as_ref()
}
/// <p>An immutable, Amazon Web Services Region-unique identifier for a data source. Performance Insights gathers metrics from this data source.</p>
/// <p>To use an Amazon RDS instance as a data source, you specify its <code>DbiResourceId</code> value. For example, specify <code>db-FAIHNTYBKTGAUSUZQYPDS2GW4A</code>. </p>
pub fn identifier(&self) -> std::option::Option<&str> {
self.identifier.as_deref()
}
/// <p>The date and time specifying the beginning of the requested time series data. You must specify a <code>StartTime</code> within the past 7 days. The value specified is <i>inclusive</i>, which means that data points equal to or greater than <code>StartTime</code> are returned. </p>
/// <p>The value for <code>StartTime</code> must be earlier than the value for <code>EndTime</code>. </p>
pub fn start_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
self.start_time.as_ref()
}
/// <p>The date and time specifying the end of the requested time series data. The value specified is <i>exclusive</i>, which means that data points less than (but not equal to) <code>EndTime</code> are returned.</p>
/// <p>The value for <code>EndTime</code> must be later than the value for <code>StartTime</code>.</p>
pub fn end_time(&self) -> std::option::Option<&aws_smithy_types::DateTime> {
self.end_time.as_ref()
}
/// <p>The name of a Performance Insights metric to be measured.</p>
/// <p>Valid values for <code>Metric</code> are:</p>
/// <ul>
/// <li> <p> <code>db.load.avg</code> - a scaled representation of the number of active sessions for the database engine. </p> </li>
/// <li> <p> <code>db.sampledload.avg</code> - the raw number of active sessions for the database engine. </p> </li>
/// </ul>
/// <p>If the number of active sessions is less than an internal Performance Insights threshold, <code>db.load.avg</code> and <code>db.sampledload.avg</code> are the same value. If the number of active sessions is greater than the internal threshold, Performance Insights samples the active sessions, with <code>db.load.avg</code> showing the scaled values, <code>db.sampledload.avg</code> showing the raw values, and <code>db.sampledload.avg</code> less than <code>db.load.avg</code>. For most use cases, you can query <code>db.load.avg</code> only. </p>
pub fn metric(&self) -> std::option::Option<&str> {
self.metric.as_deref()
}
/// <p>The granularity, in seconds, of the data points returned from Performance Insights. A period can be as short as one second, or as long as one day (86400 seconds). Valid values are: </p>
/// <ul>
/// <li> <p> <code>1</code> (one second)</p> </li>
/// <li> <p> <code>60</code> (one minute)</p> </li>
/// <li> <p> <code>300</code> (five minutes)</p> </li>
/// <li> <p> <code>3600</code> (one hour)</p> </li>
/// <li> <p> <code>86400</code> (twenty-four hours)</p> </li>
/// </ul>
/// <p>If you don't specify <code>PeriodInSeconds</code>, then Performance Insights chooses a value for you, with a goal of returning roughly 100-200 data points in the response. </p>
pub fn period_in_seconds(&self) -> std::option::Option<i32> {
self.period_in_seconds
}
/// <p>A specification for how to aggregate the data points from a query result. You must specify a valid dimension group. Performance Insights returns all dimensions within this group, unless you provide the names of specific dimensions within this group. You can also request that Performance Insights return a limited number of values for a dimension. </p>
pub fn group_by(&self) -> std::option::Option<&crate::model::DimensionGroup> {
self.group_by.as_ref()
}
/// <p>Additional metrics for the top <code>N</code> dimension keys. If the specified dimension group in the <code>GroupBy</code> parameter is <code>db.sql_tokenized</code>, you can specify per-SQL metrics to get the values for the top <code>N</code> SQL digests. The response syntax is <code>"AdditionalMetrics" : { "<i>string</i>" : "<i>string</i>" }</code>. </p>
/// <p></p>
pub fn additional_metrics(&self) -> std::option::Option<&[std::string::String]> {
self.additional_metrics.as_deref()
}
/// <p>For each dimension specified in <code>GroupBy</code>, specify a secondary dimension to further subdivide the partition keys in the response. </p>
pub fn partition_by(&self) -> std::option::Option<&crate::model::DimensionGroup> {
self.partition_by.as_ref()
}
/// <p>One or more filters to apply in the request. Restrictions:</p>
/// <ul>
/// <li> <p>Any number of filters by the same dimension, as specified in the <code>GroupBy</code> or <code>Partition</code> parameters.</p> </li>
/// <li> <p>A single filter for any other dimension in this dimension group.</p> </li>
/// </ul>
pub fn filter(
&self,
) -> std::option::Option<&std::collections::HashMap<std::string::String, std::string::String>>
{
self.filter.as_ref()
}
/// <p>The maximum number of items to return in the response. If more items exist than the specified <code>MaxRecords</code> value, a pagination token is included in the response so that the remaining results can be retrieved. </p>
pub fn max_results(&self) -> std::option::Option<i32> {
self.max_results
}
/// <p>An optional pagination token provided by a previous request. If this parameter is specified, the response includes only records beyond the token, up to the value specified by <code>MaxRecords</code>. </p>
pub fn next_token(&self) -> std::option::Option<&str> {
self.next_token.as_deref()
}
}
impl std::fmt::Debug for DescribeDimensionKeysInput {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let mut formatter = f.debug_struct("DescribeDimensionKeysInput");
formatter.field("service_type", &self.service_type);
formatter.field("identifier", &self.identifier);
formatter.field("start_time", &self.start_time);
formatter.field("end_time", &self.end_time);
formatter.field("metric", &self.metric);
formatter.field("period_in_seconds", &self.period_in_seconds);
formatter.field("group_by", &self.group_by);
formatter.field("additional_metrics", &self.additional_metrics);
formatter.field("partition_by", &self.partition_by);
formatter.field("filter", &self.filter);
formatter.field("max_results", &self.max_results);
formatter.field("next_token", &self.next_token);
formatter.finish()
}
}
|
make_operation
|
bundle.js
|
(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
var Message = require('./message')
var CircleChart = require('./circle')
if (window.location.href.indexOf("index.html") !== -1) {
var messages = [
{
title: "Lorem ipsum dolor",
subTitle: "Vestibulum non dui ac augue"
},
{
title: "Integer iaculis",
subTitle: "Luctus justo sit amet porttitor"
},
{
title: "Tristique volutpat",
subTitle: "Mauris aliquet sit viverra"
}
];
var counter = 0
var loop = function() {
var message = new Message(messages[counter]);
message.show();
counter++;
if(counter >= messages.length)
counter = 0;
setTimeout(loop, 2500)
}
loop();
};
if (window.location.href.indexOf("circle_chart.html") !== -1) {
var circleChart = new CircleChart();
circleChart.drawChart(75);
}
},{"./circle":2,"./message":3}],2:[function(require,module,exports){
var CircleChart = function(settings) {
var thatCall = this.chartValueUpdated;
var that = this;
this.circleActor = new ui.Actor({
element: "#circle-path",
values: {
length:0
},
onUpdate: function (output) {
thatCall.apply(that, [output]);
}
});
this.labelElement = document.body.querySelector("#chart-label")
};
CircleChart.prototype.drawChart = function(complete) {
var drawLine = new ui.Tween({
duration: 600,
values: {
length: complete
}
});
this.circleActor.start(drawLine);
};
CircleChart.prototype.chartValueUpdated = function(output) {
this.labelElement.textContent = Math.round(output.length) + '%';
};
module.exports = CircleChart;
},{}],3:[function(require,module,exports){
var Message = function (settings) {
var initialValues = {
y:40,
opacity:0
}
this.mainTitleActor = new ui.Actor({
element: "#main-title",
values: initialValues
});
this.subTitleActor = new ui.Actor({
element: "#sub-title",
values: initialValues
});
this.sequence = new ui.Sequence();
this.setTitle(settings.title, settings.subTitle);
}
Message.prototype.setTitle = function(title, subTitle) {
var titleEl = document.body.querySelector("#main-title"),
subTitleEl = document.body.querySelector("#sub-title"),
containerEl = document.body.querySelector(".demo-container");
titleEl.textContent = title;
subTitleEl.textContent = subTitle;
}
Message.prototype.show = function () {
var initialValues = {
y:40,
opacity:0
};
this.mainTitleActor.set(initialValues);
this.subTitleActor.set(initialValues);
this.sequence.clear()
.do(this.mainTitleActor, this.showUpTween)
.at('+=150')
.do(this.subTitleActor, this.showUpTween)
.start();
}
Message.prototype.showUpTween = new ui.Tween({
values: {
|
duration:600
});
module.exports = Message;
},{}]},{},[1]);
|
y: 0,
opacity: 1
},
|
request.rs
|
// GENERATED CODE
use crate::client::Graph;
use crate::core::ResourceIdentity;
use crate::sessions::{SessionRequest, SessionsRequest};
use graph_http::types::NoContent;
use graph_http::IntoResponse;
use handlebars::*;
use reqwest::Method;
register_client!(CallRecordRequest,);
register_client!(CallRecordsRequest, ());
impl<'a, Client> CallRecordRequest<'a, Client>
where
Client: graph_http::RequestClient,
{
get!({
doc: "# Get callRecords from communications",
name: list_call_records,
response: serde_json::Value,
path: "/callRecords",
params: 0,
has_body: false
});
post!({
doc: "# Create new navigation property to callRecords for communications",
name: create_call_records,
response: serde_json::Value,
path: "/callRecords",
params: 0,
has_body: true
});
pub fn id<ID: AsRef<str>>(&self, id: ID) -> CallRecordsRequest<'a, Client> {
self.client.set_ident(ResourceIdentity::CallRecords);
CallRecordsRequest::new(id.as_ref(), self.client)
}
}
impl<'a, Client> CallRecordsRequest<'a, Client>
where
Client: graph_http::RequestClient,
{
get!({
doc: "# Get callRecords from communications",
name: get_call_records,
response: serde_json::Value,
path: "/callRecords/{{RID}}",
params: 0,
has_body: false
});
patch!({
doc: "# Update the navigation property callRecords in communications",
name: update_call_records,
response: NoContent,
path: "/callRecords/{{RID}}",
params: 0,
has_body: true
});
get!({
doc: "# Get sessions from communications",
name: list_sessions,
response: serde_json::Value,
path: "/callRecords/{{RID}}/sessions",
params: 0,
has_body: false
});
post!({
doc: "# Create new navigation property to sessions for communications",
name: create_sessions,
response: serde_json::Value,
path: "/callRecords/{{RID}}/sessions",
params: 0,
has_body: true
});
pub fn sessions(&self) -> SessionRequest<'a, Client> {
self.client
.request
.extend_path(&[self.client.ident().as_ref(), self.id.as_str()]);
SessionRequest::new(self.client)
}
pub fn session<ID: AsRef<str>>(&self, id: ID) -> SessionsRequest<'a, Client>
|
}
|
{
self.client
.request
.extend_path(&[self.client.ident().as_ref(), self.id.as_str()]);
self.client.set_ident(ResourceIdentity::Sessions);
SessionsRequest::new(id.as_ref(), self.client)
}
|
logging-enabled-debug.rs
|
// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags:--cfg ndebug
// exec-env:RUST_LOG=logging-enabled-debug=debug
#![feature(phase)]
#[phase(plugin, link)]
extern crate log;
pub fn main() {
if log_enabled!(log::DEBUG)
|
}
|
{
fail!("what?! debugging?");
}
|
main_20210613213845.py
|
class IceCream:
def __init__(self):
self.scoops = 3
def eat(self, scoops):
self.scoops = self.scoops = scoops
def add(self, scoops):
| ||
endpoint_manager.py
|
import re
class EndpointManager(object):
def __init__(self, session):
self.session = session
def __getattr__(self, name):
return SessionContext(self.session, name)
class SessionContext(object):
def __init__(self, session, name):
self.session = session
self.prefix = '/api/{}/'.format(name)
def adjust_args(self, args):
|
def get(self, *args, **kwargs):
return self.session.get(*(self.adjust_args(args)), **kwargs).json()
def head(self, *args, **kwargs):
return self.session.head(*(self.adjust_args(args)), **kwargs).headers
def filter(self, *args, **kwargs):
return self.session.filter(self.prefix, *args, **kwargs).json()
def post(self, *args, **kwargs):
return self.session.post(*(self.adjust_args(args)), **kwargs).json()
def patch(self, *args, **kwargs):
return self.session.patch(*(self.adjust_args(args)), **kwargs).json()
def delete(self, *args, **kwargs):
self.session.delete(*(self.adjust_args(args)), **kwargs)
|
new_args = list(args)
if new_args:
url = str(new_args.pop(0))
if re.match(r'^\d+$', url):
# Make it easier to post to an id, if trailing slash required.
url += '/'
else:
url = ''
new_args.insert(0, self.prefix + url)
return new_args
|
prepare.rs
|
use crate::constants::{BREAK_100, DISTANCE_R, IMG_HEIGHT, IMG_WIDTH, N_TOTAL};
use crate::utils::{read_vec, select_and_scale_vecs, select_vec_by_row_ids, sorted_paths};
use image::{GenericImage, GenericImageView, ImageResult};
use lsh_rs::{
stats::{estimate_l, l2_ph, optimize_l2_params},
utils::l2_norm,
SqlTable, LSH,
};
use ndarray::prelude::*;
use rayon::prelude::*;
use rusqlite::{named_params, params, Connection, Result as DbResult};
use std::fs;
use std::fs::{DirEntry, ReadDir};
use std::io::Write;
use std::path::PathBuf;
pub fn convert_img<P>(path: P) -> ImageResult<Vec<u8>>
where
P: AsRef<std::path::Path>,
{
let img = image::open(path)?;
let img = img.thumbnail_exact(IMG_WIDTH as u32, IMG_HEIGHT as u32);
let v: Vec<u8> = img.to_bytes();
Ok(v)
}
pub fn create_img_vecs(folder: &str, conn: &Connection) -> Result<(), Box<dyn std::error::Error>> {
let files = fs::read_dir(folder)?;
let files: Vec<DirEntry> = files.map(|e| e.unwrap()).collect();
conn.execute_batch("BEGIN TRANSACTION;")?;
let mut stmt = conn.prepare(
"
INSERT INTO vecs (path, vec) VALUES (:path, :vec)
",
)?;
let mut c = 0;
let chunk_size = 10000;
files.chunks(chunk_size).for_each(|chunk| {
let vecs: Vec<(PathBuf, Vec<u8>)> = chunk
.par_iter()
.map(|entry| {
let path = entry.path();
let v = match convert_img(&path) {
Ok(v) => v,
Err(_) => panic!("cold not read image."),
};
(path, v)
})
.collect();
c += chunk_size;
println!("{:?}", c);
vecs.iter().for_each(|(path, v)| {
stmt.execute_named(
named_params! {":path": path.as_path().to_str().unwrap(), ":vec": v},
)
.expect("failing insert");
})
});
conn.execute_batch("COMMIT TRANSACTION;")?;
Ok(())
}
pub fn sample_params(
n: usize,
delta: f64,
conn: &Connection,
) -> Result<(), Box<dyn std::error::Error>> {
let vs = select_and_scale_vecs(0, n, conn).expect("could not get vecs");
let dim = vs[0].len();
let k: Vec<usize> = (10..20).map(|a| a as usize).collect();
let mut results = optimize_l2_params(delta, dim, &k, &vs)?;
// now only ran on a sample n of N.
// search_time is expected to increase by N/n (due to duplicates)
let search_time_factor = N_TOTAL as f64 / n as f64;
results.sort_unstable_by_key(|opt_res| {
let t = opt_res.hash_time + opt_res.search_time * search_time_factor;
t as i32
});
for opt_res in results {
let t = opt_res.hash_time + opt_res.search_time * search_time_factor;
println!("{:?}, total time: {}", opt_res, t);
}
Ok(())
}
pub fn describe_vecs(conn: &Connection, n: usize) -> Result<(), std::io::Error> {
let mut l2_norms = Vec::with_capacity(n);
let mut c = 0;
let vs = select_vec_by_row_ids(0, n, conn).expect("could not get vecs from db");
for v in vs {
let v: Vec<f32> = v.iter().map(|&x| x as f32).collect();
let l2 = l2_norm(aview1(&v));
l2_norms.push(l2);
if c > 100 && BREAK_100 {
break;
}
}
println!(
"L2 norms: min: {}, max: {}, avg: {}",
l2_norms.iter().copied().fold(0. / 0., f32::min),
l2_norms.iter().copied().fold(0. / 0., f32::max),
l2_norms.iter().sum::<f32>() / l2_norms.len() as f32
);
Ok(())
|
pub fn make_lsh(
n_projections: usize,
n_hash_tables: usize,
dim: usize,
seed: u64,
r: f32,
chunk_size: usize,
conn: &Connection,
) -> Result<(), Box<dyn std::error::Error>> {
let mut stmt = conn.prepare_cached("SELECT count(*) FROM vecs;").unwrap();
let n_total: i32 = stmt.query_row(params![], |row| row.get(0)).unwrap();
let n_total = n_total as usize;
let mut lsh: LSH<SqlTable, _> = LSH::new(n_projections, n_hash_tables, dim)
.seed(seed)
.only_index()
.l2(r)?;
let mut prev_i = 0;
for i in (chunk_size..n_total).step_by(chunk_size) {
println!("{} until {}", prev_i, i);
let vs = select_and_scale_vecs(prev_i, i, conn).expect("could not get vecs from db");
prev_i = i;
lsh.store_vecs(&vs)?;
}
println!("indexing...");
lsh.commit()?;
Ok(())
}
|
}
|
influxclient.go
|
package modstation
import (
"fmt"
"log"
"strconv"
"time"
"github.com/influxdata/influxdb/client/v2"
)
//InfluxManager ---
type InfluxManager struct {
pChannelMgr *ChannelManager
influx client.Client
}
//NewInfluxManager ---
func
|
(pChannel *ChannelManager) (*InfluxManager, error) {
pInfluxManager := &InfluxManager{pChannelMgr: pChannel}
err := pInfluxManager.init()
return pInfluxManager, err
}
func (influxMgr *InfluxManager) ping() error {
_, _, err := influxMgr.influx.Ping(5 * time.Second)
return err
}
//init ok
func (influxMgr *InfluxManager) init() error {
var err error
influxMgr.influx, err = client.NewHTTPClient(client.HTTPConfig{
Addr: influxMgr.pChannelMgr.Influxdb,
Username: influxMgr.pChannelMgr.Username,
Password: influxMgr.pChannelMgr.Password,
Timeout: time.Second * 5,
})
if err != nil {
return err
}
err = influxMgr.ping()
return err
}
//ToDb --
func (influxMgr *InfluxManager) ToDb() error {
bp, err := client.NewBatchPoints(client.BatchPointsConfig{
Precision: "ms",
Database: influxMgr.pChannelMgr.MyDB,
})
if err != nil {
return err
}
for _, ch := range influxMgr.pChannelMgr.Channels {
for _, dev := range ch.Device {
for _, dp := range dev.Datapoint {
point, _ := client.NewPoint(
dp.Dpid,
map[string]string{
"addr": strconv.Itoa(int(dev.Addr)), "alarm": strconv.Itoa(int(dp.Alarm)), "connected": strconv.FormatBool(dp.Connected), "valid": strconv.FormatBool(dp.Valid)},
map[string]interface{}{"t": dp.Val}, dp.updatetime)
bp.AddPoint(point)
}
err = influxMgr.influx.Write(bp)
if err != nil {
fmt.Println("write error", err)
log.Println("write error", err)
}
}
}
return nil
}
|
NewInfluxManager
|
lib.rs
|
use std::env;
use std::panic;
use quote::quote;
use syn;
use syn::export::TokenStream;
#[proc_macro_attribute]
pub fn
|
(attr: TokenStream, item: TokenStream) -> TokenStream {
let constructor: syn::Ident = syn::parse(attr).unwrap();
let ast: syn::ItemStruct = syn::parse(item).unwrap();
let name = &ast.ident;
if env::var("MAMMOTH_MODULE").is_ok() {
panic!("Only one MammothInterface per library is allowed.");
} else {
env::set_var("MAMMOTH_MODULE", "impl");
}
let result = quote!{
trait __mammoth_interface: mammoth_setup::MammothInterface {}
#[no_mangle]
pub extern fn __version() -> semver::Version {
mammoth_setup::version::version()
}
#[no_mangle]
pub extern fn __construct(cfg: Option<toml::Value>) -> *mut mammoth_setup::MammothInterface {
let interface = Box::new(#constructor(cfg));
Box::into_raw(interface)
}
#ast
impl __mammoth_interface for #name {}
};
result.into()
}
|
mammoth_module
|
grammar.rs
|
use std::fmt;
use std::collections::VecDeque;
use pest::prelude::*;
use super::*;
impl_rdp! {
grammar! {
module = _{ soi ~ statement* ~ eoi }
// conditional is technically an expression too but it can be used as a statement
// without a semicolon as well
statement = { declaration | assignment | while_loop | conditional | (expr ~ semi) | comment }
comment = @{ block_comment | line_comment }
line_comment = _{ ["//"] ~ (!(["\r"] | ["\n"]) ~ any)* ~ (["\n"] | ["\r\n"] | ["\r"] | eoi) }
block_comment = _{ ["/*"] ~ ((!(["*/"]) ~ any) | block_comment)* ~ ["*/"] }
assignment = { identifier ~ op_assign ~ expr ~ semi}
declaration = { ["let"] ~ ["mut"]? ~ pattern ~ op_declare_type ~ type_def ~ (op_assign ~ expr)? ~ semi}
op_declare_type = { [":"] }
op_assign = { ["="] }
pattern = { identifier }
type_def = _{ identifier | array_type }
array_type = { ["["] ~ type_def ~ semi ~ array_size ~ ["]"] }
array_size = _{ unspecified | expr }
unspecified = { ["_"] }
while_loop = { ["while"] ~ expr ~ block }
expr = {
{ bool_not | func_call | field_access | string_literal | bool_literal | identifier | conditional | number }
// Ordered from lowest precedence to highest precedence
bool_or = { op_bool_or }
bool_and = { op_bool_and }
// NOTE: Order matters! { ["<"] | ["<="] } will never match "<="
comparison = { op_eq | op_ne | op_ge | op_le | op_gt | op_lt }
}
op_bool_or = { ["||"] }
op_bool_and = { ["&&"] }
op_eq = { ["=="] }
op_ne = { ["!="] }
op_ge = { [">="] }
op_le = { ["<="] }
op_gt = { [">"] }
op_lt = { ["<"] }
bool_not = _{ op_bool_not ~ expr }
op_bool_not = { ["!"] }
conditional = { ["if"] ~ expr ~ block ~ (op_else_if ~ expr ~ block)* ~ (op_else ~ block)? }
op_else_if = { ["else if"] }
op_else = { ["else"] }
// This allows {} and {statement; statement; statement;} and {statement; expr} and {expr}
block = _{ block_start ~ statement* ~ expr? ~ block_end }
block_start = { ["{"] }
block_end = { ["}"] }
func_call = { identifier ~ func_args }
field_access = { identifier ~ op_access ~ identifier ~ func_args? }
op_access = { ["."] }
// This allows () and (func_arg, func_arg) and (func_arg) and (func_arg,)
func_args = _{ func_args_start ~ (func_arg ~ [","])* ~ func_arg? ~ func_args_end }
func_args_start = { ["("] }
func_args_end = { [")"] }
func_arg = _{ expr }
string_literal = @{ ["b\""] ~ literal_char* ~ ["\""] }
literal_char = { escape_sequence | (!["\""] ~ any) }
escape_sequence = _{ ["\\\\"] | ["\\\""] | ["\\\'"] | ["\\n"] | ["\\r"] | ["\\t"] | ["\\0"] }
bool_literal = @{ ["true"] | ["false"] }
identifier = @{ !keyword ~ (alpha | ["_"]) ~ (alphanumeric | ["_"])* }
alpha = _{ ['a'..'z'] | ['A'..'Z'] }
alphanumeric = _{ alpha | ['0'..'9'] }
number = @{ ["0"] | (nonzero ~ digit*) }
// Allow "_" in numbers for grouping: 1_000_000 == 1000000
digit = _{ ["0"] | nonzero | ["_"] }
nonzero = _{ ['1'..'9'] }
whitespace = _{ [" "] | ["\t"] | ["\u{000C}"] | ["\r"] | ["\n"] }
// NOTE: When changing this code, make sure you don't have a subset of a word before
// another word. For example: { ["type"] | ["typeof"] } will never match "typeof"
keyword = @{
["abstract"] | ["as"] | ["become"] | ["break"] | ["byte"] | ["class"] | ["clear"] |
["const"] | ["continue"] | ["do"] | ["else"] | ["enum"] | ["eval"] | ["export"] |
["extern"] | ["false"] | ["final"] | ["fn"] | ["for"] | ["if"] | ["impl"] | ["import"] |
["in"] | ["let"] | ["loop"] | ["match"] | ["mod"] | ["move"] | ["mut"] | ["of"] |
["out"] | ["pub"] | ["raw"] | ["ref"] | ["return"] | ["self"] | ["static"] |
["struct"] | ["super"] | ["trait"] | ["true"] | ["typeof"] | ["type"] | ["unsafe"] |
["use"] | ["where"] | ["while"] | ["yield"]
}
// These are separate rules because we can use the generated rules and tokens to provide
// better error messages
semi = { [";"] }
}
process! {
// Top-level method that returns the abstract syntax tree based on the contents of the
// parser queue
// Make sure to call module() before this so there is something in the queue
module_ast(&self) -> Module {
(statements: _module()) => {
Module {
body: statements.into_iter().collect::<Block>(),
}
},
}
_module(&self) -> VecDeque<Statement> {
(_: statement, head: _statement(), mut tail: _module()) => {
tail.push_front(head);
tail
},
(&text: comment, mut tail: _module()) => {
tail.push_front(Statement::Comment(text.into()));
tail
},
() => {
let mut tail = VecDeque::new();
// We do this so the last statement in a block always represents its return type
tail.push_front(Statement::Expression {expr: Expression::UnitLiteral});
tail
},
}
_statement(&self) -> Statement {
(&text: comment) => {
Statement::Comment(text.into())
},
(_: declaration, pattern: _pattern(), _: op_declare_type, type_def: _type_def(), _: op_assign, _: expr, expr: _expr(), _: semi) => {
Statement::Declaration {pattern: pattern, type_def: type_def, expr: Some(expr)}
},
(_: declaration, pattern: _pattern(), _: op_declare_type, type_def: _type_def(), _: semi) => {
Statement::Declaration {pattern: pattern, type_def: type_def, expr: None}
},
(_: assignment, ident: _identifier(), _: op_assign, _: expr, expr: _expr(), _: semi) => {
Statement::Assignment {lhs: ident, expr: expr}
},
(_: while_loop, _: expr, condition: _expr(), body: _block()) => {
Statement::WhileLoop {condition: condition, body: body}
},
(_: conditional, expr: _conditional()) => {
Statement::Expression {expr: expr}
},
// This should always be lmodule_ast as it will catch pretty much any cases that weren't caught above
(_: expr, expr: _expr(), _: semi) => {
Statement::Expression {expr: expr}
},
}
_pattern(&self) -> Pattern {
(_: pattern, ident: _identifier()) => {
Pattern::Identifier(ident)
},
}
_type_def(&self) -> TypeDefinition {
(_: array_type, type_def: _type_def(), _: semi, _: unspecified) => {
TypeDefinition::Array {type_def: Box::new(type_def), size: None}
},
(_: array_type, type_def: _type_def(), _: semi, _: expr, size: _expr()) => {
TypeDefinition::Array {type_def: Box::new(type_def), size: Some(size)}
},
(ident: _identifier()) => {
TypeDefinition::Name {name: ident}
},
}
_expr(&self) -> Expression {
(_: op_bool_not, _:expr, expr: _expr()) => {
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::ops::Not"))),
args: vec![expr],
}
},
(_: func_call, method: _identifier(), args: _call_args()) => {
Expression::Call {
method: Box::new(Expression::Identifier(method)),
args: args,
}
},
(_: field_access, expr: _field_access()) => {
expr
},
(_: conditional, expr: _conditional()) => {
expr
},
(_: bool_or, lhs: _expr(), _: op_bool_or, rhs: _expr()) => {
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator||"))),
args: vec![lhs, rhs],
}
},
(_: bool_and, lhs: _expr(), _: op_bool_and, rhs: _expr()) => {
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![lhs, rhs],
}
},
(_: comparison, lhs: _expr(), op_token, rhs: _expr()) => {
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from(match op_token.rule {
Rule::op_eq => "std::cmp::PartialEq::eq",
Rule::op_ne => "std::cmp::PartialEq::ne",
Rule::op_ge => "std::cmp::PartialOrd::ge",
Rule::op_le => "std::cmp::PartialOrd::le",
Rule::op_gt => "std::cmp::PartialOrd::gt",
Rule::op_lt => "std::cmp::PartialOrd::lt",
_ => unreachable!(),
}))),
args: vec![lhs, rhs],
}
},
(&ident: bool_literal) => {
Expression::Identifier(ident.into())
},
(&ident: identifier) => {
Expression::Identifier(ident.into())
},
(_: string_literal, s: _literal_chars()) => {
Expression::ByteLiteral(s.into_iter().collect())
},
(&s: number) => {
// If our grammar is correct, we are guarenteed that this will work
Expression::Number(s.replace("_", "").parse().unwrap())
},
}
_field_access(&self) -> Expression {
(target: _identifier(), _: op_access, field: _identifier(), args: _call_args()) => {
Expression::Call {
method: Box::new(Expression::Access {
target: Box::new(Expression::Identifier(target)),
field: field,
}),
args: args,
}
},
(target: _identifier(), _: op_access, field: _identifier()) => {
Expression::Access {
target: Box::new(Expression::Identifier(target)),
field: field,
}
},
}
_conditional(&self) -> Expression {
(_: expr, expr: _expr(), block: _block(), _: op_else_if, branches: _branches(), _: op_else, else_block: _block()) => {
Expression::Branch {
condition: Box::new(expr),
body: block,
otherwise: Some(nest_else_ifs(branches, Some(else_block))),
}
},
(_: expr, expr: _expr(), block: _block(), _: op_else_if, branches: _branches()) => {
Expression::Branch {
condition: Box::new(expr),
body: block,
otherwise: Some(nest_else_ifs(branches, None)),
}
},
(_: expr, expr: _expr(), block: _block(), _: op_else, else_block: _block()) => {
Expression::Branch {
condition: Box::new(expr),
body: block,
otherwise: Some(else_block),
}
},
(_: expr, expr: _expr(), block: _block()) => {
Expression::Branch {
condition: Box::new(expr),
body: block,
otherwise: None,
}
},
}
    // Collects a chain of `else if` branches, front-to-back, as Branch
    // expressions whose `otherwise` slots are filled in later (see nest_else_ifs).
    _branches(&self) -> VecDeque<Expression> {
        // More `else if`s follow: prepend this branch to the recursively built tail.
        (_: expr, expr: _expr(), block: _block(), _: op_else_if, mut tail: _branches()) => {
            tail.push_front(Expression::Branch {
                condition: Box::new(expr),
                body: block,
                otherwise: None,
            });
            tail
        },
        // Last branch in the chain.
        (_: expr, expr: _expr(), block: _block()) => {
            let mut queue = VecDeque::new();
            queue.push_front(Expression::Branch {
                condition: Box::new(expr),
                body: block,
                otherwise: None,
            });
            queue
        },
    }
    // Collects the argument list of a call, `(` ... `)`, into CallArgs.
    _call_args(&self) -> CallArgs {
        (_: func_args_start, deque: _expr_deque()) => {
            deque.into_iter().collect()
        },
    }
    // Accumulates comma-separated argument expressions until `)` is reached.
    _expr_deque(&self) -> VecDeque<Expression> {
        // `)` terminates the list.
        (_: func_args_end) => {
            VecDeque::new()
        },
        // Prepend each expression onto the recursively built tail.
        (_: expr, head: _expr(), mut tail: _expr_deque()) => {
            tail.push_front(head);
            tail
        },
    }
    // Builds a `{ ... }` block as an ordered collection of statements.
    _block(&self) -> Block {
        (_: block_start, deque: _block_deque()) => {
            deque.into_iter().collect()
        },
    }
    // Accumulates the statements of a block until `}` is reached. The final
    // element is always an Expression statement (possibly UnitLiteral) so the
    // last statement of a block represents its value/return type.
    _block_deque(&self) -> VecDeque<Statement> {
        (_: statement, head: _statement(), mut tail: _block_deque()) => {
            tail.push_front(head);
            tail
        },
        // Comments are preserved as statements in the AST.
        (&text: comment, mut tail: _block_deque()) => {
            tail.push_front(Statement::Comment(text.into()));
            tail
        },
        // Trailing expression without `;` right before `}`: the block's value.
        (_: expr, head: _expr(), _: block_end) => {
            let mut tail = VecDeque::new();
            tail.push_front(Statement::Expression {expr: head});
            tail
        },
        (_: block_end) => {
            let mut tail = VecDeque::new();
            // We do this so the last statement in a block always represents its return type
            tail.push_front(Statement::Expression {expr: Expression::UnitLiteral});
            tail
        },
    }
    // Collects the bytes of a byte-string literal, decoding two-character
    // escape sequences (`\n`, `\t`, ...) into their single-byte values.
    _literal_chars(&self) -> VecDeque<u8> {
        (&c: literal_char, mut tail: _literal_chars()) => {
            // A two-character token is an escape sequence starting with `\`.
            if c.len() == 2 {
                debug_assert!(c.bytes().next().unwrap() == b'\\');
                tail.push_front(match c.bytes().nth(1).unwrap() {
                    b'\\' => b'\\',
                    b'"' => b'"',
                    b'\'' => b'\'',
                    b'n' => b'\n',
                    b'r' => b'\r',
                    b't' => b'\t',
                    b'0' => b'\0',
                    //TODO: Replace this with a proper result when upgrading to pest 1.0
                    _ => panic!("Unknown escape: {}", c)
                });
            }
            else {
                // Single-character token: the byte itself.
                debug_assert!(c.len() == 1);
                tail.push_front(c.bytes().next().unwrap());
            }
            tail
        },
        // End of the literal.
        () => {
            VecDeque::new()
        },
    }
    // Converts an `identifier` token's text into an Identifier value.
    _identifier(&self) -> Identifier {
        (&ident: identifier) => {
            ident.into()
        },
    }
}
}
/// Given a series of branch expressions, this will nest them together
/// so that they result in a single nested branch expression
///
/// # Example
/// Given:
/// if foo1 { body1 } else {}
/// if foo2 { body2 } else {}
/// if foo3 { body3 } else {}
///
/// Results in:
/// if foo1 { body1 } else { if foo2 { body2 } else { if foo3 { body3 } else {} } }
fn nest_else_ifs(branches: VecDeque<Expression>, else_block: Option<Block>) -> Block {
    // Walk the branches back-to-front, tucking the block accumulated so far
    // into each branch's `otherwise` slot.
    let mut nested = else_block;
    for mut branch in branches.into_iter().rev() {
        match branch {
            Expression::Branch {ref mut otherwise, ..} => {
                *otherwise = nested;
            },
            _ => unreachable!(),
        };
        nested = Some(vec![Statement::Expression {expr: branch}]);
    }
    // The grammar guarantees at least one branch, so `nested` is never None here.
    nested.unwrap()
}
impl fmt::Display for Rule {
    /// Renders a grammar rule as the human-readable token text used when
    /// formatting parse errors (e.g. `op_eq` displays as "`==`").
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Just to make things a bit more ergonomic
        use self::Rule::*;
        write!(f, "{}", match *self {
            eoi => "EOF",
            comment => "comment",
            identifier => "identifier",
            keyword => "keyword",
            number => "number",
            string_literal => "string literal",
            bool_literal => "boolean literal",
            literal_char => "character",
            any => "any character",
            unspecified => "`_`",
            semi => "`;`",
            bool_or => "`or`",
            bool_and => "`and`",
            conditional => "`if`",
            op_else_if => "`else if`",
            op_else => "`else`",
            op_assign => "`=`",
            op_bool_or => "`||`",
            op_bool_and => "`&&`",
            op_bool_not => "`!`",
            op_eq => "`==`",
            op_ne => "`!=`",
            op_ge => "`>=`",
            op_le => "`<=`",
            op_gt => "`>`",
            op_lt => "`<`",
            op_access => "`.`",
            op_declare_type => "`:`",
            block_start => "`{`",
            block_end => "`}`",
            func_args_start => "`(`",
            func_args_end => "`)`",
            // There are many rules that will never get matched here because
            // this method is meant to be used for formatting errors
            // We don't want to use the "_" wildcard because we want Rust
            // to tell us when a new rule has to be added here
            statement | assignment | declaration | pattern | array_type | while_loop | comparison |
            func_call | field_access | expr | soi => unreachable!("{:?}", *self),
        })
    }
}
#[cfg(test)]
mod tests {
use std::fmt::Debug;
use pest::prelude::*;
use super::*;
#[test]
fn string_literal() {
test_parse(r#"b"""#, |p| p.string_literal(), vec![
Token::new(Rule::string_literal, 0, 3),
]);
test_parse(r#"b"foo""#, |p| p.string_literal(), vec![
Token::new(Rule::string_literal, 0, 6),
Token::new(Rule::literal_char, 2, 3),
Token::new(Rule::literal_char, 3, 4),
Token::new(Rule::literal_char, 4, 5),
]);
}
#[test]
fn number() {
test_parse(r#"0"#, |p| p.number(), vec![
Token::new(Rule::number, 0, 1),
]);
test_parse(r#"100"#, |p| p.number(), vec![
Token::new(Rule::number, 0, 3),
]);
test_parse(r#"1_000_000"#, |p| p.number(), vec![
Token::new(Rule::number, 0, 9),
]);
test_parse(r#"1_000_000_"#, |p| p.number(), vec![
Token::new(Rule::number, 0, 10),
]);
test_parse(r#"1____0_0__0______000____"#, |p| p.number(), vec![
Token::new(Rule::number, 0, 24),
]);
test_fail(r#"_1_000_000"#, |p| p.number());
}
#[test]
fn field_access() {
test_parse(r#"foo.bar"#, |p| p.field_access(), vec![
Token::new(Rule::field_access, 0, 7),
Token::new(Rule::identifier, 0, 3),
Token::new(Rule::op_access, 3, 4),
Token::new(Rule::identifier, 4, 7),
]);
}
#[test]
fn numeric_literal() {
test_method(r#"0"#, |p| p.expr(), |p| {p.inc_queue_index(); p._expr()},
Expression::Number(0)
);
test_method(r#"100"#, |p| p.expr(), |p| {p.inc_queue_index(); p._expr()},
Expression::Number(100)
);
test_method(r#"1_000_000"#, |p| p.expr(), |p| {p.inc_queue_index(); p._expr()},
Expression::Number(1_000_000)
);
test_method(r#"1_000_000_"#, |p| p.expr(), |p| {p.inc_queue_index(); p._expr()},
Expression::Number(1_000_000_)
);
test_method(r#"1____0_0__0______000____"#, |p| p.expr(), |p| {p.inc_queue_index(); p._expr()},
Expression::Number(1____0_0__0______000____)
);
}
#[test]
fn string_literal_escapes() {
test_method(r#"b"foo""#, |p| p.expr(), |p| {p.inc_queue_index(); p._expr()},
Expression::ByteLiteral(b"foo".to_vec()));
test_method(r#"b"\\ \" \' \n \r \t \0""#, |p| p.expr(), |p| {p.inc_queue_index(); p._expr()},
Expression::ByteLiteral(b"\\ \" \' \n \r \t \0".to_vec()));
}
#[test]
fn functions_field_access() {
test_method(r#"func(1, b"foo", 3)"#, |p| p.expr(), |p| {p.inc_queue_index(); p._expr()},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("func"))),
args: vec![
Expression::Number(1),
Expression::ByteLiteral(b"foo".to_vec()),
Expression::Number(3),
],
}
);
test_method(r#"thing.prop(1, b"foo", 3)"#, |p| p.expr(), |p| {p.inc_queue_index(); p._expr()},
Expression::Call {
method: Box::new(Expression::Access {
target: Box::new(Expression::Identifier(Identifier::from("thing"))),
field: Identifier::from("prop"),
}),
args: vec![
Expression::Number(1),
Expression::ByteLiteral(b"foo".to_vec()),
Expression::Number(3),
],
}
);
}
#[test]
fn empty_program() {
test_method(r#""#, |p| p.module(), |p| p.module_ast(),
Module::empty());
test_method(r#"
"#, |p| p.module(), |p| p.module_ast(),
Module::empty());
test_method(r#"
"#, |p| p.module(), |p| p.module_ast(),
Module::empty());
}
#[test]
fn leading_whitespace() {
test_method(r#"
foo();
"#, |p| p.module(), |p| p.module_ast(),
Module::from(vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("foo"))),
args: vec![],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
])
);
}
#[test]
fn binary_operators() {
test_method(r#"
a || b;
a && b;
a == b;
a != b;
a >= b;
a <= b;
a > b;
a < b;
a && b || c;
a && b || c && d;
a == b || c && d;
a == b || c != d;
a && b || c >= d;
a <= b || c >= d;
a < b || c > d;
a && b && c;
a && b && c && d;
a == b && c && d;
a == b && c != d;
a && b && c >= d;
a <= b && c >= d;
a < b && c > d;
"#, |p| p.module(), |p| p.module_ast(),
Module::from(vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator||"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialEq::eq"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialEq::ne"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::ge"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::le"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::gt"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::lt"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator||"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Identifier(Identifier::from("c")),
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator||"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator||"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialEq::eq"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
|
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator||"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialEq::eq"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialEq::ne"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator||"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::ge"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator||"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::le"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::ge"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator||"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::lt"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::gt"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Identifier(Identifier::from("c")),
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Identifier(Identifier::from("c")),
],
},
Expression::Identifier(Identifier::from("d")),
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialEq::eq"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Identifier(Identifier::from("c")),
],
},
Expression::Identifier(Identifier::from("d")),
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialEq::eq"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialEq::ne"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::ge"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::le"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::ge"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
},
},
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::lt"))),
args: vec![Expression::Identifier(Identifier::from("a")), Expression::Identifier(Identifier::from("b"))],
},
Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("std::cmp::PartialOrd::gt"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
])
);
}
#[test]
fn conditionals() {
// Basic if
test_method(r#"
if foo {
a();
}
"#.trim(), |p| p.statement(), |p| {p.inc_queue_index(); p._statement()},
Statement::Expression {
expr: Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("foo"))),
body: vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("a"))),
args: vec![],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
],
otherwise: None,
},
}
);
// Basic if else
test_method(r#"
if foo {
a();
}
else {
b();
}
"#.trim(), |p| p.statement(), |p| {p.inc_queue_index(); p._statement()},
Statement::Expression {
expr: Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("foo"))),
body: vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("a"))),
args: vec![],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
],
otherwise: Some(vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("b"))),
args: vec![],
}
},
Statement::Expression {expr: Expression::UnitLiteral},
]),
},
}
);
// Basic if else-if else
test_method(r#"
if foo {
a();
}
else if foo2 {
c();
}
else if foo3 {
d();
}
else {
b();
}
"#.trim(), |p| p.statement(), |p| {p.inc_queue_index(); p._statement()},
Statement::Expression {
expr: Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("foo"))),
body: vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("a"))),
args: vec![],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
],
otherwise: Some(vec![
Statement::Expression {
expr: Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("foo2"))),
body: vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("c"))),
args: vec![],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
],
otherwise: Some(vec![
Statement::Expression {
expr: Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("foo3"))),
body: vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("d"))),
args: vec![],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
],
otherwise: Some(vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("b"))),
args: vec![],
}
},
Statement::Expression {expr: Expression::UnitLiteral},
]),
},
},
]),
},
},
]),
},
}
);
// Basic if else-if (no else)
test_method(r#"
if foo {
a();
}
else if foo2 {
c();
}
else if foo3 {
d();
}
"#.trim(), |p| p.statement(), |p| {p.inc_queue_index(); p._statement()},
Statement::Expression {
expr: Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("foo"))),
body: vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("a"))),
args: vec![],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
],
otherwise: Some(vec![
Statement::Expression {
expr: Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("foo2"))),
body: vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("c"))),
args: vec![],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
],
otherwise: Some(vec![
Statement::Expression {
expr: Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("foo3"))),
body: vec![
Statement::Expression {
expr: Expression::Call {
method: Box::new(Expression::Identifier(Identifier::from("d"))),
args: vec![],
},
},
Statement::Expression {expr: Expression::UnitLiteral},
],
otherwise: None,
},
},
]),
},
},
]),
},
}
);
// Declaration using if expression
test_method(r#"
let a: u8 = if foo {
1
}
else if bar7 {
2
}
else {
3
};
"#.trim(), |p| p.statement(), |p| {p.inc_queue_index(); p._statement()},
Statement::Declaration {
pattern: Pattern::Identifier(Identifier::from("a")),
type_def: TypeDefinition::Name {
name: Identifier::from("u8"),
},
expr: Some(Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("foo"))),
body: vec![
Statement::Expression {
expr: Expression::Number(1),
},
],
otherwise: Some(vec![
Statement::Expression {
expr: Expression::Branch {
condition: Box::new(Expression::Identifier(Identifier::from("bar7"))),
body: vec![
Statement::Expression {
expr: Expression::Number(2)
},
],
otherwise: Some(vec![
Statement::Expression {
expr: Expression::Number(3)
},
]),
},
},
]),
}),
}
);
}
    /// Runs `parse` on `input` and asserts that it succeeds, consumes the
    /// whole input, and produces exactly the expected token queue.
    fn test_parse<F>(input: &'static str, parse: F, tokens: Vec<Token<Rule>>)
        where F: FnOnce(&mut Rdp<StringInput>) -> bool {
        let mut parser = parser_from(input);
        assert!(parse(&mut parser), "Parsing failed");
        assert!(parser.end(), "Parser did not reach eoi");
        assert_eq!(parser.queue(), &tokens);
    }
    /// Runs `parse` on `input`, then invokes the AST-building `method` on the
    /// resulting parser and asserts the produced value equals `expected`.
    fn test_method<T: Debug + PartialEq, F, P>(input: &'static str, parse: P, method: F, expected: T)
        where P: FnOnce(&mut Rdp<StringInput>) -> bool,
              F: FnOnce(&Rdp<StringInput>) -> T {
        let mut parser = parser_from(input);
        assert!(parse(&mut parser), "Parsing failed");
        assert!(parser.end(), "Parser did not reach eoi");
        assert_eq!(method(&parser), expected);
    }
    /// Asserts that `parse` rejects `input` outright: no match, no progress to
    /// the end of input, and nothing added to the token queue.
    fn test_fail<F>(input: &'static str, parse: F)
        where F: FnOnce(&mut Rdp<StringInput>) -> bool {
        let mut parser = parser_from(input);
        assert!(!parse(&mut parser), "Parsing passed when expected it to fail");
        assert!(!parser.end(), "Parser reached end when expected it to fail");
        assert!(parser.queue().is_empty(), "Queue was not empty despite expecting to fail");
    }
    /// Constructs a fresh parser over the given source string.
    fn parser_from(s: &'static str) -> Rdp<StringInput> {
        Rdp::new(StringInput::new(s))
    }
}
|
method: Box::new(Expression::Identifier(Identifier::from("operator&&"))),
args: vec![Expression::Identifier(Identifier::from("c")), Expression::Identifier(Identifier::from("d"))],
},
],
|
translate.py
|
import abc
from collections import defaultdict
from contextlib import contextmanager
from dataclasses import dataclass, field, replace
import math
import struct
import sys
import traceback
import typing
from typing import (
AbstractSet,
Callable,
Collection,
DefaultDict,
Dict,
Iterator,
List,
Mapping,
Optional,
Set,
Tuple,
Union,
)
from .c_types import CType, TypeMap
from .demangle_codewarrior import parse as demangle_codewarrior_parse, CxxSymbol
from .error import DecompFailure, static_assert_unreachable
from .flow_graph import (
ArchFlowGraph,
FlowGraph,
Function,
Node,
ReturnNode,
SwitchNode,
TerminalNode,
locs_clobbered_until_dominator,
)
from .ir_pattern import IrPattern, simplify_ir_patterns
from .options import CodingStyle, Formatter, Options, Target
from .parse_file import AsmData, AsmDataEntry
from .parse_instruction import (
ArchAsm,
Argument,
AsmAddressMode,
AsmGlobalSymbol,
AsmLiteral,
BinOp,
Instruction,
InstrProcessingFailure,
Macro,
Register,
StackLocation,
current_instr,
)
from .types import (
AccessPath,
FunctionParam,
FunctionSignature,
StructDeclaration,
Type,
TypePool,
)
# Type aliases for the per-mnemonic dispatch tables declared on Arch below.
# Each maps an instruction mnemonic to a callback that consumes the parsed
# instruction arguments ("InstrArgs") and produces IR.
InstrSet = Collection[str]
InstrMap = Mapping[str, Callable[["InstrArgs"], "Expression"]]
StmtInstrMap = Mapping[str, Callable[["InstrArgs"], "Statement"]]
CmpInstrMap = Mapping[str, Callable[["InstrArgs"], "Condition"]]
StoreInstrMap = Mapping[str, Callable[["InstrArgs"], Optional["StoreStmt"]]]
MaybeInstrMap = Mapping[str, Callable[["InstrArgs"], Optional["Expression"]]]
PairInstrMap = Mapping[str, Callable[["InstrArgs"], Tuple["Expression", "Expression"]]]
# Maps a mnemonic to (implicit destination register, expression builder).
ImplicitInstrMap = Mapping[str, Tuple[Register, Callable[["InstrArgs"], "Expression"]]]
PpcCmpInstrMap = Mapping[str, Callable[["InstrArgs", str], "Expression"]]
class Arch(ArchFlowGraph):
    # Per-mnemonic dispatch tables, empty by default; concrete architecture
    # subclasses populate these to drive instruction translation.
    instrs_ignore: InstrSet = set()
    instrs_store: StoreInstrMap = {}
    instrs_store_update: StoreInstrMap = {}
    instrs_load_update: InstrMap = {}
    instrs_branches: CmpInstrMap = {}
    instrs_float_branches: InstrSet = set()
    instrs_float_comp: CmpInstrMap = {}
    instrs_ppc_compare: PpcCmpInstrMap = {}
    instrs_jumps: InstrSet = set()
    instrs_fn_call: InstrSet = set()
    instrs_no_dest: StmtInstrMap = {}
    instrs_hi_lo: PairInstrMap = {}
    instrs_source_first: InstrMap = {}
    instrs_destination_first: InstrMap = {}
    instrs_implicit_destination: ImplicitInstrMap = {}

    @abc.abstractmethod
    def function_abi(
        self,
        fn_sig: FunctionSignature,
        likely_regs: Dict[Register, bool],
        *,
        for_call: bool,
    ) -> "Abi":
        """
        Compute stack positions/registers used by a function based on its type
        information. Also computes a list of registers that may contain arguments,
        if the function has varargs or an unknown/incomplete type.
        """
        ...

    @abc.abstractmethod
    def function_return(self, expr: "Expression") -> Dict[Register, "Expression"]:
        """
        Compute register location(s) & values that will hold the return value
        of the function call `expr`.

        This must have a value for each register in `all_return_regs` in order to stay
        consistent with `Instruction.outputs`. This is why we can't use the
        function's return type, even though it may be more accurate.
        """
        ...

    # These are defined here to avoid a circular import in flow_graph.py
    ir_patterns: List[IrPattern] = []

    def simplify_ir(self, flow_graph: FlowGraph) -> None:
        # Apply the architecture's IR rewrite patterns to the flow graph.
        simplify_ir_patterns(self, flow_graph, self.ir_patterns)
# Operators for which operand grouping may be rearranged freely.
ASSOCIATIVE_OPS: Set[str] = {"+", "&&", "||", "&", "|", "^", "*"}
# Operators that can be emitted in `lhs op= rhs` compound-assignment form.
COMPOUND_ASSIGNMENT_OPS: Set[str] = {"+", "-", "*", "/", "%", "&", "|", "^", "<<", ">>"}
# Operator names that are emitted as function/macro calls rather than C operators.
PSEUDO_FUNCTION_OPS: Set[str] = {"MULT_HI", "MULTU_HI", "DMULT_HI", "DMULTU_HI", "CLZ"}
def as_type(expr: "Expression", type: Type, silent: bool) -> "Expression":
    """Coerce `expr` to `type`, inserting a reinterpreting Cast only when needed.

    If `expr`'s type unifies with `type`, the expression is returned unchanged
    when `silent` is set or `expr` is a Literal. For pointer targets, an
    attempt is made to rewrite the coercion as `&expr->field` (address of the
    field at offset 0 whose type matches the pointee) instead of a raw cast.
    """
    type = type.weaken_void_ptr()
    ptr_target_type = type.get_pointer_target()
    if expr.type.unify(type):
        if silent or isinstance(expr, Literal):
            return expr
    elif ptr_target_type is not None:
        # Try to express the pointer coercion as taking the address of a
        # size-compatible field at offset 0 of the pointed-to struct.
        ptr_target_type_size = ptr_target_type.get_size_bytes()
        field_path, field_type, _ = expr.type.get_deref_field(
            0, target_size=ptr_target_type_size
        )
        if field_path is not None and field_type.unify(ptr_target_type):
            expr = AddressOf(
                StructAccess(
                    struct_var=expr,
                    offset=0,
                    target_size=ptr_target_type_size,
                    field_path=field_path,
                    stack_info=None,
                    type=field_type,
                ),
                type=type,
            )
            if silent:
                return expr
    # Fall back to an explicit (visible) reinterpreting cast.
    return Cast(expr=expr, reinterpret=True, silent=False, type=type)
# Thin convenience wrappers around as_type for common target types. The
# boolean argument is `silent` (True: no cast emitted when types unify).
def as_f32(expr: "Expression") -> "Expression":
    return as_type(expr, Type.f32(), True)

def as_f64(expr: "Expression") -> "Expression":
    return as_type(expr, Type.f64(), True)

def as_sintish(expr: "Expression", *, silent: bool = False) -> "Expression":
    return as_type(expr, Type.sintish(), silent)

def as_uintish(expr: "Expression") -> "Expression":
    return as_type(expr, Type.uintish(), False)

def as_u32(expr: "Expression") -> "Expression":
    return as_type(expr, Type.u32(), False)

def as_s64(expr: "Expression", *, silent: bool = False) -> "Expression":
    return as_type(expr, Type.s64(), silent)

def as_u64(expr: "Expression", *, silent: bool = False) -> "Expression":
    return as_type(expr, Type.u64(), silent)

def as_intish(expr: "Expression") -> "Expression":
    return as_type(expr, Type.intish(), True)

def as_int64(expr: "Expression") -> "Expression":
    return as_type(expr, Type.int64(), True)

def as_intptr(expr: "Expression") -> "Expression":
    return as_type(expr, Type.intptr(), True)

def as_ptr(expr: "Expression") -> "Expression":
    return as_type(expr, Type.ptr(), True)

def as_function_ptr(expr: "Expression") -> "Expression":
    return as_type(expr, Type.ptr(Type.function()), True)
@dataclass
class StackInfo:
    # Per-function bookkeeping about the stack frame, arguments, locals and
    # temporaries discovered during translation.
    function: Function
    global_info: "GlobalInfo"
    flow_graph: FlowGraph
    # Size of the frame allocated in the prologue.
    allocated_stack_size: int = 0
    # True until a call instruction is seen.
    is_leaf: bool = True
    is_variadic: bool = False
    uses_framepointer: bool = False
    # Upper bound of the outgoing-argument region (see in_subroutine_arg_region).
    subroutine_arg_top: int = 0
    callee_save_regs: Set[Register] = field(default_factory=set)
    # [lower, upper) stack range where callee-saved registers are spilled.
    callee_save_reg_region: Tuple[int, int] = (0, 0)
    # Memoized types keyed by (category, key); see unique_type_for.
    unique_type_map: Dict[Tuple[str, object], "Type"] = field(default_factory=dict)
    local_vars: List["LocalVar"] = field(default_factory=list)
    temp_vars: List["EvalOnceStmt"] = field(default_factory=list)
    phi_vars: List["PhiExpr"] = field(default_factory=list)
    reg_vars: Dict[Register, "RegisterVar"] = field(default_factory=dict)
    used_reg_vars: Set[Register] = field(default_factory=set)
    arguments: List["PassedInArg"] = field(default_factory=list)
    # Counters backing temp_var()'s unique-name generation.
    temp_name_counter: Dict[str, int] = field(default_factory=dict)
    # Pointers known to have been dereferenced at a nonzero offset.
    nonzero_accesses: Set["Expression"] = field(default_factory=set)
    # Argument names keyed by stack offset (see add_known_param).
    param_names: Dict[int, str] = field(default_factory=dict)
    stack_pointer_type: Optional[Type] = None
    # Set when the first argument is re-typed for the OOP "base struct" pattern.
    replace_first_arg: Optional[Tuple[str, Type]] = None
    weak_stack_var_types: Dict[int, Type] = field(default_factory=dict)
    weak_stack_var_locations: Set[int] = field(default_factory=set)
def temp_var(self, prefix: str) -> str:
counter = self.temp_name_counter.get(prefix, 0) + 1
self.temp_name_counter[prefix] = counter
return prefix + (f"_{counter}" if counter > 1 else "")
    def in_subroutine_arg_region(self, location: int) -> bool:
        # True if `location` lies in the stack region reserved for passing
        # arguments to called subroutines. Not applicable on PPC, and leaf
        # functions (no calls) have no such region.
        if self.global_info.arch.arch == Target.ArchEnum.PPC:
            return False
        if self.is_leaf:
            return False
        assert self.subroutine_arg_top is not None
        return location < self.subroutine_arg_top
    def in_callee_save_reg_region(self, location: int) -> bool:
        # True if `location` is within the stack range used to spill
        # callee-saved registers.
        lower_bound, upper_bound = self.callee_save_reg_region
        if lower_bound <= location < upper_bound:
            return True
        # PPC saves LR in the header of the previous stack frame
        if (
            self.global_info.arch.arch == Target.ArchEnum.PPC
            and location == self.allocated_stack_size + 4
        ):
            return True
        return False
    def location_above_stack(self, location: int) -> bool:
        # Offsets at or beyond the allocated frame belong to the caller's frame.
        return location >= self.allocated_stack_size
    def add_known_param(self, offset: int, name: Optional[str], type: Type) -> None:
        """Register a typed (and possibly named) parameter at stack `offset`.

        May retype/rename the first argument to support the OOP base-struct
        pattern described below.
        """
        # A common pattern in C for OOP-style polymorphism involves casting a general "base" struct
        # to a specific "class" struct, where the first member of the class struct is the base struct.
        #
        # For the first argument of the function, if it is a pointer to a base struct, and there
        # exists a class struct named after the first part of the function name, assume that
        # this pattern is being used. Internally, treat the argument as a pointer to the *class*
        # struct, even though it is only a pointer to the *base* struct in the provided context.
        if offset == 0 and type.is_pointer() and self.replace_first_arg is None:
            # Candidate class struct name: the function-name prefix before the first "_".
            namespace = self.function.name.partition("_")[0]
            base_struct_type = type.get_pointer_target()
            self_struct = self.global_info.typepool.get_struct_by_tag_name(
                namespace, self.global_info.typemap
            )
            if (
                self_struct is not None
                and base_struct_type is not None
                and base_struct_type.is_struct()
            ):
                # Check if `self_struct_type` contains a `base_struct_type` at offset 0
                self_struct_type = Type.struct(self_struct)
                field_path, field_type, _ = self_struct_type.get_field(
                    offset=0, target_size=base_struct_type.get_size_bytes()
                )
                if (
                    field_path is not None
                    and field_type.unify(base_struct_type)
                    and not self_struct_type.unify(base_struct_type)
                ):
                    # Success, it looks like `self_struct_type` extends `base_struct_type`.
                    # By default, name the local var `self`, unless the argument name is `thisx` then use `this`
                    self.replace_first_arg = (name or "_self", type)
                    name = "this" if name == "thisx" else "self"
                    type = Type.ptr(Type.struct(self_struct))
        if name:
            self.param_names[offset] = name
        # Record the canonical argument and fold the declared type into it.
        _, arg = self.get_argument(offset)
        self.add_argument(arg)
        arg.type.unify(type)
def get_param_name(self, offset: int) -> Optional[str]:
    """Name assigned to the parameter at stack `offset`, or None if unknown."""
    return self.param_names.get(offset)
def add_local_var(self, var: "LocalVar") -> None:
    """Track a stack-local variable, deduplicated by offset and kept sorted."""
    for existing in self.local_vars:
        if existing.value == var.value:
            # Already known; nothing to do.
            return
    self.local_vars.append(var)
    # Make sure the local vars stay sorted in order on the stack.
    self.local_vars.sort(key=lambda local: local.value)
def add_argument(self, arg: "PassedInArg") -> None:
    """Track a stack-passed argument, deduplicated by offset and kept sorted."""
    for existing in self.arguments:
        if existing.value == arg.value:
            return
    self.arguments.append(arg)
    self.arguments.sort(key=lambda passed: passed.value)
def get_argument(self, location: int) -> Tuple["Expression", "PassedInArg"]:
    """Build the PassedInArg for the word containing `location`, plus an
    expression for the access (narrowed for sub-word offsets)."""
    aligned = location & -4
    arg = PassedInArg(
        aligned,
        copied=True,
        stack_info=self,
        type=self.unique_type_for("arg", aligned, Type.any_reg()),
    )
    # An access 3 bytes into the word reads the low byte; 2 bytes in, the
    # low halfword (big-endian layout).
    offset_in_word = location - aligned
    if offset_in_word == 3:
        return as_type(arg, Type.int_of_size(8), True), arg
    if offset_in_word == 2:
        return as_type(arg, Type.int_of_size(16), True), arg
    return arg, arg
def record_struct_access(self, ptr: "Expression", location: int) -> None:
    """Note that `ptr` was accessed at a nonzero struct offset."""
    if location != 0:
        self.nonzero_accesses.add(unwrap_deep(ptr))
def has_nonzero_access(self, ptr: "Expression") -> bool:
    """True if `ptr` was ever accessed at a nonzero struct offset."""
    unwrapped = unwrap_deep(ptr)
    return unwrapped in self.nonzero_accesses
def unique_type_for(self, category: str, key: object, default: Type) -> "Type":
    """Return the memoized Type for (category, key), creating it from
    `default` on first use so repeated lookups share one Type object."""
    return self.unique_type_map.setdefault((category, key), default)
def saved_reg_symbol(self, reg_name: str) -> "GlobalSymbol":
    """Create the `saved_reg_<name>` symbol used for spilled callee-saves."""
    symbol_name = f"saved_reg_{reg_name}"
    symbol_type = self.unique_type_for("saved_reg", symbol_name, Type.any_reg())
    return GlobalSymbol(symbol_name=symbol_name, type=symbol_type)
def should_save(self, expr: "Expression", offset: Optional[int]) -> bool:
    """Decide whether a store of `expr` (at stack `offset`, if known) is
    register-save bookkeeping rather than a meaningful assignment."""
    expr = early_unwrap(expr)
    if isinstance(expr, GlobalSymbol):
        sym = expr.symbol_name
        if sym == "sp" or sym.startswith("saved_reg_"):
            return True
    if isinstance(expr, PassedInArg) and not expr.copied:
        # An uncopied argument stored back into its home slot.
        if offset is None:
            return True
        if offset == self.allocated_stack_size + expr.value:
            return True
    return False
def get_stack_var(self, location: int, *, store: bool) -> "Expression":
    """Return the expression denoting stack `location`.

    Depending on which region `location` falls in, this is a throwaway
    callee-save slot, a passed-in argument, a subroutine argument, or a
    local variable typed via the stack struct. `store` is True when the
    access is a write; it affects argument registration and the
    weak-type tracking below.
    """
    # See `get_stack_info` for explanation
    if self.in_callee_save_reg_region(location):
        # Some annoying bookkeeping instruction. To avoid
        # further special-casing, just return whatever - it won't matter.
        return LocalVar(location, type=Type.any_reg(), path=None)
    elif self.location_above_stack(location):
        # Above the frame: a stack-passed argument from the caller.
        ret, arg = self.get_argument(location - self.allocated_stack_size)
        if not store:
            # Only reads register the argument; writes don't imply the
            # caller actually passed a value here.
            self.add_argument(arg)
        return ret
    elif self.in_subroutine_arg_region(location):
        return SubroutineArg(location, type=Type.any_reg())
    else:
        # Local variable
        assert self.stack_pointer_type is not None
        field_path, field_type, _ = self.stack_pointer_type.get_deref_field(
            location, target_size=None
        )
        # Some variables on the stack are compiler-managed, and aren't declared
        # in the original source. These variables can have different types inside
        # different blocks, so we track their types but assume that they may change
        # on each store.
        # TODO: Because the types are tracked in StackInfo instead of RegInfo, it is
        # possible that a load could incorrectly use a weak type from a sibling node
        # instead of a parent node. A more correct implementation would use similar
        # logic to the PhiExpr system. In practice however, storing types in StackInfo
        # works well enough because nodes are traversed approximately depth-first.
        # TODO: Maybe only do this for certain configurable regions?

        # Get the previous type stored in `location`
        previous_stored_type = self.weak_stack_var_types.get(location)
        if previous_stored_type is not None:
            # Check if the `field_type` is compatible with the type of the last store
            if not previous_stored_type.unify(field_type):
                # The types weren't compatible: mark this `location` as "weak"
                # This marker is only used to annotate the output
                self.weak_stack_var_locations.add(location)

            if store:
                # If there's already been a store to `location`, then return a fresh type
                field_type = Type.any_field()
            else:
                # Use the type of the last store instead of the one from `get_deref_field()`
                field_type = previous_stored_type

        # Track the type last stored at `location`
        if store:
            self.weak_stack_var_types[location] = field_type

        return LocalVar(location, type=field_type, path=field_path)
def maybe_get_register_var(self, reg: Register) -> Optional["RegisterVar"]:
    """Return the declared RegisterVar for `reg`, or None if none exists."""
    var = self.reg_vars.get(reg)
    return var
def add_register_var(self, reg: Register, name: str) -> None:
    """Declare a named variable pinned to `reg`, typed by register class."""
    if reg.is_float():
        var_type = Type.floatish()
    else:
        var_type = Type.intptr()
    self.reg_vars[reg] = RegisterVar(reg=reg, type=var_type, name=name)
def use_register_var(self, var: "RegisterVar") -> None:
    """Record that `var`'s register is referenced in the output."""
    self.used_reg_vars.add(var.reg)
def is_stack_reg(self, reg: Register) -> bool:
    """True for the stack pointer, and for the frame pointer when this
    function was detected to use one."""
    arch = self.global_info.arch
    if reg == arch.stack_pointer_reg:
        return True
    return reg == arch.frame_pointer_reg and self.uses_framepointer
def get_struct_type_map(self) -> Dict["Expression", Dict[int, Type]]:
    """Reorganize struct information in unique_type_map by var & offset"""
    result: Dict[Expression, Dict[int, Type]] = {}
    for (category, key), type in self.unique_type_map.items():
        if category != "struct":
            continue
        var, offset = typing.cast(Tuple[Expression, int], key)
        result.setdefault(var, {})[offset] = type
    return result
def __str__(self) -> str:
    """Human-readable summary of the detected stack layout, for debugging."""
    lines = [
        f"Stack info for function {self.function.name}:",
        f"Allocated stack size: {self.allocated_stack_size}",
        f"Leaf? {self.is_leaf}",
        f"Bounds of callee-saved vars region: {self.callee_save_reg_region}",
        f"Callee save registers: {self.callee_save_regs}",
    ]
    return "\n".join(lines)
def get_stack_info(
    function: Function,
    global_info: "GlobalInfo",
    flow_graph: FlowGraph,
) -> StackInfo:
    """Scan `function`'s prologue (and, for non-leaves, its whole body) to
    build a StackInfo: frame size, callee-save region bounds, leaf-ness,
    the subroutine-argument boundary, and the struct typing the stack.

    Raises DecompFailure on an inconsistent callee-save layout or a
    context-provided stack struct whose size disagrees with the detected
    frame size.
    """
    arch = global_info.arch
    info = StackInfo(function, global_info, flow_graph)

    # The goal here is to pick out special instructions that provide information
    # about this function's stack setup.
    #
    # IDO puts local variables *above* the saved registers on the stack, but
    # GCC puts local variables *below* the saved registers.
    # To support both, we explicitly determine both the upper & lower bounds of the
    # saved registers. Then, we estimate the boundary of the subroutine arguments
    # by finding the lowest stack offset that is loaded from or computed. (This
    # assumes that the compiler will never reuse a section of stack for *both*
    # a local variable *and* a subroutine argument.) Anything within the stack frame,
    # but outside of these two regions, is considered a local variable.
    callee_saved_offsets: List[int] = []

    # Track simple literal values stored into registers: MIPS compilers need a temp
    # reg to move the stack pointer more than 0x7FFF bytes.
    temp_reg_values: Dict[Register, int] = {}
    for inst in flow_graph.entry_node().block.instructions:
        arch_mnemonic = inst.arch_mnemonic(arch)
        if inst.mnemonic in arch.instrs_fn_call:
            # First call: the prologue is over, stop scanning.
            break
        elif arch_mnemonic == "mips:addiu" and inst.args[0] == arch.stack_pointer_reg:
            # Moving the stack pointer on MIPS
            assert isinstance(inst.args[2], AsmLiteral)
            info.allocated_stack_size = abs(inst.args[2].signed_value())
        elif (
            arch_mnemonic == "mips:subu"
            and inst.args[0] == arch.stack_pointer_reg
            and inst.args[1] == arch.stack_pointer_reg
            and inst.args[2] in temp_reg_values
        ):
            # Moving the stack pointer more than 0x7FFF on MIPS
            # TODO: This instruction needs to be ignored later in translation, in the
            # same way that `addiu $sp, $sp, N` is ignored in handle_addi_real
            assert isinstance(inst.args[2], Register)
            info.allocated_stack_size = temp_reg_values[inst.args[2]]
        elif arch_mnemonic == "ppc:stwu" and inst.args[0] == arch.stack_pointer_reg:
            # Moving the stack pointer on PPC
            assert isinstance(inst.args[1], AsmAddressMode)
            assert isinstance(inst.args[1].lhs, AsmLiteral)
            info.allocated_stack_size = abs(inst.args[1].lhs.signed_value())
        elif (
            arch_mnemonic == "mips:move"
            and inst.args[0] == arch.frame_pointer_reg
            and inst.args[1] == arch.stack_pointer_reg
        ):
            # "move fp, sp" very likely means the code is compiled with frame
            # pointers enabled; thus fp should be treated the same as sp.
            info.uses_framepointer = True
        elif (
            arch_mnemonic
            in [
                "mips:sw",
                "mips:swc1",
                "mips:sdc1",
                "ppc:stw",
                "ppc:stmw",
                "ppc:stfd",
                "ppc:psq_st",
            ]
            and isinstance(inst.args[0], Register)
            and inst.args[0] in arch.saved_regs
            and isinstance(inst.args[1], AsmAddressMode)
            and inst.args[1].rhs == arch.stack_pointer_reg
            and (
                inst.args[0] not in info.callee_save_regs
                or arch_mnemonic == "ppc:psq_st"
            )
        ):
            # Initial saving of callee-save register onto the stack.
            if inst.args[0] in (arch.return_address_reg, Register("r0")):
                # Saving the return address on the stack.
                info.is_leaf = False
            # The registers & their stack accesses must be matched up in ArchAsm.parse
            for reg, mem in zip(inst.inputs, inst.outputs):
                if isinstance(reg, Register) and isinstance(mem, StackLocation):
                    assert mem.symbolic_offset is None
                    stack_offset = mem.offset
                    if arch_mnemonic != "ppc:psq_st":
                        # psq_st instructions store the same register as stfd, just
                        # as packed singles instead. Prioritize the stfd.
                        info.callee_save_regs.add(reg)
                    callee_saved_offsets.append(stack_offset)
        elif arch_mnemonic == "ppc:mflr" and inst.args[0] == Register("r0"):
            # Reading the link register into r0: this function saves its
            # return address, so it is not a leaf.
            info.is_leaf = False
        elif arch_mnemonic == "mips:li" and inst.args[0] in arch.temp_regs:
            assert isinstance(inst.args[0], Register)
            assert isinstance(inst.args[1], AsmLiteral)
            temp_reg_values[inst.args[0]] = inst.args[1].value
        elif (
            arch_mnemonic == "mips:ori"
            and inst.args[0] == inst.args[1]
            and inst.args[0] in temp_reg_values
        ):
            # `li` + `ori` pair building a 32-bit constant in a temp register.
            assert isinstance(inst.args[0], Register)
            assert isinstance(inst.args[2], AsmLiteral)
            temp_reg_values[inst.args[0]] |= inst.args[2].value

    if not info.is_leaf:
        # Iterate over the whole function, not just the first basic block,
        # to estimate the boundary for the subroutine argument region
        info.subroutine_arg_top = info.allocated_stack_size
        for node in flow_graph.nodes:
            for inst in node.block.instructions:
                arch_mnemonic = inst.arch_mnemonic(arch)
                if (
                    arch_mnemonic in ["mips:lw", "mips:lwc1", "mips:ldc1", "ppc:lwz"]
                    and isinstance(inst.args[1], AsmAddressMode)
                    and inst.args[1].rhs == arch.stack_pointer_reg
                    and inst.args[1].lhs_as_literal() >= 16
                ):
                    info.subroutine_arg_top = min(
                        info.subroutine_arg_top, inst.args[1].lhs_as_literal()
                    )
                elif (
                    arch_mnemonic == "mips:addiu"
                    and inst.args[0] != arch.stack_pointer_reg
                    and inst.args[1] == arch.stack_pointer_reg
                    and isinstance(inst.args[2], AsmLiteral)
                    and inst.args[2].value < info.allocated_stack_size
                ):
                    info.subroutine_arg_top = min(
                        info.subroutine_arg_top, inst.args[2].value
                    )

    # Compute the bounds of the callee-saved register region, including padding
    if callee_saved_offsets:
        callee_saved_offsets.sort()
        bottom = callee_saved_offsets[0]

        # Both IDO & GCC save registers in two subregions:
        # (a) One for double-sized registers
        # (b) One for word-sized registers, padded to a multiple of 8 bytes
        # IDO has (a) lower than (b); GCC has (b) lower than (a)
        # Check that there are no gaps in this region, other than a single
        # 4-byte word between subregions.
        top = bottom
        internal_padding_added = False
        for offset in callee_saved_offsets:
            if offset != top:
                if not internal_padding_added and offset == top + 4:
                    internal_padding_added = True
                else:
                    raise DecompFailure(
                        f"Gap in callee-saved word stack region. "
                        f"Saved: {callee_saved_offsets}, "
                        f"gap at: {offset} != {top}."
                    )
            top = offset + 4
        info.callee_save_reg_region = (bottom, top)

        # Subroutine arguments must be at the very bottom of the stack, so they
        # must come after the callee-saved region
        info.subroutine_arg_top = min(info.subroutine_arg_top, bottom)

    # Use a struct to represent the stack layout. If the struct is provided in the context,
    # its fields will be used for variable types & names.
    stack_struct_name = f"_mips2c_stack_{function.name}"
    stack_struct = global_info.typepool.get_struct_by_tag_name(
        stack_struct_name, global_info.typemap
    )
    if stack_struct is not None:
        if stack_struct.size != info.allocated_stack_size:
            raise DecompFailure(
                f"Function {function.name} has a provided stack type {stack_struct_name} "
                f"with size {stack_struct.size}, but the detected stack size was "
                f"{info.allocated_stack_size}."
            )
    else:
        stack_struct = StructDeclaration.unknown(
            global_info.typepool,
            size=info.allocated_stack_size,
            tag_name=stack_struct_name,
        )
    # Mark the struct as a stack struct so we never try to use a reference to the struct itself
    stack_struct.is_stack = True
    stack_struct.new_field_prefix = "sp"

    # This acts as the type of the $sp register
    info.stack_pointer_type = Type.ptr(Type.struct(stack_struct))

    return info
def format_hex(val: int) -> str:
    """Render `val` as uppercase hex digits, without a 0x prefix."""
    return f"{val:X}"
def escape_byte(b: int) -> bytes:
    """Escape a single byte value for use inside a C string literal."""
    table = {
        b"\0": b"\\0",
        b"\b": b"\\b",
        b"\f": b"\\f",
        b"\n": b"\\n",
        b"\r": b"\\r",
        b"\t": b"\\t",
        b"\v": b"\\v",
        b"\\": b"\\\\",
        b'"': b'\\"',
    }
    as_bytes = bytes([b])
    escaped = table.get(as_bytes)
    if escaped is not None:
        return escaped
    # Remaining control characters (and DEL/0xFF) get a hex escape.
    if b < 0x20 or b in (0x7F, 0xFF):
        return f"\\x{b:02x}".encode("ascii")
    return as_bytes
@dataclass(eq=False)
class Var:
    """A lazily-named temporary variable; the name is only allocated when
    the variable is first formatted."""

    stack_info: StackInfo = field(repr=False)
    prefix: str
    num_usages: int = 0
    name: Optional[str] = None

    def format(self, fmt: Formatter) -> str:
        if self.name is not None:
            return self.name
        # First use: grab a fresh temp name from the function's StackInfo.
        self.name = self.stack_info.temp_var(self.prefix)
        return self.name

    def __str__(self) -> str:
        return "<temp>"
class Expression(abc.ABC):
    """Abstract base for all expressions produced during translation."""

    type: Type

    @abc.abstractmethod
    def dependencies(self) -> List["Expression"]:
        """Sub-expressions whose output this expression depends on."""
        ...

    def use(self) -> None:
        """Mark an expression as "will occur in the output". Various subclasses
        override this to provide special behavior; for instance, EvalOnceExpr
        checks if it occurs more than once in the output and if so emits a temp.

        It is important to get the number of use() calls correct:
        * if use() is called but the expression is not emitted, it may cause
          function calls to be silently dropped.
        * if use() is not called but the expression is emitted, it may cause phi
          variables to be printed as unnamed-phi($reg), without any assignment
          to that phi.
        * if use() is called once but the expression is emitted twice, it may
          cause function calls to be duplicated."""
        for dep in self.dependencies():
            dep.use()

    @abc.abstractmethod
    def format(self, fmt: Formatter) -> str:
        ...

    def __str__(self) -> str:
        """Stringify an expression for debug purposes. The output can change
        depending on when this is called, e.g. because of EvalOnceExpr state.
        To avoid using it by accident, output is quoted."""
        debug_fmt = Formatter(debug=True)
        return '"' + self.format(debug_fmt) + '"'
class Condition(Expression):
    """An Expression usable as a boolean condition; must support negation."""

    @abc.abstractmethod
    def negated(self) -> "Condition":
        """Return a condition equivalent to the logical negation of this one."""
        ...
class Statement(abc.ABC):
    """Abstract base for emitted C statements."""

    @abc.abstractmethod
    def should_write(self) -> bool:
        """Whether this statement must appear in the output."""
        ...

    @abc.abstractmethod
    def format(self, fmt: Formatter) -> str:
        ...

    def __str__(self) -> str:
        """Stringify a statement for debug purposes. The output can change
        depending on when this is called, e.g. because of EvalOnceExpr state.
        To avoid using it by accident, output is quoted."""
        debug_fmt = Formatter(debug=True)
        return '"' + self.format(debug_fmt) + '"'
@dataclass(frozen=True, eq=False)
class ErrorExpr(Condition):
    """Placeholder emitted when translation of an expression fails."""

    desc: Optional[str] = None
    type: Type = field(default_factory=Type.any_reg)

    def dependencies(self) -> List[Expression]:
        return []

    def negated(self) -> "Condition":
        # An error has no meaningful negation; propagate it unchanged.
        return self

    def format(self, fmt: Formatter) -> str:
        if self.desc is None:
            return "MIPS2C_ERROR()"
        return f"MIPS2C_ERROR({self.desc})"
@dataclass(frozen=True)
class CommentExpr(Expression):
    """Wraps an expression, annotating it with /* prefix */ and /* suffix */
    comments when the coding style permits comments."""

    expr: Expression
    type: Type = field(compare=False)
    prefix: Optional[str] = None
    suffix: Optional[str] = None

    def dependencies(self) -> List[Expression]:
        return [self.expr]

    def format(self, fmt: Formatter) -> str:
        inner = self.expr.format(fmt)
        if fmt.coding_style.comment_style == CodingStyle.CommentStyle.NONE:
            return inner
        before = "" if self.prefix is None else f"/* {self.prefix} */ "
        after = "" if self.suffix is None else f" /* {self.suffix} */"
        return f"{before}{inner}{after}"

    @staticmethod
    def wrap(
        expr: Expression, prefix: Optional[str] = None, suffix: Optional[str] = None
    ) -> Expression:
        """Wrap `expr` in a CommentExpr unless both annotations are absent."""
        if prefix is None and suffix is None:
            return expr
        return CommentExpr(expr=expr, type=expr.type, prefix=prefix, suffix=suffix)
@dataclass(frozen=True, eq=False)
class SecondF64Half(Expression):
    """Marker for the second register of a 64-bit float register pair."""

    type: Type = field(default_factory=Type.any_reg)

    def dependencies(self) -> List[Expression]:
        return []

    def format(self, fmt: Formatter) -> str:
        return "(second half of f64)"
@dataclass(frozen=True, eq=False)
class CarryBit(Expression):
    """The processor carry flag, treated as a 0/1 integer value."""

    type: Type = field(default_factory=Type.intish)

    def dependencies(self) -> List[Expression]:
        return []

    def format(self, fmt: Formatter) -> str:
        return "MIPS2C_CARRY"

    @staticmethod
    def add_to(expr: Expression) -> "BinaryOp":
        """Build `expr + carry`, with div/mod folding applied."""
        return fold_divmod(BinaryOp.intptr(expr, "+", CarryBit()))

    @staticmethod
    def sub_from(expr: Expression) -> "BinaryOp":
        """Build `expr - !carry`, as used by subtract-with-borrow."""
        return BinaryOp.intptr(expr, "-", UnaryOp("!", CarryBit(), type=Type.intish()))
@dataclass(frozen=True, eq=False)
class BinaryOp(Condition):
    """A binary operation `left op right`.

    The static constructors coerce their operands to particular type
    families (int, float, pointer-sized, 64-bit, comparison-producing)
    before building the node. `format` performs several cosmetic rewrites
    (literal-on-the-left swap, negative-literal folding, parenthesis
    stripping) at output time only.
    """

    left: Expression
    op: str
    right: Expression
    type: Type

    @staticmethod
    def int(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(
            left=as_intish(left), op=op, right=as_intish(right), type=Type.intish()
        )

    @staticmethod
    def int64(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(
            left=as_int64(left), op=op, right=as_int64(right), type=Type.int64()
        )

    @staticmethod
    def intptr(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(
            left=as_intptr(left), op=op, right=as_intptr(right), type=Type.intptr()
        )

    @staticmethod
    def icmp(left: Expression, op: str, right: Expression) -> "BinaryOp":
        # Integer/pointer comparison producing a bool.
        return BinaryOp(
            left=as_intptr(left), op=op, right=as_intptr(right), type=Type.bool()
        )

    @staticmethod
    def scmp(left: Expression, op: str, right: Expression) -> "BinaryOp":
        # Signed comparison; `silent` avoids emitting casts for the coercion.
        return BinaryOp(
            left=as_sintish(left, silent=True),
            op=op,
            right=as_sintish(right, silent=True),
            type=Type.bool(),
        )

    @staticmethod
    def sintptr_cmp(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(
            left=as_type(left, Type.sintptr(), False),
            op=op,
            right=as_type(right, Type.sintptr(), False),
            type=Type.bool(),
        )

    @staticmethod
    def ucmp(left: Expression, op: str, right: Expression) -> "BinaryOp":
        # Unsigned comparison producing a bool.
        return BinaryOp(
            left=as_uintish(left), op=op, right=as_uintish(right), type=Type.bool()
        )

    @staticmethod
    def uintptr_cmp(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(
            left=as_type(left, Type.uintptr(), False),
            op=op,
            right=as_type(right, Type.uintptr(), False),
            type=Type.bool(),
        )

    @staticmethod
    def fcmp(left: Expression, op: str, right: Expression) -> "BinaryOp":
        # Single-precision float comparison.
        return BinaryOp(
            left=as_f32(left),
            op=op,
            right=as_f32(right),
            type=Type.bool(),
        )

    @staticmethod
    def dcmp(left: Expression, op: str, right: Expression) -> "BinaryOp":
        # Double-precision float comparison.
        return BinaryOp(
            left=as_f64(left),
            op=op,
            right=as_f64(right),
            type=Type.bool(),
        )

    @staticmethod
    def sint(
        left: Expression, op: str, right: Expression, *, silent: bool = False
    ) -> "BinaryOp":
        return BinaryOp(
            left=as_sintish(left, silent=silent),
            op=op,
            right=as_sintish(right, silent=silent),
            type=Type.s32(),
        )

    @staticmethod
    def uint(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(
            left=as_uintish(left), op=op, right=as_uintish(right), type=Type.u32()
        )

    @staticmethod
    def s64(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(left=as_s64(left), op=op, right=as_s64(right), type=Type.s64())

    @staticmethod
    def u64(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(left=as_u64(left), op=op, right=as_u64(right), type=Type.u64())

    @staticmethod
    def f32(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(
            left=as_f32(left),
            op=op,
            right=as_f32(right),
            type=Type.f32(),
        )

    @staticmethod
    def f64(left: Expression, op: str, right: Expression) -> "BinaryOp":
        return BinaryOp(
            left=as_f64(left),
            op=op,
            right=as_f64(right),
            type=Type.f64(),
        )

    def is_comparison(self) -> bool:
        return self.op in ["==", "!=", ">", "<", ">=", "<="]

    def is_floating(self) -> bool:
        return self.left.type.is_float() and self.right.type.is_float()

    def negated(self) -> "Condition":
        """Return the logical negation, rewriting operators where safe."""
        if (
            self.op in ["&&", "||"]
            and isinstance(self.left, Condition)
            and isinstance(self.right, Condition)
        ):
            # DeMorgan's Laws
            return BinaryOp(
                left=self.left.negated(),
                op={"&&": "||", "||": "&&"}[self.op],
                right=self.right.negated(),
                type=Type.bool(),
            )
        if not self.is_comparison() or (
            self.is_floating() and self.op in ["<", ">", "<=", ">="]
        ):
            # Floating-point comparisons cannot be negated in any nice way,
            # due to nans.
            return UnaryOp("!", self, type=Type.bool())
        # Integer comparison: flip the operator directly.
        return BinaryOp(
            left=self.left,
            op={"==": "!=", "!=": "==", ">": "<=", "<": ">=", ">=": "<", "<=": ">"}[
                self.op
            ],
            right=self.right,
            type=Type.bool(),
        )

    def dependencies(self) -> List[Expression]:
        return [self.left, self.right]

    def format(self, fmt: Formatter) -> str:
        left_expr = late_unwrap(self.left)
        right_expr = late_unwrap(self.right)

        # Put the literal operand on the right of a comparison, mirroring
        # the operator (e.g. `1 < x` becomes `x > 1`).
        if (
            self.is_comparison()
            and isinstance(left_expr, Literal)
            and not isinstance(right_expr, Literal)
        ):
            return BinaryOp(
                left=right_expr,
                op=self.op.translate(str.maketrans("<>", "><")),
                right=left_expr,
                type=self.type,
            ).format(fmt)

        # Rewrite ops with negative literal RHS: `x + -1` -> `x - 1`,
        # `x & -2` -> `x & ~1` (integer ops only).
        if (
            not self.is_floating()
            and isinstance(right_expr, Literal)
            and right_expr.value < 0
        ):
            if self.op == "+":
                neg = Literal(value=-right_expr.value, type=right_expr.type)
                sub = BinaryOp(op="-", left=left_expr, right=neg, type=self.type)
                return sub.format(fmt)
            if self.op in ("&", "|"):
                neg = Literal(value=~right_expr.value, type=right_expr.type)
                right = UnaryOp("~", neg, type=Type.any_reg())
                expr = BinaryOp(op=self.op, left=left_expr, right=right, type=self.type)
                return expr.format(fmt)

        # For commutative, left-associative operations, strip unnecessary parentheses.
        lhs = left_expr.format(fmt)
        if (
            isinstance(left_expr, BinaryOp)
            and left_expr.op == self.op
            and self.op in ASSOCIATIVE_OPS
        ):
            lhs = lhs[1:-1]

        # For certain operators, use base-10 (decimal) for the RHS
        if self.op in ("/", "%") and isinstance(right_expr, Literal):
            rhs = right_expr.format(fmt, force_dec=True)
        else:
            rhs = right_expr.format(fmt)

        # These aren't real operators (or functions); format them as a fn call
        if self.op in PSEUDO_FUNCTION_OPS:
            return f"{self.op}({lhs}, {rhs})"

        return f"({lhs} {self.op} {rhs})"
@dataclass(frozen=True, eq=False)
class TernaryOp(Expression):
    """C conditional expression: `cond ? left : right`."""

    cond: Condition
    left: Expression
    right: Expression
    type: Type

    def dependencies(self) -> List[Expression]:
        return [self.cond, self.left, self.right]

    def format(self, fmt: Formatter) -> str:
        condition = simplify_condition(self.cond).format(fmt)
        if_true = self.left.format(fmt)
        if_false = self.right.format(fmt)
        return f"({condition} ? {if_true} : {if_false})"
@dataclass(frozen=True, eq=False)
class UnaryOp(Condition):
    """A prefix unary operation, e.g. `-x`, `!x`, `~x`."""

    op: str
    expr: Expression
    type: Type

    def dependencies(self) -> List[Expression]:
        return [self.expr]

    @staticmethod
    def sint(op: str, expr: Expression) -> "UnaryOp":
        """Apply `op` to `expr` coerced to a signed integer type."""
        signed = as_sintish(expr, silent=True)
        return UnaryOp(op=op, expr=signed, type=signed.type)

    def negated(self) -> "Condition":
        # `!(...)` on an operator expression cancels; otherwise wrap in `!`.
        if self.op == "!" and isinstance(self.expr, (UnaryOp, BinaryOp)):
            return self.expr
        return UnaryOp("!", self, type=Type.bool())

    def format(self, fmt: Formatter) -> str:
        inner = self.expr.format(fmt)
        # These aren't real operators (or functions); format them as a fn call
        if self.op in PSEUDO_FUNCTION_OPS:
            return f"{self.op}({inner})"
        return f"{self.op}{inner}"
@dataclass(frozen=True, eq=False)
class ExprCondition(Condition):
    """Treats an arbitrary expression as a boolean condition, with an
    optional logical negation flag."""

    expr: Expression
    type: Type
    is_negated: bool = False

    def dependencies(self) -> List[Expression]:
        return [self.expr]

    def negated(self) -> "Condition":
        return ExprCondition(self.expr, self.type, not self.is_negated)

    def format(self, fmt: Formatter) -> str:
        bang = "!" if self.is_negated else ""
        return f"{bang}{self.expr.format(fmt)}"
@dataclass(frozen=True, eq=False)
class CommaConditionExpr(Condition):
    """C comma expression used as a condition: `(stmt, ..., cond)`."""

    statements: List["Statement"]
    condition: "Condition"
    type: Type = Type.bool()

    def dependencies(self) -> List[Expression]:
        assert False, "CommaConditionExpr should not be used within translate.py"
        return []

    def negated(self) -> "Condition":
        return CommaConditionExpr(self.statements, self.condition.negated())

    def format(self, fmt: Formatter) -> str:
        # Statements are joined with commas; trailing semicolons are dropped.
        parts = [stmt.format(fmt).rstrip(";") for stmt in self.statements]
        joined = ", ".join(parts)
        return f"({joined}, {self.condition.format(fmt)})"
@dataclass(frozen=True, eq=False)
class Cast(Expression):
    """A cast of `expr` to `type`.

    `reinterpret` marks bit-pattern-preserving casts (as opposed to
    value-converting int<->float casts); `silent` allows the cast to be
    omitted from the output when it is a reinterpret cast.
    """

    expr: Expression
    type: Type
    reinterpret: bool = False
    silent: bool = True

    def dependencies(self) -> List[Expression]:
        return [self.expr]

    def use(self) -> None:
        # Try to unify, to make stringification output better.
        self.expr.type.unify(self.type)
        super().use()

    def needed_for_store(self) -> bool:
        """Whether this cast must still be emitted on the LHS of a store."""
        if not self.reinterpret:
            # int <-> float casts should be emitted even for stores.
            return True
        if not self.expr.type.unify(self.type):
            # Emit casts when types fail to unify.
            return True
        return False

    def is_trivial(self) -> bool:
        # A reinterpret cast that stays on the same side of the int/float
        # divide, wrapping an already-trivial expression.
        return (
            self.reinterpret
            and self.expr.type.is_float() == self.type.is_float()
            and is_trivial_expression(self.expr)
        )

    def format(self, fmt: Formatter) -> str:
        if self.reinterpret and self.expr.type.is_float() != self.type.is_float():
            # This shouldn't happen, but mark it in the output if it does.
            if fmt.valid_syntax:
                return (
                    f"MIPS2C_BITWISE({self.type.format(fmt)}, {self.expr.format(fmt)})"
                )
            return f"(bitwise {self.type.format(fmt)}) {self.expr.format(fmt)}"
        if self.reinterpret and (
            self.silent
            or (is_type_obvious(self.expr) and self.expr.type.unify(self.type))
        ):
            # Reinterpret cast that can be safely elided from the output.
            return self.expr.format(fmt)
        if fmt.skip_casts:
            return self.expr.format(fmt)

        # Function casts require special logic because function calls have
        # higher precedence than casts
        fn_sig = self.type.get_function_pointer_signature()
        if fn_sig:
            prototype_sig = self.expr.type.get_function_pointer_signature()
            if not prototype_sig or not prototype_sig.unify_with_args(fn_sig):
                # A function pointer cast is required if the inner expr is not
                # a function pointer, or has incompatible argument types
                return f"(({self.type.format(fmt)}) {self.expr.format(fmt)})"
            if not prototype_sig.return_type.unify(fn_sig.return_type):
                # Only cast the return value of the call
                return f"({fn_sig.return_type.format(fmt)}) {self.expr.format(fmt)}"
            # No cast needed
            return self.expr.format(fmt)

        return f"({self.type.format(fmt)}) {self.expr.format(fmt)}"
@dataclass(frozen=True, eq=False)
class FuncCall(Expression):
    """A call expression: `function(arg, ...)`."""

    function: Expression
    args: List[Expression]
    type: Type

    def dependencies(self) -> List[Expression]:
        return self.args + [self.function]

    def format(self, fmt: Formatter) -> str:
        # TODO: The function type may have a different number of params than it had
        # when the FuncCall was created. Should we warn that there may be the wrong
        # number of arguments at this callsite?
        arg_strs = [format_expr(arg, fmt) for arg in self.args]
        return f"{self.function.format(fmt)}({', '.join(arg_strs)})"
@dataclass(frozen=True, eq=True)
class LocalVar(Expression):
    """A variable in the current frame at byte offset `value`; equality is
    by offset only, so the same slot compares equal across accesses."""

    value: int
    type: Type = field(compare=False)
    path: Optional[AccessPath] = field(compare=False)

    def dependencies(self) -> List[Expression]:
        return []

    def format(self, fmt: Formatter) -> str:
        fallback_name = f"unksp{format_hex(self.value)}"
        if self.path is None:
            return fallback_name
        name = StructAccess.access_path_to_field_name(self.path, fmt)
        # Only a plain `->field` access maps to a simple local name.
        if not name.startswith("->"):
            return fallback_name
        return name[2:]

    def toplevel_decl(self, fmt: Formatter) -> Optional[str]:
        """Return a declaration for this LocalVar, if required."""
        # If len(self.path) > 2, then this local is an inner field of another
        # local, so it doesn't need to be declared.
        if self.path is None:
            return None
        if len(self.path) != 2 or not isinstance(self.path[1], str):
            return None
        return self.type.to_decl(self.path[1], fmt)
@dataclass(frozen=True, eq=False)
class RegisterVar(Expression):
    """A named variable pinned to a specific machine register."""

    reg: Register
    name: str
    type: Type

    def dependencies(self) -> List[Expression]:
        return []

    def format(self, fmt: Formatter) -> str:
        return self.name
@dataclass(frozen=True, eq=True)
class PassedInArg(Expression):
    """A stack-passed function argument at (word-aligned) byte offset
    `value`; equality is by offset only."""

    value: int
    copied: bool = field(compare=False)
    stack_info: StackInfo = field(compare=False, repr=False)
    type: Type = field(compare=False)

    def dependencies(self) -> List[Expression]:
        return []

    def format(self, fmt: Formatter) -> str:
        assert self.value % 4 == 0
        name = self.stack_info.get_param_name(self.value)
        if name:
            return name
        return f"arg{format_hex(self.value // 4)}"
@dataclass(frozen=True, eq=True)
class SubroutineArg(Expression):
    """A stack slot used to pass an argument to a callee; equality is by
    offset only."""

    value: int
    type: Type = field(compare=False)

    def dependencies(self) -> List[Expression]:
        return []

    def format(self, fmt: Formatter) -> str:
        return f"subroutine_arg{format_hex(self.value // 4)}"
@dataclass(eq=True, unsafe_hash=True)
class StructAccess(Expression):
    # Represents struct_var->offset.
    # This has eq=True since it represents a live expression and not an access
    # at a certain point in time -- this sometimes helps get rid of phi nodes.
    # prevent_later_uses makes sure it's not used after writes/function calls
    # that may invalidate it.
    struct_var: Expression
    offset: int
    target_size: Optional[int]
    # field_path may be filled in lazily by late_field_path(); it and the
    # fields below are excluded from equality/hashing.
    field_path: Optional[AccessPath] = field(compare=False)
    stack_info: Optional[StackInfo] = field(compare=False, repr=False)
    type: Type = field(compare=False)
    # Guards late_field_path() so the lookup is attempted at most once.
    checked_late_field_path: bool = field(default=False, compare=False)

    def __post_init__(self) -> None:
        # stack_info is used to resolve field_path late
        assert (
            self.stack_info is not None or self.field_path is not None
        ), "Must provide at least one of (stack_info, field_path)"
        self.assert_valid_field_path(self.field_path)

    @staticmethod
    def assert_valid_field_path(path: Optional[AccessPath]) -> None:
        assert path is None or (
            path and isinstance(path[0], int)
        ), "The first element of the field path, if present, must be an int"

    @classmethod
    def access_path_to_field_name(cls, path: AccessPath, fmt: Formatter) -> str:
        """
        Convert an access path into a dereferencing field name, like the following examples:
            - `[0, "foo", 3, "bar"]` into `"->foo[3].bar"`
            - `[0, 3, "bar"]` into `"[0][3].bar"`
            - `[0, 1, 2]` into `"[0][1][2]"
            - `[0]` into `"[0]"`
        The path must have at least one element, and the first element must be an int.
        """
        cls.assert_valid_field_path(path)
        output = ""

        # Replace an initial "[0]." with "->"
        if len(path) >= 2 and path[0] == 0 and isinstance(path[1], str):
            output += f"->{path[1]}"
            path = path[2:]

        for p in path:
            if isinstance(p, str):
                output += f".{p}"
            elif isinstance(p, int):
                output += f"[{fmt.format_int(p)}]"
            else:
                static_assert_unreachable(p)
        return output

    def dependencies(self) -> List[Expression]:
        return [self.struct_var]

    def make_reference(self) -> Optional["StructAccess"]:
        """Turn `x->arr[0]` into `x->arr`, for address-of uses; None if the
        path doesn't end in a [0] index."""
        field_path = self.late_field_path()
        if field_path and len(field_path) >= 2 and field_path[-1] == 0:
            return replace(self, field_path=field_path[:-1])
        return None

    def late_field_path(self) -> Optional[AccessPath]:
        # If we didn't have a type at the time when the struct access was
        # constructed, but now we do, compute field name.
        if self.field_path is None and not self.checked_late_field_path:
            var = late_unwrap(self.struct_var)
            # Format var to recursively resolve any late_field_path it has to
            # potentially improve var.type before we look up our field name
            var.format(Formatter())
            field_path, field_type, _ = var.type.get_deref_field(
                self.offset, target_size=self.target_size
            )
            if field_path is not None:
                self.assert_valid_field_path(field_path)
                self.field_path = field_path
                self.type.unify(field_type)
            # Mark resolved (successfully or not) so we don't retry.
            self.checked_late_field_path = True
        return self.field_path

    def late_has_known_type(self) -> bool:
        """Whether the access target's type is known (possibly resolved late)."""
        if self.late_field_path() is not None:
            return True
        assert (
            self.stack_info is not None
        ), "StructAccess must have stack_info if field_path isn't set"
        if self.offset == 0:
            # Offset-0 access to a context-typed global symbol that was never
            # accessed at a nonzero offset: treat its type as known.
            var = late_unwrap(self.struct_var)
            if (
                not self.stack_info.has_nonzero_access(var)
                and isinstance(var, AddressOf)
                and isinstance(var.expr, GlobalSymbol)
                and var.expr.type_provided
            ):
                return True
        return False

    def format(self, fmt: Formatter) -> str:
        var = late_unwrap(self.struct_var)
        has_nonzero_access = False
        if self.stack_info is not None:
            has_nonzero_access = self.stack_info.has_nonzero_access(var)

        field_path = self.late_field_path()

        if field_path is not None and field_path != [0]:
            has_nonzero_access = True
        elif fmt.valid_syntax and (self.offset != 0 or has_nonzero_access):
            # Unknown field at a nonzero offset: emit a macro the user can fix.
            offset_str = fmt.format_int(self.offset)
            return f"MIPS2C_FIELD({var.format(fmt)}, {Type.ptr(self.type).format(fmt)}, {offset_str})"
        else:
            # Fall back to a synthesized `unk_<offset>` field name.
            prefix = "unk" + ("_" if fmt.coding_style.unknown_underscore else "")
            field_path = [0, prefix + format_hex(self.offset)]

        field_name = self.access_path_to_field_name(field_path, fmt)

        # Rewrite `(&x)->y` to `x.y` by stripping `AddressOf` & setting deref=False
        deref = True
        if (
            isinstance(var, AddressOf)
            and not var.expr.type.is_array()
            and field_name.startswith("->")
        ):
            var = var.expr
            field_name = field_name.replace("->", ".", 1)
            deref = False

        # Rewrite `x->unk0` to `*x` and `x.unk0` to `x`, unless has_nonzero_access
        if self.offset == 0 and not has_nonzero_access:
            return f"{'*' if deref else ''}{var.format(fmt)}"

        return f"{parenthesize_for_struct_access(var, fmt)}{field_name}"
@dataclass(frozen=True, eq=True)
class ArrayAccess(Expression):
    # Represents ptr[index]. eq=True for symmetry with StructAccess.
    ptr: Expression
    index: Expression
    type: Type = field(compare=False)

    def dependencies(self) -> List[Expression]:
        return [self.ptr, self.index]

    def format(self, fmt: Formatter) -> str:
        # Parenthesize the base if needed (e.g. nested derefs), then subscript.
        target = parenthesize_for_struct_access(self.ptr, fmt)
        subscript = format_expr(self.index, fmt)
        return f"{target}[{subscript}]"
@dataclass(eq=False)
class GlobalSymbol(Expression):
    """A named global symbol, optionally backed by assembly data."""

    symbol_name: str
    type: Type
    # The .data/.rodata/.bss entry for this symbol, if one was parsed.
    asm_data_entry: Optional[AsmDataEntry] = None
    # True if the symbol appeared in the provided context/typemap.
    symbol_in_context: bool = False
    # True if the context provided an explicit type for the symbol.
    type_provided: bool = False
    initializer_in_typemap: bool = False
    # Demangled C++ name, when available.
    demangled_str: Optional[str] = None
    def dependencies(self) -> List[Expression]:
        return []
    def is_string_constant(self) -> bool:
        # A single contiguous bytes blob marked as a string.
        ent = self.asm_data_entry
        if not ent or not ent.is_string:
            return False
        return len(ent.data) == 1 and isinstance(ent.data[0], bytes)
    def format_string_constant(self, fmt: Formatter) -> str:
        # Render the data as a C string literal, stripping trailing NULs and
        # flagging the (unusual) case where there were none.
        assert self.is_string_constant(), "checked by caller"
        assert self.asm_data_entry and isinstance(self.asm_data_entry.data[0], bytes)
        has_trailing_null = False
        data = self.asm_data_entry.data[0]
        while data and data[-1] == 0:
            data = data[:-1]
            has_trailing_null = True
        data = b"".join(map(escape_byte, data))
        strdata = data.decode("utf-8", "backslashreplace")
        ret = f'"{strdata}"'
        if not has_trailing_null:
            ret += " /* not null-terminated */"
        return ret
    def format(self, fmt: Formatter) -> str:
        return self.symbol_name
    def potential_array_dim(self, element_size: int) -> Tuple[int, int]:
        """
        Using the size of the symbol's `asm_data_entry` and a potential array element
        size, return the corresponding array dimension and number of "extra" bytes left
        at the end of the symbol's data.
        If the extra bytes are nonzero, then it's likely that `element_size` is incorrect.
        """
        # If we don't have the .data/.rodata entry for this symbol, we can't guess
        # its array dimension. Jump tables are ignored and not treated as arrays.
        if self.asm_data_entry is None or self.asm_data_entry.is_jtbl:
            return 0, element_size
        min_data_size, max_data_size = self.asm_data_entry.size_range_bytes()
        if element_size > max_data_size:
            # The type is too big for the data (not an array)
            return 0, max_data_size
        # Check if it's possible that this symbol is not an array, and is just 1 element
        if min_data_size <= element_size <= max_data_size and not self.type.is_array():
            return 1, 0
        array_dim, extra_bytes = divmod(min_data_size, element_size)
        if extra_bytes != 0:
            # If it's not possible to make an exact multiple of element_size by incorporating
            # bytes from the padding, then indicate that in the return value.
            padding_bytes = element_size - extra_bytes
            if min_data_size + padding_bytes > max_data_size:
                return array_dim, extra_bytes
        # Include potential padding in the array. Although this is unlikely to match the original C,
        # it's much easier to manually remove all or some of these elements than to add them back in.
        return max_data_size // element_size, 0
@dataclass(frozen=True, eq=True)
class Literal(Expression):
    """A compile-time constant, printed according to its inferred type."""

    value: int
    type: Type = field(compare=False, default_factory=Type.any)
    # When True, skip pointer casts / unsigned suffixes (used for stores into
    # destinations of known type; see elide_casts_for_store).
    elide_cast: bool = field(compare=False, default=False)
    def dependencies(self) -> List[Expression]:
        return []
    def format(self, fmt: Formatter, force_dec: bool = False) -> str:
        # Enum constants print as their name, if the type maps this value.
        enum_name = self.type.get_enum_name(self.value)
        if enum_name is not None:
            return enum_name
        # Floats are stored as raw bit patterns; re-render as float literals.
        if self.type.is_likely_float():
            if self.type.get_size_bits() == 64:
                return format_f64_imm(self.value)
            else:
                return format_f32_imm(self.value) + "f"
        if self.type.is_pointer() and self.value == 0:
            return "NULL"
        prefix = ""
        suffix = ""
        if not fmt.skip_casts and not self.elide_cast:
            if self.type.is_pointer():
                prefix = f"({self.type.format(fmt)})"
            if self.type.is_unsigned():
                suffix = "U"
        if force_dec:
            value = str(self.value)
        else:
            size_bits = self.type.get_size_bits()
            v = self.value
            # The top 2 bits are tested rather than just the sign bit
            # to help prevent N64 VRAM pointers (0x80000000+) turning negative
            if (
                self.type.is_signed()
                and size_bits
                and v & (1 << (size_bits - 1))
                and v > (3 << (size_bits - 2))
                and v < 2 ** size_bits
            ):
                v -= 1 << size_bits
            value = fmt.format_int(v, size_bits=size_bits)
        return prefix + value + suffix
    def likely_partial_offset(self) -> bool:
        # Heuristic: values that look like the low/high half of a split
        # 32-bit address offset (multiple of 2^15 or just below one).
        return self.value % 2 ** 15 in (0, 2 ** 15 - 1) and self.value < 0x1000000
@dataclass(frozen=True, eq=True)
class AddressOf(Expression):
    """The C address-of operator, `&expr`."""

    expr: Expression
    type: Type = field(compare=False, default_factory=Type.ptr)

    def dependencies(self) -> List[Expression]:
        return [self.expr]

    def format(self, fmt: Formatter) -> str:
        inner = self.expr
        if isinstance(inner, GlobalSymbol):
            if inner.is_string_constant():
                return inner.format_string_constant(fmt)
            if inner.type.is_array():
                # Array names decay to pointers; no explicit `&` needed.
                return inner.format(fmt)
        if inner.type.is_function():
            # Functions are automatically converted to function pointers
            # without an explicit `&` by the compiler
            return inner.format(fmt)
        if isinstance(inner, StructAccess):
            # Simplify `&x[0]` into `x`
            ref = inner.make_reference()
            if ref is not None:
                return ref.format(fmt)
        return f"&{inner.format(fmt)}"
@dataclass(frozen=True)
class Lwl(Expression):
    """A partial load, printed as a MIPS2C_LWL() macro call."""

    load_expr: Expression
    key: Tuple[int, object]
    type: Type = field(compare=False, default_factory=Type.any_reg)

    def dependencies(self) -> List[Expression]:
        return [self.load_expr]

    def format(self, fmt: Formatter) -> str:
        inner = self.load_expr.format(fmt)
        return "MIPS2C_LWL(" + inner + ")"
@dataclass(frozen=True)
class Load3Bytes(Expression):
    """A load of the first 3 bytes of a word."""

    load_expr: Expression
    type: Type = field(compare=False, default_factory=Type.any_reg)

    def dependencies(self) -> List[Expression]:
        return [self.load_expr]

    def format(self, fmt: Formatter) -> str:
        inner = self.load_expr.format(fmt)
        if fmt.valid_syntax:
            # Compilable form: hide behind a macro.
            return f"MIPS2C_FIRST3BYTES({inner})"
        return f"(first 3 bytes) {inner}"
@dataclass(frozen=True)
class UnalignedLoad(Expression):
    """A 32-bit load from an unaligned address."""

    load_expr: Expression
    type: Type = field(compare=False, default_factory=Type.any_reg)

    def dependencies(self) -> List[Expression]:
        return [self.load_expr]

    def format(self, fmt: Formatter) -> str:
        inner = self.load_expr.format(fmt)
        if fmt.valid_syntax:
            # Compilable form: hide behind a macro.
            return f"MIPS2C_UNALIGNED32({inner})"
        return f"(unaligned s32) {inner}"
@dataclass(frozen=False, eq=False)
class EvalOnceExpr(Expression):
    """Wraps an expression so it is evaluated at most once: depending on how
    many times it is used, it is either printed inline or assigned to `var`
    and referenced through it."""

    wrapped_expr: Expression
    var: Var
    type: Type
    # True for function calls/errors
    emit_exactly_once: bool
    # Mutable state:
    # True if this EvalOnceExpr should be totally transparent and not emit a variable,
    # It may dynamically change from true to false due to forced emissions.
    # Initially, it is based on is_trivial_expression.
    trivial: bool
    # True if this EvalOnceExpr must emit a variable (see RegMeta.force)
    forced_emit: bool = False
    # The number of expressions that depend on this EvalOnceExpr; we emit a variable
    # if this is > 1.
    num_usages: int = 0
    def dependencies(self) -> List[Expression]:
        # (this is a bit iffy since state can change over time, but improves uses_expr)
        if self.need_decl():
            return []
        return [self.wrapped_expr]
    def use(self) -> None:
        # Record a usage; propagate to the wrappee only while we would still
        # print it inline (trivial, or first use of a non-forced expression).
        self.num_usages += 1
        if self.trivial or (self.num_usages == 1 and not self.emit_exactly_once):
            self.wrapped_expr.use()
    def force(self) -> None:
        # Transition to non-trivial, and mark as used multiple times to force a var.
        # TODO: If it was originally trivial, we may previously have marked its
        # wrappee used multiple times, even though we now know that it should
        # have been marked just once... We could fix that by moving marking of
        # trivial EvalOnceExpr's to the very end. At least the consequences of
        # getting this wrong are pretty mild -- it just causes extraneous var
        # emission in rare cases.
        self.trivial = False
        self.forced_emit = True
        self.use()
        self.use()
    def need_decl(self) -> bool:
        # A variable declaration is needed once a non-trivial expression is
        # used more than once.
        return self.num_usages > 1 and not self.trivial
    def format(self, fmt: Formatter) -> str:
        if not self.need_decl():
            return self.wrapped_expr.format(fmt)
        else:
            return self.var.format(fmt)
@dataclass(frozen=False, eq=False)
class PhiExpr(Expression):
    """A phi node: a register whose value at `node` depends on which
    predecessor branch was taken."""

    reg: Register
    node: Node
    type: Type
    # Shared accumulator: phis that end up used at least once are appended here.
    used_phis: List["PhiExpr"]
    name: Optional[str] = None
    num_usages: int = 0
    # If set, this phi collapsed to a single expression and prints as it.
    replacement_expr: Optional[Expression] = None
    # The unique phi that uses this one, if there is exactly one such user.
    used_by: Optional["PhiExpr"] = None
    def dependencies(self) -> List[Expression]:
        return []
    def get_var_name(self) -> str:
        return self.name or f"unnamed-phi({self.reg.register_name})"
    def use(self, from_phi: Optional["PhiExpr"] = None) -> None:
        if self.num_usages == 0:
            # First use: register ourselves, and remember the using phi.
            self.used_phis.append(self)
            self.used_by = from_phi
        self.num_usages += 1
        if self.used_by != from_phi:
            # Used from more than one place; no single forwarding target.
            self.used_by = None
        if self.replacement_expr is not None:
            self.replacement_expr.use()
    def propagates_to(self) -> "PhiExpr":
        """Compute the phi that stores to this phi should propagate to. This is
        usually the phi itself, but if the phi is only once for the purpose of
        computing another phi, we forward the store there directly. This is
        admittedly a bit sketchy, in case the phi is in scope here and used
        later on... but we have that problem with regular phi assignments as
        well."""
        if self.used_by is None or self.replacement_expr is not None:
            return self
        return self.used_by.propagates_to()
    def format(self, fmt: Formatter) -> str:
        if self.replacement_expr:
            return self.replacement_expr.format(fmt)
        return self.get_var_name()
@dataclass
class SwitchControl:
    """The control expression of a switch statement, together with the jump
    table it indexes (if one was recognized) and the offset subtracted from
    the control value before indexing."""

    control_expr: Expression
    jump_table: Optional[GlobalSymbol] = None
    offset: int = 0
    # True for switches implemented as a series of ifs/sub-switches rather
    # than a single jump table.
    is_irregular: bool = False
    def matches_guard_condition(self, cond: Condition) -> bool:
        """
        Return True if `cond` is one of:
        - `((control_expr + (-offset)) >= len(jump_table))`, if `offset != 0`
        - `(control_expr >= len(jump_table))`, if `offset == 0`
        These are the appropriate bounds checks before using `jump_table`.
        """
        cmp_expr = simplify_condition(cond)
        if not isinstance(cmp_expr, BinaryOp) or cmp_expr.op not in (">=", ">"):
            return False
        cmp_exclusive = cmp_expr.op == ">"
        # The LHS may have been wrapped in a u32 cast
        left_expr = late_unwrap(cmp_expr.left)
        if isinstance(left_expr, Cast):
            left_expr = late_unwrap(left_expr.expr)
        if self.offset != 0:
            # Expect `control_expr + (-offset)` on the left.
            if (
                not isinstance(left_expr, BinaryOp)
                or late_unwrap(left_expr.left) != late_unwrap(self.control_expr)
                or left_expr.op != "+"
                or late_unwrap(left_expr.right) != Literal(-self.offset)
            ):
                return False
        elif left_expr != late_unwrap(self.control_expr):
            return False
        right_expr = late_unwrap(cmp_expr.right)
        if (
            self.jump_table is None
            or self.jump_table.asm_data_entry is None
            or not self.jump_table.asm_data_entry.is_jtbl
            or not isinstance(right_expr, Literal)
        ):
            return False
        # Count the number of labels (exclude padding bytes)
        jump_table_len = sum(
            isinstance(e, str) for e in self.jump_table.asm_data_entry.data
        )
        return right_expr.value + int(cmp_exclusive) == jump_table_len
    @staticmethod
    def irregular_from_expr(control_expr: Expression) -> "SwitchControl":
        """
        Return a SwitchControl representing a "irregular" switch statement.
        The switch does not have a single jump table; instead it is a series of
        if statements & other switches.
        """
        return SwitchControl(
            control_expr=control_expr,
            jump_table=None,
            offset=0,
            is_irregular=True,
        )
    @staticmethod
    def from_expr(expr: Expression) -> "SwitchControl":
        """
        Try to convert `expr` into a SwitchControl from one of the following forms:
        - `*(&jump_table + (control_expr * 4))`
        - `*(&jump_table + ((control_expr + (-offset)) * 4))`
        If `offset` is not present, it defaults to 0.
        If `expr` does not match, return a thin wrapper around the input expression,
        with `jump_table` set to `None`.
        """
        # The "error" expression we use if we aren't able to parse `expr`
        error_expr = SwitchControl(expr)
        # Match `*(&jump_table + (control_expr * 4))`
        struct_expr = early_unwrap(expr)
        if not isinstance(struct_expr, StructAccess) or struct_expr.offset != 0:
            return error_expr
        add_expr = early_unwrap(struct_expr.struct_var)
        if not isinstance(add_expr, BinaryOp) or add_expr.op != "+":
            return error_expr
        # Check for either `*(&jump_table + (control_expr * 4))` and `*((control_expr * 4) + &jump_table)`
        left_expr, right_expr = early_unwrap(add_expr.left), early_unwrap(
            add_expr.right
        )
        if isinstance(left_expr, AddressOf) and isinstance(
            left_expr.expr, GlobalSymbol
        ):
            jtbl_addr_expr, mul_expr = left_expr, right_expr
        elif isinstance(right_expr, AddressOf) and isinstance(
            right_expr.expr, GlobalSymbol
        ):
            mul_expr, jtbl_addr_expr = left_expr, right_expr
        else:
            return error_expr
        jump_table = jtbl_addr_expr.expr
        assert isinstance(jump_table, GlobalSymbol)
        if (
            not isinstance(mul_expr, BinaryOp)
            or mul_expr.op != "*"
            or early_unwrap(mul_expr.right) != Literal(4)
        ):
            return error_expr
        control_expr = mul_expr.left
        # Optionally match `control_expr + (-offset)`
        offset = 0
        uw_control_expr = early_unwrap(control_expr)
        if isinstance(uw_control_expr, BinaryOp) and uw_control_expr.op == "+":
            offset_lit = early_unwrap(uw_control_expr.right)
            if isinstance(offset_lit, Literal):
                control_expr = uw_control_expr.left
                offset = -offset_lit.value
        # Check that it is really a jump table
        if jump_table.asm_data_entry is None or not jump_table.asm_data_entry.is_jtbl:
            return error_expr
        return SwitchControl(control_expr, jump_table, offset)
@dataclass
class EvalOnceStmt(Statement):
    """Statement form of an EvalOnceExpr: `var = expr;`, or a bare `expr;`
    for must-emit expressions whose value goes unused."""

    expr: EvalOnceExpr

    def need_decl(self) -> bool:
        return self.expr.need_decl()

    def should_write(self) -> bool:
        if self.expr.emit_exactly_once:
            # Exactly-once expressions are written unless exactly one use
            # site already prints them inline.
            return self.expr.num_usages != 1
        return self.need_decl()

    def format(self, fmt: Formatter) -> str:
        rhs = format_expr(elide_casts_for_store(self.expr.wrapped_expr), fmt)
        if self.expr.emit_exactly_once and self.expr.num_usages == 0:
            # Value is never read; emit the expression for its side effects.
            return f"{rhs};"
        return f"{self.expr.var.format(fmt)} = {rhs};"
@dataclass
class SetPhiStmt(Statement):
    """Assignment of a value to a phi variable."""

    phi: PhiExpr
    expr: Expression

    def should_write(self) -> bool:
        src = self.expr
        if isinstance(src, PhiExpr) and src.propagates_to() != src:
            # When we have phi1 = phi2, and phi2 is only used in this place,
            # the SetPhiStmt for phi2 will store directly to phi1 and we can
            # skip this store.
            assert src.propagates_to() == self.phi.propagates_to()
            return False
        # Elide "phi = phi".
        return late_unwrap(src) != self.phi.propagates_to()

    def format(self, fmt: Formatter) -> str:
        return format_assignment(self.phi.propagates_to(), self.expr, fmt)
@dataclass
class ExprStmt(Statement):
    """A bare expression evaluated as a statement, `expr;`."""

    expr: Expression

    def should_write(self) -> bool:
        return True

    def format(self, fmt: Formatter) -> str:
        return format_expr(self.expr, fmt) + ";"
@dataclass
class StoreStmt(Statement):
    """A store `dest = source;`, with cast elision when the destination's
    type is known."""

    source: Expression
    dest: Expression

    def should_write(self) -> bool:
        return True

    def format(self, fmt: Formatter) -> str:
        dest = self.dest
        source = self.source
        known_dest = isinstance(
            dest, (ArrayAccess, LocalVar, RegisterVar, SubroutineArg)
        ) or (isinstance(dest, StructAccess) and dest.late_has_known_type())
        if known_dest:
            # Known destination; fine to elide some casts.
            source = elide_casts_for_store(source)
        return format_assignment(dest, source, fmt)
@dataclass
class CommentStmt(Statement):
    """A literal `// comment` line in the output."""

    contents: str

    def should_write(self) -> bool:
        return True

    def format(self, fmt: Formatter) -> str:
        return "// " + self.contents
def error_stmt(msg: str) -> ExprStmt:
    """Wrap an error message in a statement that prints it inline."""
    err = ErrorExpr(msg)
    return ExprStmt(err)
@dataclass(frozen=True)
class AddressMode:
    """An `offset($reg)` memory operand."""

    offset: int
    rhs: Register

    def __str__(self) -> str:
        if not self.offset:
            return f"({self.rhs})"
        return f"{self.offset}({self.rhs})"
@dataclass(frozen=True)
class RawSymbolRef:
    """A `symbol + offset` memory operand."""

    offset: int
    sym: AsmGlobalSymbol

    def __str__(self) -> str:
        base = self.sym.symbol_name
        if not self.offset:
            return base
        return f"{base} + {self.offset}"
@dataclass
class RegMeta:
    """Bookkeeping flags attached to a register's current value."""

    # True if this regdata is unchanged from the start of the block
    inherited: bool = False
    # True if this regdata is read by some later node
    is_read: bool = False
    # True if the value derives solely from function call return values
    function_return: bool = False
    # True if the value derives solely from regdata's with is_read = True,
    # function_return = True, or is a passed in argument
    uninteresting: bool = False
    # True if the regdata must be replaced by variable if it is ever read
    force: bool = False
    # True if the regdata was assigned by an Instruction marked as in_pattern;
    # it was part of a matched IR pattern but couldn't be elided at the time
    in_pattern: bool = False
@dataclass
class RegData:
    # The expression currently held in a register, plus its flags (RegMeta).
    value: Expression
    meta: RegMeta
@dataclass
class RegInfo:
    """Mutable map from registers to their current symbolic values, with
    read/write validation against the active instruction's declared
    inputs/outputs."""

    stack_info: StackInfo = field(repr=False)
    contents: Dict[Register, RegData] = field(default_factory=dict)
    # Registers whose inherited (block-entry) values have been read.
    read_inherited: Set[Register] = field(default_factory=set)
    _active_instr: Optional[Instruction] = None
    def __getitem__(self, key: Register) -> Expression:
        # Reads of registers not declared as inputs produce an error expression
        # rather than raising, so output can still be generated.
        if self._active_instr is not None and key not in self._active_instr.inputs:
            lineno = self._active_instr.meta.lineno
            return ErrorExpr(f"Read from unset register {key} on line {lineno}")
        if key == Register("zero"):
            return Literal(0)
        data = self.contents.get(key)
        if data is None:
            return ErrorExpr(f"Read from unset register {key}")
        ret = data.value
        data.meta.is_read = True
        if data.meta.inherited:
            self.read_inherited.add(key)
        if isinstance(ret, PassedInArg) and not ret.copied:
            # Create a new argument object to better distinguish arguments we
            # are called with from arguments passed to subroutines. Also, unify
            # the argument's type with what we can guess from the register used.
            val, arg = self.stack_info.get_argument(ret.value)
            self.stack_info.add_argument(arg)
            val.type.unify(ret.type)
            return val
        if data.meta.force:
            # Forced values must be emitted through a variable (see RegMeta.force).
            assert isinstance(ret, EvalOnceExpr)
            ret.force()
        return ret
    def __contains__(self, key: Register) -> bool:
        return key in self.contents
    def __setitem__(self, key: Register, value: Expression) -> None:
        self.set_with_meta(key, value, RegMeta())
    def set_with_meta(self, key: Register, value: Expression, meta: RegMeta) -> None:
        # Checked write: the active instruction must declare `key` as an output.
        if self._active_instr is not None and key not in self._active_instr.outputs:
            raise DecompFailure(f"Undeclared write to {key} in {self._active_instr}")
        self.unchecked_set_with_meta(key, value, meta)
    def unchecked_set_with_meta(
        self, key: Register, value: Expression, meta: RegMeta
    ) -> None:
        assert key != Register("zero")
        self.contents[key] = RegData(value, meta)
    def __delitem__(self, key: Register) -> None:
        assert key != Register("zero")
        del self.contents[key]
    def get_raw(self, key: Register) -> Optional[Expression]:
        # Raw lookup: no input validation, no is_read/PassedInArg handling.
        data = self.contents.get(key)
        return data.value if data is not None else None
    def get_meta(self, key: Register) -> Optional[RegMeta]:
        data = self.contents.get(key)
        return data.meta if data is not None else None
    @contextmanager
    def current_instr(self, instr: Instruction) -> Iterator[None]:
        # Scope all reads/writes to `instr` for validation above.
        # NOTE(review): the inner `with current_instr(instr):` refers to a
        # module-level helper (not this method, which isn't in scope here) —
        # presumably it sets error-reporting context; confirm it is imported.
        self._active_instr = instr
        try:
            with current_instr(instr):
                yield
        finally:
            self._active_instr = None
    def __str__(self) -> str:
        return ", ".join(
            f"{k}: {v.value}"
            for k, v in sorted(self.contents.items(), key=lambda x: x[0].register_name)
            if not self.stack_info.should_save(v.value, None)
        )
@dataclass
class BlockInfo:
    """
    Contains translated assembly code (to_write), the block's branch condition,
    and block's final register states.
    """

    to_write: List[Statement]
    return_value: Optional[Expression]
    switch_control: Optional[SwitchControl]
    branch_condition: Optional[Condition]
    final_register_states: RegInfo
    has_function_call: bool

    def __str__(self) -> str:
        sep = "\n\t"
        stmts = sep.join(str(stmt) for stmt in self.statements_to_write())
        parts = [
            f"Statements: {stmts}",
            f"Branch condition: {self.branch_condition}",
            f"Final register states: {self.final_register_states}",
        ]
        return "\n".join(parts)

    def statements_to_write(self) -> List[Statement]:
        # Filter out elided statements (e.g. "phi = phi", unused temps).
        return [stmt for stmt in self.to_write if stmt.should_write()]
def get_block_info(node: Node) -> BlockInfo:
    """Fetch the BlockInfo attached to `node`'s block, asserting it is set."""
    info = node.block.block_info
    assert isinstance(info, BlockInfo)
    return info
@dataclass
class InstrArgs:
    """Typed accessors over one instruction's raw argument list, resolving
    registers through the current register state."""

    raw_args: List[Argument]
    regs: RegInfo = field(repr=False)
    stack_info: StackInfo = field(repr=False)
    def raw_arg(self, index: int) -> Argument:
        # Bounds-checked access to the raw argument at `index`.
        assert index >= 0
        if index >= len(self.raw_args):
            raise DecompFailure(
                f"Too few arguments for instruction, expected at least {index + 1}"
            )
        return self.raw_args[index]
    def reg_ref(self, index: int) -> Register:
        # The argument itself as a Register (not its current value).
        ret = self.raw_arg(index)
        if not isinstance(ret, Register):
            raise DecompFailure(
                f"Expected instruction argument to be a register, but found {ret}"
            )
        return ret
    def imm_value(self, index: int) -> int:
        # Immediate argument as a plain int; asserts it is a Literal.
        arg = self.full_imm(index)
        assert isinstance(arg, Literal)
        return arg.value
    def reg(self, index: int) -> Expression:
        # Current symbolic value held in the register argument.
        return self.regs[self.reg_ref(index)]
    def dreg(self, index: int) -> Expression:
        """Extract a double from a register. This may involve reading both the
        mentioned register and the next."""
        reg = self.reg_ref(index)
        if not reg.is_float():
            raise DecompFailure(
                f"Expected instruction argument {reg} to be a float register"
            )
        ret = self.regs[reg]
        # PPC: FPR's hold doubles (64 bits), so we don't need to do anything special
        if self.stack_info.global_info.arch.arch == Target.ArchEnum.PPC:
            return ret
        # MIPS: Look at the paired FPR to get the full 64-bit value
        if not isinstance(ret, Literal) or ret.type.get_size_bits() == 64:
            return ret
        reg_num = int(reg.register_name[1:])
        if reg_num % 2 != 0:
            raise DecompFailure(
                "Tried to use a double-precision instruction with odd-numbered float "
                f"register {reg}"
            )
        other = self.regs[Register(f"f{reg_num+1}")]
        if not isinstance(other, Literal) or other.type.get_size_bits() == 64:
            raise DecompFailure(
                f"Unable to determine a value for double-precision register {reg} "
                "whose second half is non-static. This is a mips_to_c restriction "
                "which may be lifted in the future."
            )
        # Combine the two 32-bit halves into one 64-bit double constant.
        value = ret.value | (other.value << 32)
        return Literal(value, type=Type.f64())
    def cmp_reg(self, key: str) -> Condition:
        # Read a register as a boolean condition, comparing against 0 if needed.
        cond = self.regs[Register(key)]
        if not isinstance(cond, Condition):
            cond = BinaryOp.icmp(cond, "!=", Literal(0))
        return cond
    def full_imm(self, index: int) -> Expression:
        # Immediate argument with assembler macros stripped, as an Expression.
        arg = strip_macros(self.raw_arg(index))
        ret = literal_expr(arg, self.stack_info)
        return ret
    def imm(self, index: int) -> Expression:
        # Immediate sign-extended from its low 16 bits.
        ret = self.full_imm(index)
        if isinstance(ret, Literal):
            return Literal(((ret.value + 0x8000) & 0xFFFF) - 0x8000)
        return ret
    def unsigned_imm(self, index: int) -> Expression:
        # Immediate truncated to its low 16 bits, zero-extended.
        ret = self.full_imm(index)
        if isinstance(ret, Literal):
            return Literal(ret.value & 0xFFFF)
        return ret
    def hi_imm(self, index: int) -> Argument:
        # Argument of a %hi/@ha/@h macro (for lui/lis instructions).
        arg = self.raw_arg(index)
        if not isinstance(arg, Macro) or arg.macro_name not in ("hi", "ha", "h"):
            raise DecompFailure(
                f"Got lui/lis instruction with macro other than %hi/@ha/@h: {arg}"
            )
        return arg.argument
    def shifted_imm(self, index: int) -> Expression:
        # TODO: Should this be part of hi_imm? Do we need to handle @ha?
        raw_imm = self.unsigned_imm(index)
        assert isinstance(raw_imm, Literal)
        return Literal(raw_imm.value << 16)
    def memory_ref(self, index: int) -> Union[AddressMode, RawSymbolRef]:
        ret = strip_macros(self.raw_arg(index))
        # In MIPS, we want to allow "lw $v0, symbol + 4", which is outputted by
        # some disassemblers (like IDA) even though it isn't valid assembly.
        # For PPC, we want to allow "lwz $r1, symbol@sda21($r13)" where $r13 is
        # assumed to point to the start of a small data area (SDA).
        if isinstance(ret, AsmGlobalSymbol):
            return RawSymbolRef(offset=0, sym=ret)
        if (
            isinstance(ret, BinOp)
            and ret.op in "+-"
            and isinstance(ret.lhs, AsmGlobalSymbol)
            and isinstance(ret.rhs, AsmLiteral)
        ):
            sign = 1 if ret.op == "+" else -1
            return RawSymbolRef(offset=(ret.rhs.value * sign), sym=ret.lhs)
        if not isinstance(ret, AsmAddressMode):
            raise DecompFailure(
                "Expected instruction argument to be of the form offset($register), "
                f"but found {ret}"
            )
        if not isinstance(ret.lhs, AsmLiteral):
            raise DecompFailure(
                f"Unable to parse offset for instruction argument {ret}. "
                "Expected a constant or a %lo macro."
            )
        return AddressMode(offset=ret.lhs.signed_value(), rhs=ret.rhs)
    def count(self) -> int:
        return len(self.raw_args)
def deref(
    arg: Union[AddressMode, RawSymbolRef, Expression],
    regs: RegInfo,
    stack_info: StackInfo,
    *,
    size: int,
    store: bool = False,
) -> Expression:
    # Turn a memory operand into a dereference expression (stack variable,
    # array access, or struct access) of `size` bytes.
    if isinstance(arg, Expression):
        offset = 0
        var = arg
    elif isinstance(arg, AddressMode):
        offset = arg.offset
        if stack_info.is_stack_reg(arg.rhs):
            # Accesses relative to the stack pointer become stack variables.
            return stack_info.get_stack_var(offset, store=store)
        var = regs[arg.rhs]
    else:
        offset = arg.offset
        var = stack_info.global_info.address_of_gsym(arg.sym.symbol_name)
    # Struct member is being dereferenced.
    # Cope slightly better with raw pointers.
    if isinstance(var, Literal) and var.value % (2 ** 16) == 0:
        var = Literal(var.value + offset, type=var.type)
        offset = 0
    # Handle large struct offsets.
    uw_var = early_unwrap(var)
    if isinstance(uw_var, BinaryOp) and uw_var.op == "+":
        # Fold a likely address-half addend into the access offset.
        for base, addend in [(uw_var.left, uw_var.right), (uw_var.right, uw_var.left)]:
            if isinstance(addend, Literal) and addend.likely_partial_offset():
                offset += addend.value
                var = base
                uw_var = early_unwrap(var)
                break
    var.type.unify(Type.ptr())
    stack_info.record_struct_access(var, offset)
    field_name: Optional[str] = None
    type: Type = stack_info.unique_type_for("struct", (uw_var, offset), Type.any())
    # Struct access with type information.
    array_expr = array_access_from_add(
        var, offset, stack_info, target_size=size, ptr=False
    )
    if array_expr is not None:
        return array_expr
    field_path, field_type, _ = var.type.get_deref_field(offset, target_size=size)
    if field_path is not None:
        field_type.unify(type)
        type = field_type
    else:
        field_path = None
    return StructAccess(
        struct_var=var,
        offset=offset,
        target_size=size,
        field_path=field_path,
        stack_info=stack_info,
        type=type,
    )
def is_trivial_expression(expr: Expression) -> bool:
    """Determine whether an expression should be evaluated only once or not."""
    trivial_types = (
        EvalOnceExpr,
        GlobalSymbol,
        Literal,
        LocalVar,
        PassedInArg,
        PhiExpr,
        RegisterVar,
        SubroutineArg,
    )
    if isinstance(expr, trivial_types):
        return True
    if isinstance(expr, Cast):
        return expr.is_trivial()
    if isinstance(expr, AddressOf):
        # &x is trivial iff everything it refers to is.
        return all(is_trivial_expression(dep) for dep in expr.dependencies())
    return False
def is_type_obvious(expr: Expression) -> bool:
    """
    Determine whether an expression's type is "obvious", e.g. because the
    expression refers to a variable which has a declaration. With perfect type
    information this function would not be needed.

    This function may produce wrong results while code is being generated,
    since at that point we don't know the final status of EvalOnceExpr's.
    """
    if isinstance(expr, EvalOnceExpr):
        # Declared temps have obvious types; otherwise look through the wrapper.
        return expr.need_decl() or is_type_obvious(expr.wrapped_expr)
    obvious_types = (
        AddressOf,
        Cast,
        FuncCall,
        Literal,
        LocalVar,
        PassedInArg,
        PhiExpr,
        RegisterVar,
    )
    return isinstance(expr, obvious_types)
def simplify_condition(expr: Expression) -> Expression:
    """
    Simplify a boolean expression.

    This function may produce wrong results while code is being generated,
    since at that point we don't know the final status of EvalOnceExpr's.
    """
    if isinstance(expr, EvalOnceExpr) and not expr.need_decl():
        return simplify_condition(expr.wrapped_expr)
    if isinstance(expr, UnaryOp):
        inner = simplify_condition(expr.expr)
        if expr.op == "!" and isinstance(inner, Condition):
            # `!(a < b)` -> `(a >= b)` etc.
            return inner.negated()
        return UnaryOp(expr=inner, op=expr.op, type=expr.type)
    if isinstance(expr, BinaryOp):
        left = simplify_condition(expr.left)
        right = simplify_condition(expr.right)
        if isinstance(left, BinaryOp) and left.is_comparison() and right == Literal(0):
            # `(a < b) == 0` -> `a >= b`; `(a < b) != 0` -> `a < b`.
            if expr.op == "==":
                return simplify_condition(left.negated())
            if expr.op == "!=":
                return left
        if (
            expr.is_comparison()
            and isinstance(left, Literal)
            and not isinstance(right, Literal)
        ):
            # Normalize `5 < x` to `x > 5` (flip the comparison direction).
            return BinaryOp(
                left=right,
                op=expr.op.translate(str.maketrans("<>", "><")),
                right=left,
                type=expr.type,
            )
        return BinaryOp(left=left, op=expr.op, right=right, type=expr.type)
    return expr
def balanced_parentheses(string: str) -> bool:
    """
    Check if parentheses in a string are balanced, ignoring any non-parenthesis
    characters. E.g. true for "(x())yz", false for ")(" or "(".
    """
    depth = 0
    for ch in string:
        if ch == "(":
            depth += 1
        elif ch == ")":
            depth -= 1
            if depth < 0:
                # More closers than openers so far.
                return False
    return depth == 0
def format_expr(expr: Expression, fmt: Formatter) -> str:
    """Stringify an expression, stripping unnecessary parentheses around it."""
    text = expr.format(fmt)
    stripped = text[1:-1]
    # Only strip if the outermost parens actually wrap the whole expression.
    if text.startswith("(") and balanced_parentheses(stripped):
        return stripped
    return text
def format_assignment(dest: Expression, source: Expression, fmt: Formatter) -> str:
    """Stringify `dest = source;`, preferring compound assignment (`x += y;`)
    when the source is a binary op involving the destination itself."""
    dest = late_unwrap(dest)
    source = late_unwrap(source)
    if isinstance(source, BinaryOp) and source.op in COMPOUND_ASSIGNMENT_OPS:
        rhs: Optional[Expression] = None
        if late_unwrap(source.left) == dest:
            rhs = source.right
        elif source.op in ASSOCIATIVE_OPS and late_unwrap(source.right) == dest:
            # `dest = y op dest` works as `dest op= y` only for associative ops.
            rhs = source.left
        if rhs is not None:
            return f"{dest.format(fmt)} {source.op}= {format_expr(rhs, fmt)};"
    return f"{dest.format(fmt)} = {format_expr(source, fmt)};"
def parenthesize_for_struct_access(expr: Expression, fmt: Formatter) -> str:
    # Nested dereferences may need to be parenthesized. All other
    # expressions will already have adequate parentheses added to them.
    text = expr.format(fmt)
    needs_parens = text.startswith(("*", "&")) or (
        isinstance(expr, Cast) and expr.needed_for_store()
    )
    return f"({text})" if needs_parens else text
def elide_casts_for_store(expr: Expression) -> Expression:
    """Strip redundant casts (and integer literal suffixes) from a value
    being stored into a destination of known type."""
    inner = late_unwrap(expr)
    if isinstance(inner, Cast) and not inner.needed_for_store():
        return elide_casts_for_store(inner.expr)
    if isinstance(inner, Literal) and inner.type.is_int():
        # Avoid suffixes for unsigned ints
        return replace(inner, elide_cast=True)
    return inner
def uses_expr(expr: Expression, expr_filter: Callable[[Expression], bool]) -> bool:
    """Recursively check whether `expr` or any of its dependencies satisfies
    `expr_filter`."""
    if expr_filter(expr):
        return True
    return any(uses_expr(dep, expr_filter) for dep in expr.dependencies())
def late_unwrap(expr: Expression) -> Expression:
    """
    Unwrap EvalOnceExpr's, stopping at variable boundaries.

    This function may produce wrong results while code is being generated,
    since at that point we don't know the final status of EvalOnceExpr's.
    """
    if isinstance(expr, PhiExpr) and expr.replacement_expr is not None:
        return late_unwrap(expr.replacement_expr)
    if isinstance(expr, EvalOnceExpr) and not expr.need_decl():
        return late_unwrap(expr.wrapped_expr)
    return expr
def early_unwrap(expr: Expression) -> Expression:
    """
    Unwrap EvalOnceExpr's, even past variable boundaries.

    This is fine to use even while code is being generated, but disrespects decisions
    to use a temp for a value, so use with care.
    """
    while (
        isinstance(expr, EvalOnceExpr)
        and not expr.forced_emit
        and not expr.emit_exactly_once
    ):
        expr = expr.wrapped_expr
    return expr
def early_unwrap_ints(expr: Expression) -> Expression:
    """
    Unwrap EvalOnceExpr's, even past variable boundaries or through int Cast's
    This is a bit sketchier than early_unwrap(), but can be used for pattern matching.
    """
    inner = early_unwrap(expr)
    if isinstance(inner, Cast) and inner.reinterpret and inner.type.is_int():
        return early_unwrap_ints(inner.expr)
    return inner
def unwrap_deep(expr: Expression) -> Expression:
    """
    Unwrap EvalOnceExpr's, even past variable boundaries.
    This is generally a sketchy thing to do, try to avoid it. In particular:
    - the returned expression is not usable for emission, because it may contain
      accesses at an earlier point in time or an expression that should not be repeated.
    - just because unwrap_deep(a) == unwrap_deep(b) doesn't mean a and b are
      interchangable, because they may be computed in different places.
    """
    while isinstance(expr, EvalOnceExpr):
        expr = expr.wrapped_expr
    return expr
def literal_expr(arg: Argument, stack_info: StackInfo) -> Expression:
    """Convert a compile-time-constant instruction argument into an Expression.

    Supports raw literals, global-symbol addresses, and binary expressions
    built from those; anything else is a decompilation failure.
    """
    if isinstance(arg, AsmLiteral):
        return Literal(arg.value)
    if isinstance(arg, AsmGlobalSymbol):
        return stack_info.global_info.address_of_gsym(arg.symbol_name)
    if isinstance(arg, BinOp):
        # Recursively lower both operands, then combine them.
        return BinaryOp.int(
            left=literal_expr(arg.lhs, stack_info),
            op=arg.op,
            right=literal_expr(arg.rhs, stack_info),
        )
    raise DecompFailure(f"Instruction argument {arg} must be a literal")
def imm_add_32(expr: Expression) -> Expression:
    """Add 32 to `expr`, constant-folding when it is already a Literal."""
    if isinstance(expr, Literal):
        return as_intish(Literal(expr.value + 32))
    return BinaryOp.int(expr, "+", Literal(32))
def fn_op(fn_name: str, args: List[Expression], type: Type) -> FuncCall:
    """Build a call to a function named `fn_name`, deriving its signature
    from the argument types and the given return `type`."""
    params = [FunctionParam(type=arg.type) for arg in args]
    signature = FunctionSignature(
        return_type=type,
        params=params,
        params_known=True,
        is_variadic=False,
    )
    callee = GlobalSymbol(symbol_name=fn_name, type=Type.function(signature))
    return FuncCall(function=callee, args=args, type=type)
def void_fn_op(fn_name: str, args: List[Expression]) -> ExprStmt:
    """Like fn_op, but for a call whose result is discarded: mark the call
    as used and wrap it in an expression statement."""
    call = fn_op(fn_name, args, Type.any_reg())
    call.use()
    return ExprStmt(call)
def load_upper(args: InstrArgs) -> Expression:
    """Handle lui/lis: load the upper 16 bits of a symbol address.

    By this point plain literal arguments have been normalized to `li`, so the
    argument must be a %hi/@ha macro referencing a global symbol (optionally
    with a constant offset).
    """
    arg = args.raw_arg(1)
    if not isinstance(arg, Macro):
        assert not isinstance(
            arg, Literal
        ), "normalize_instruction should convert lui/lis <literal> to li"
        raise DecompFailure(
            f"lui/lis argument must be a literal or %hi/@ha macro, found {arg}"
        )
    # The macro argument is either `sym` or `sym +/- offset`.
    hi_arg = args.hi_imm(1)
    if (
        isinstance(hi_arg, BinOp)
        and hi_arg.op in "+-"
        and isinstance(hi_arg.lhs, AsmGlobalSymbol)
        and isinstance(hi_arg.rhs, AsmLiteral)
    ):
        sym = hi_arg.lhs
        offset = hi_arg.rhs.value * (-1 if hi_arg.op == "-" else 1)
    elif isinstance(hi_arg, AsmGlobalSymbol):
        sym = hi_arg
        offset = 0
    else:
        raise DecompFailure(f"Invalid %hi/@ha argument {hi_arg}")
    stack_info = args.stack_info
    # Treat the result as `&sym + offset`; handle_addi_real does the rest of
    # the pointer bookkeeping.
    source = stack_info.global_info.address_of_gsym(sym.symbol_name)
    imm = Literal(offset)
    return handle_addi_real(args.reg_ref(0), None, source, imm, stack_info)
def handle_convert(expr: Expression, dest_type: Type, source_type: Type) -> Cast:
    """Build a value-converting (non-reinterpreting) Cast from `source_type`
    to `dest_type`, unifying the operand's type with the source type."""
    # int <-> float casts should be explicit
    is_silent = dest_type.data().kind != source_type.data().kind
    expr.type.unify(source_type)
    return Cast(expr=expr, type=dest_type, silent=is_silent, reinterpret=False)
def handle_la(args: InstrArgs) -> Expression:
    """Handle `la` (load address) of a memory reference.

    A register-relative AddressMode is re-dispatched as an equivalent addi;
    otherwise the result is the address of the referenced global symbol plus
    the constant offset.
    """
    target = args.memory_ref(1)
    stack_info = args.stack_info
    if isinstance(target, AddressMode):
        # Equivalent to `addi dest, base, offset`.
        return handle_addi(
            InstrArgs(
                raw_args=[args.reg_ref(0), target.rhs, AsmLiteral(target.offset)],
                regs=args.regs,
                stack_info=args.stack_info,
            )
        )
    var = stack_info.global_info.address_of_gsym(target.sym.symbol_name)
    return add_imm(var, Literal(target.offset), stack_info)
def handle_or(left: Expression, right: Expression) -> Expression:
    """Translate a bitwise `or`, folding the register-move and constant cases."""
    if left == right:
        # `or $rD, $rS, $rS` can be used to move $rS into $rD
        return left
    if isinstance(left, Literal) and isinstance(right, Literal):
        # Fold `or` of disjoint high/low halves into a single constant.
        left_is_high = (left.value & 0xFFFF) == 0 and (right.value & 0xFFFF0000) == 0
        right_is_high = (right.value & 0xFFFF) == 0 and (left.value & 0xFFFF0000) == 0
        if left_is_high or right_is_high:
            return Literal(value=(left.value | right.value))
    # Regular bitwise OR.
    return BinaryOp.int(left=left, op="|", right=right)
def handle_sltu(args: InstrArgs) -> Expression:
    """Translate `sltu` (set on unsigned less-than)."""
    rhs = args.reg(2)
    if args.reg_ref(1) != Register("zero"):
        # General case: a plain unsigned comparison.
        return BinaryOp.ucmp(args.reg(1), "<", rhs)
    # (0U < x) is equivalent to (x != 0)
    unwrapped = early_unwrap(rhs)
    if isinstance(unwrapped, BinaryOp) and unwrapped.op == "^":
        # ((a ^ b) != 0) is equivalent to (a != b)
        return BinaryOp.icmp(unwrapped.left, "!=", unwrapped.right)
    return BinaryOp.icmp(rhs, "!=", Literal(0))
def handle_sltiu(args: InstrArgs) -> Expression:
    """Translate `sltiu` (set on unsigned less-than with an immediate)."""
    lhs = args.reg(1)
    imm = args.imm(2)
    if isinstance(imm, Literal):
        value = imm.value & 0xFFFFFFFF
        if value == 1:
            # (x < 1U) is equivalent to (x == 0)
            unwrapped = early_unwrap(lhs)
            if isinstance(unwrapped, BinaryOp) and unwrapped.op == "^":
                # ((a ^ b) == 0) is equivalent to (a == b)
                return BinaryOp.icmp(unwrapped.left, "==", unwrapped.right)
            return BinaryOp.icmp(lhs, "==", Literal(0))
        # Reduce the immediate modulo 2^32 before comparing.
        imm = Literal(value)
    return BinaryOp.ucmp(lhs, "<", imm)
def handle_addi(args: InstrArgs) -> Expression:
    """Handle addi/addiu: add an immediate to a register."""
    stack_info = args.stack_info
    source_reg = args.reg_ref(1)
    source = args.reg(1)
    imm = args.imm(2)
    # `(x + 0xEDCC)` is emitted as `((x + 0x10000) - 0x1234)`,
    # i.e. as an `addis` followed by an `addi`
    uw_source = early_unwrap(source)
    if (
        isinstance(uw_source, BinaryOp)
        and uw_source.op == "+"
        and isinstance(uw_source.right, Literal)
        and uw_source.right.value % 0x10000 == 0
        and isinstance(imm, Literal)
    ):
        # Fold both constants into a single addition.
        return add_imm(
            uw_source.left, Literal(imm.value + uw_source.right.value), stack_info
        )
    return handle_addi_real(args.reg_ref(0), source_reg, source, imm, stack_info)
def handle_addis(args: InstrArgs) -> Expression:
    """Handle PPC `addis`: add a 16-bit immediate shifted left by 16."""
    stack_info = args.stack_info
    return handle_addi_real(
        args.reg_ref(0),
        args.reg_ref(1),
        args.reg(1),
        args.shifted_imm(2),
        stack_info,
    )
def handle_addi_real(
    output_reg: Register,
    source_reg: Optional[Register],
    source: Expression,
    imm: Expression,
    stack_info: StackInfo,
) -> Expression:
    """Shared core of immediate-addition handling (addi/addis/lui/addu).

    An addition to the stack pointer is interpreted as taking the address of
    a local variable; everything else is delegated to add_imm.
    """
    if source_reg is not None and stack_info.is_stack_reg(source_reg):
        # Adding to sp, i.e. passing an address.
        assert isinstance(imm, Literal)
        if stack_info.is_stack_reg(output_reg):
            # Changing sp. Just ignore that.
            return source
        # Keep track of all local variables that we take addresses of.
        var = stack_info.get_stack_var(imm.value, store=False)
        if isinstance(var, LocalVar):
            stack_info.add_local_var(var)
        return AddressOf(var, type=var.type.reference())
    else:
        return add_imm(source, imm, stack_info)
def add_imm(source: Expression, imm: Expression, stack_info: StackInfo) -> Expression:
    """Add `imm` to `source`, producing a typed expression.

    Pointer-typed sources get array-index / struct-field aware handling;
    literal + literal folds to a literal; anything else becomes a plain `+`.
    """
    if imm == Literal(0):
        # addiu $reg1, $reg2, 0 is a move
        # (this happens when replacing %lo(...) by 0)
        return source
    elif source.type.is_pointer_or_array():
        # Pointer addition (this may miss some pointers that get detected later;
        # unfortunately that's hard to do anything about with mips_to_c's single-pass
        # architecture).
        if isinstance(imm, Literal) and not imm.likely_partial_offset():
            array_access = array_access_from_add(
                source, imm.value, stack_info, target_size=None, ptr=True
            )
            if array_access is not None:
                return array_access
            # Try to interpret the offset as a field of the pointed-to struct,
            # yielding `&source->field`.
            field_path, field_type, _ = source.type.get_deref_field(
                imm.value, target_size=None
            )
            if field_path is not None:
                return AddressOf(
                    StructAccess(
                        struct_var=source,
                        offset=imm.value,
                        target_size=None,
                        field_path=field_path,
                        stack_info=stack_info,
                        type=field_type,
                    ),
                    type=field_type.reference(),
                )
        if isinstance(imm, Literal):
            target = source.type.get_pointer_target()
            if target:
                target_size = target.get_size_bytes()
                if target_size and imm.value % target_size == 0:
                    # Pointer addition.
                    return BinaryOp(
                        left=source, op="+", right=as_intish(imm), type=source.type
                    )
        # Fallback: pointer math with unknown stride; result is a generic pointer.
        return BinaryOp(left=source, op="+", right=as_intish(imm), type=Type.ptr())
    elif isinstance(source, Literal) and isinstance(imm, Literal):
        return Literal(source.value + imm.value)
    else:
        # Regular binary addition.
        return BinaryOp.intptr(left=source, op="+", right=imm)
def handle_load(args: InstrArgs, type: Type) -> Expression:
    """Handle a load instruction with the given target `type`.

    Dereferences the memory operand; loads of read-only, float-typed globals
    are folded into Literal bit patterns so constants print as floats.
    """
    # For now, make the cast silent so that output doesn't become cluttered.
    # Though really, it would be great to expose the load types somehow...
    size = type.get_size_bytes()
    assert size is not None
    expr = deref(args.memory_ref(1), args.regs, args.stack_info, size=size)
    # Detect rodata constants
    if isinstance(expr, StructAccess) and expr.offset == 0:
        target = early_unwrap(expr.struct_var)
        if (
            isinstance(target, AddressOf)
            and isinstance(target.expr, GlobalSymbol)
            and type.is_likely_float()
        ):
            sym_name = target.expr.symbol_name
            ent = args.stack_info.global_info.asm_data_value(sym_name)
            if (
                ent
                and ent.data
                and isinstance(ent.data[0], bytes)
                and len(ent.data[0]) >= size
                and ent.is_readonly
                and type.unify(target.expr.type)
            ):
                # Reinterpret the big-endian data bytes as an integer literal
                # carrying the float type.
                data = ent.data[0][:size]
                val: int
                if size == 4:
                    (val,) = struct.unpack(">I", data)
                else:
                    (val,) = struct.unpack(">Q", data)
                return Literal(value=val, type=type)
    return as_type(expr, type, silent=True)
def deref_unaligned(
    arg: Union[AddressMode, RawSymbolRef],
    regs: RegInfo,
    stack_info: StackInfo,
    *,
    store: bool = False,
) -> Expression:
    """Dereference `arg` for an unaligned load/store.

    We don't know the correct size to pass to deref. Passing None would signal
    that we are taking an address, cause us to prefer entire substructs as
    referenced fields, which would be confusing. Instead, we lie and pass 1.
    Hopefully nothing bad will happen...
    """
    return deref(arg, regs, stack_info, size=1, store=store)
def handle_lwl(args: InstrArgs) -> Expression:
    """Handle `lwl`, the unaligned load of a register's left part.

    lwl can technically merge with a pre-existing lwr, but doesn't in
    practice, so we treat this as a standard destination-first operation.
    The key identifies the source location so a later lwr can pair up.
    """
    ref = args.memory_ref(1)
    load_expr = deref_unaligned(ref, args.regs, args.stack_info)
    key: Tuple[int, object]
    if isinstance(ref, AddressMode):
        key = (ref.offset, args.regs[ref.rhs])
    else:
        key = (ref.offset, ref.sym)
    return Lwl(load_expr, key)
def handle_lwr(args: InstrArgs) -> Expression:
    """Handle `lwr`, the unaligned load of a register's right part.

    This lwr may merge with an existing lwl, if it loads from the same target
    but with an offset that's +3.
    """
    uw_old_value = early_unwrap(args.reg(0))
    ref = args.memory_ref(1)
    lwl_key: Tuple[int, object]
    if isinstance(ref, AddressMode):
        lwl_key = (ref.offset - 3, args.regs[ref.rhs])
    else:
        lwl_key = (ref.offset - 3, ref.sym)
    # NOTE(review): only the offset component of the key is compared here,
    # not the base register/symbol — confirm this is intentional.
    if isinstance(uw_old_value, Lwl) and uw_old_value.key[0] == lwl_key[0]:
        # Matched a previous lwl: together they form one full unaligned load.
        return UnalignedLoad(uw_old_value.load_expr)
    if ref.offset % 4 == 2:
        # No matching lwl, but a half-aligned offset: treat as a 3-byte load.
        left_mem_ref = replace(ref, offset=ref.offset - 2)
        load_expr = deref_unaligned(left_mem_ref, args.regs, args.stack_info)
        return Load3Bytes(load_expr)
    return ErrorExpr("Unable to handle lwr; missing a corresponding lwl")
def make_store(args: InstrArgs, type: Type) -> Optional[StoreStmt]:
    """Build a StoreStmt for a store instruction of the given `type`.

    Returns None when the store just preserves a callee-saved register on the
    stack, which should not appear in the output.
    """
    size = type.get_size_bytes()
    assert size is not None
    stack_info = args.stack_info
    source_reg = args.reg_ref(0)
    source_raw = args.regs.get_raw(source_reg)
    # 8-byte float stores read a double register pair.
    if type.is_likely_float() and size == 8:
        source_val = args.dreg(0)
    else:
        source_val = args.reg(0)
    target = args.memory_ref(1)
    is_stack = isinstance(target, AddressMode) and stack_info.is_stack_reg(target.rhs)
    if (
        is_stack
        and source_raw is not None
        and stack_info.should_save(source_raw, target.offset)
    ):
        # Elide register preserval.
        return None
    dest = deref(target, args.regs, stack_info, size=size, store=True)
    dest.type.unify(type)
    return StoreStmt(source=as_type(source_val, type, silent=is_stack), dest=dest)
def make_storex(args: InstrArgs, type: Type) -> Optional[StoreStmt]:
    """Handle PPC "indexed stores": `stwx rS, rA, rB` writes rS into (rA + rB)."""
    size = type.get_size_bytes()
    assert size is not None
    value = args.reg(0)
    addr = BinaryOp.intptr(left=args.reg(1), op="+", right=args.reg(2))
    # TODO: Can we assume storex's are never used to save registers to the stack?
    dest = deref(addr, args.regs, args.stack_info, size=size, store=True)
    dest.type.unify(type)
    return StoreStmt(source=as_type(value, type, silent=False), dest=dest)
def handle_swl(args: InstrArgs) -> Optional[StoreStmt]:
    """Handle `swl`.

    swl in practice only occurs together with swr, so we can treat it as a
    regular store, with the expression wrapped in UnalignedLoad if needed.
    """
    value = args.reg(0)
    target = args.memory_ref(1)
    if not isinstance(early_unwrap(value), UnalignedLoad):
        value = UnalignedLoad(value)
    dest = deref_unaligned(target, args.regs, args.stack_info, store=True)
    return StoreStmt(source=value, dest=dest)
def handle_swr(args: InstrArgs) -> Optional[StoreStmt]:
    """Handle `swr`; only emits a store for the 3-byte case (see handle_lwr)."""
    value = early_unwrap(args.reg(0))
    target = args.memory_ref(1)
    if not isinstance(value, Load3Bytes):
        # Elide swr's that don't come from 3-byte-loading lwr's; they probably
        # come with a corresponding swl which has already been emitted.
        return None
    shifted_target = replace(target, offset=target.offset - 2)
    dest = deref_unaligned(shifted_target, args.regs, args.stack_info, store=True)
    return StoreStmt(source=value, dest=dest)
def handle_sra(args: InstrArgs) -> Expression:
    """Handle `sra` (arithmetic shift right).

    Shifts by 16/24 that undo an earlier `<<`/`*` are recognized as sign
    extensions and emitted as casts to s16/s8; everything else becomes a
    signed `>>`, possibly folded into a division by fold_divmod.
    """
    lhs = args.reg(1)
    shift = args.imm(2)
    if isinstance(shift, Literal) and shift.value in [16, 24]:
        expr = early_unwrap(lhs)
        pow2 = 1 << shift.value
        if isinstance(expr, BinaryOp) and isinstance(expr.right, Literal):
            tp = Type.s16() if shift.value == 16 else Type.s8()
            rhs = expr.right.value
            if expr.op == "<<" and rhs == shift.value:
                # (x << N) >> N: a plain sign-extending cast.
                return as_type(expr.left, tp, silent=False)
            elif expr.op == "<<" and rhs > shift.value:
                # (x << M) >> N with M > N: cast of (x << (M - N)).
                new_shift = fold_mul_chains(
                    BinaryOp.int(expr.left, "<<", Literal(rhs - shift.value))
                )
                return as_type(new_shift, tp, silent=False)
            elif expr.op == "*" and rhs % pow2 == 0 and rhs != pow2:
                # (x * (K << N)) >> N: cast of (x * K).
                mul = BinaryOp.int(expr.left, "*", Literal(value=rhs // pow2))
                return as_type(mul, tp, silent=False)
    return fold_divmod(
        BinaryOp(as_sintish(lhs), ">>", as_intish(shift), type=Type.s32())
    )
def handle_conditional_move(args: InstrArgs, nonzero: bool) -> Expression:
    """Translate a conditional move (`nonzero` selects movn-style vs movz-style
    semantics) into a ternary expression."""
    type = Type.any_reg()
    condition = BinaryOp.scmp(args.reg(2), "!=" if nonzero else "==", Literal(0))
    return TernaryOp(
        condition,
        as_type(args.reg(1), type, silent=True),
        as_type(args.reg(0), type, silent=True),
        type,
    )
def format_f32_imm(num: int) -> str:
    """Format the 32-bit integer `num` as the float it bit-represents.

    Searches for the shortest decimal string that round-trips back to the
    exact same 32-bit pattern.
    """
    packed = struct.pack(">I", num & (2 ** 32 - 1))
    value = struct.unpack(">f", packed)[0]
    if not value or value == 4294967296.0:
        # Zero, negative zero, nan, or INT_MAX.
        # NOTE(review): `not value` is False for nan, and 4294967296.0 is
        # 2**32; the cases actually handled here look like +/-0.0 and 2**32 —
        # confirm the comment against the intended behavior.
        return str(value)
    # Write values smaller than 1e-7 / greater than 1e7 using scientific notation,
    # and values in between using fixed point.
    if abs(math.log10(abs(value))) > 6.9:
        fmt_char = "e"
    elif abs(value) < 1:
        fmt_char = "f"
    else:
        fmt_char = "g"
    def fmt(prec: int) -> str:
        """Format 'value' with 'prec' significant digits/decimals, in either scientific
        or regular notation depending on 'fmt_char'."""
        ret = ("{:." + str(prec) + fmt_char + "}").format(value)
        if fmt_char == "e":
            return ret.replace("e+", "e").replace("e0", "e").replace("e-0", "e-")
        if "e" in ret:
            # The "g" format character can sometimes introduce scientific notation if
            # formatting with too few decimals. If this happens, return an incorrect
            # value to prevent the result from being used.
            #
            # Since the value we are formatting is within (1e-7, 1e7) in absolute
            # value, it will at least be possible to format with 7 decimals, which is
            # less than float precision. Thus, this annoying Python limitation won't
            # lead to us outputting numbers with more precision than we really have.
            return "0"
        return ret
    # 20 decimals is more than enough for a float. Start there, then try to shrink it.
    prec = 20
    while prec > 0:
        prec -= 1
        value2 = float(fmt(prec))
        if struct.pack(">f", value2) != packed:
            prec += 1
            break
    if prec == 20:
        # Uh oh, even the original value didn't format correctly. Fall back to str(),
        # which ought to work.
        return str(value)
    ret = fmt(prec)
    if "." not in ret:
        # Make the output read as a float (e.g. "1" -> "1.0").
        ret += ".0"
    return ret
def format_f64_imm(num: int) -> str:
    """Format the 64-bit integer `num` as the double it bit-represents."""
    raw = struct.pack(">Q", num & (2 ** 64 - 1))
    (value,) = struct.unpack(">d", raw)
    return str(value)
def fold_divmod(original_expr: BinaryOp) -> BinaryOp:
    """
    Return a new BinaryOp instance if this one can be simplified to a single / or % op.
    This involves simplifying expressions using MULT_HI, MULTU_HI, +, -, <<, >>, and /.
    In GCC 2.7.2, the code that generates these instructions is in expmed.c.
    See also https://ridiculousfish.com/blog/posts/labor-of-division-episode-i.html
    for a modern writeup of a similar algorithm.
    This optimization is also used by MWCC and modern compilers (but not IDO).
    """
    mult_high_ops = ("MULT_HI", "MULTU_HI")
    possible_match_ops = mult_high_ops + ("-", "+", ">>")
    # Only operate on integer expressions of certain operations
    if original_expr.is_floating() or original_expr.op not in possible_match_ops:
        return original_expr
    # Use `early_unwrap_ints` instead of `early_unwrap` to ignore Casts to integer types
    # Although this discards some extra type information, this function largely ignores
    # sign/size information to stay simpler. The result will be made with BinaryOp.int()
    # regardless of input types.
    expr = original_expr
    left_expr = early_unwrap_ints(expr.left)
    right_expr = early_unwrap_ints(expr.right)
    divisor_shift = 0
    # Detect signed power-of-two division: (x >> N) + MIPS2C_CARRY --> x / (1 << N)
    if (
        isinstance(left_expr, BinaryOp)
        and left_expr.op == ">>"
        and isinstance(left_expr.right, Literal)
        and expr.op == "+"
        and isinstance(right_expr, CarryBit)
    ):
        new_denom = 1 << left_expr.right.value
        return BinaryOp.sint(
            left=left_expr.left,
            op="/",
            right=Literal(new_denom),
            silent=True,
        )
    # Fold `/` with `>>`: ((x / N) >> M) --> x / (N << M)
    # NB: If x is signed, this is only correct if there is a sign-correcting subtraction term
    if (
        isinstance(left_expr, BinaryOp)
        and left_expr.op == "/"
        and isinstance(left_expr.right, Literal)
        and expr.op == ">>"
        and isinstance(right_expr, Literal)
    ):
        new_denom = left_expr.right.value << right_expr.value
        # Guard against overflowing a 32-bit divisor.
        if new_denom < (1 << 32):
            return BinaryOp.int(
                left=left_expr.left,
                op="/",
                right=Literal(new_denom),
            )
    # Detect `%`: (x - ((x / y) * y)) --> x % y
    if expr.op == "-" and isinstance(right_expr, BinaryOp) and right_expr.op == "*":
        div_expr = early_unwrap_ints(right_expr.left)
        mod_base = early_unwrap_ints(right_expr.right)
        if (
            isinstance(div_expr, BinaryOp)
            and early_unwrap_ints(div_expr.left) == left_expr
        ):
            # Accept either `(x / y) * y` or `(x >> N) * M` (where `1 << N == M`)
            divisor = early_unwrap_ints(div_expr.right)
            if (div_expr.op == "/" and divisor == mod_base) or (
                div_expr.op == ">>"
                and isinstance(divisor, Literal)
                and isinstance(mod_base, Literal)
                and (1 << divisor.value) == mod_base.value
            ):
                return BinaryOp.int(left=left_expr, op="%", right=right_expr.right)
    # Detect dividing by a negative: ((x >> 31) - (x / N)) --> x / -N
    if (
        expr.op == "-"
        and isinstance(left_expr, BinaryOp)
        and left_expr.op == ">>"
        and early_unwrap_ints(left_expr.right) == Literal(31)
        and isinstance(right_expr, BinaryOp)
        and right_expr.op == "/"
        and isinstance(right_expr.right, Literal)
    ):
        # Swap left_expr & right_expr, but replace the N in right_expr with -N
        left_expr, right_expr = (
            replace(right_expr, right=Literal(-right_expr.right.value)),
            left_expr,
        )
    # Remove outer error term: ((x / N) + ((x / N) >> 31)) --> x / N
    # As N gets close to (1 << 30), this is no longer a negligible error term
    if (
        expr.op == "+"
        and isinstance(left_expr, BinaryOp)
        and left_expr.op == "/"
        and isinstance(left_expr.right, Literal)
        and left_expr.right.value <= (1 << 29)
        and isinstance(right_expr, BinaryOp)
        and early_unwrap_ints(right_expr.left) == left_expr
        and right_expr.op == ">>"
        and early_unwrap_ints(right_expr.right) == Literal(31)
    ):
        return left_expr
    # Remove outer error term: ((x / N) - (x >> 31)) --> x / N
    if (
        expr.op == "-"
        and isinstance(left_expr, BinaryOp)
        and left_expr.op == "/"
        and isinstance(left_expr.right, Literal)
        and isinstance(right_expr, BinaryOp)
        and right_expr.op == ">>"
        and early_unwrap_ints(right_expr.right) == Literal(31)
    ):
        div_expr = left_expr
        shift_var_expr = early_unwrap_ints(right_expr.left)
        div_var_expr = early_unwrap_ints(div_expr.left)
        # Check if the LHS of the shift is the same var that we're dividing by
        if div_var_expr == shift_var_expr:
            if isinstance(div_expr.right, Literal) and div_expr.right.value >= (
                1 << 30
            ):
                # Rebuild the division so the result uses BinaryOp.int types.
                return BinaryOp.int(
                    left=div_expr.left,
                    op=div_expr.op,
                    right=div_expr.right,
                )
            return div_expr
        # If the var is under 32 bits, the error term may look like `(x << K) >> 31` instead
        if (
            isinstance(shift_var_expr, BinaryOp)
            and early_unwrap_ints(div_expr.left)
            == early_unwrap_ints(shift_var_expr.left)
            and shift_var_expr.op == "<<"
            and isinstance(shift_var_expr.right, Literal)
        ):
            return div_expr
    # Shift on the result of the mul: MULT_HI(x, N) >> M, shift the divisor by M
    if (
        isinstance(left_expr, BinaryOp)
        and expr.op == ">>"
        and isinstance(right_expr, Literal)
    ):
        divisor_shift += right_expr.value
        expr = left_expr
        left_expr = early_unwrap_ints(expr.left)
        right_expr = early_unwrap_ints(expr.right)
        # Normalize MULT_HI(N, x) to MULT_HI(x, N)
        if isinstance(left_expr, Literal) and not isinstance(right_expr, Literal):
            left_expr, right_expr = right_expr, left_expr
        # Remove inner addition: (MULT_HI(x, N) + x) >> M --> MULT_HI(x, N) >> M
        # MULT_HI performs signed multiplication, so the `+ x` acts as setting the 32nd bit
        # while having a result with the same sign as x.
        # We can ignore it because `round_div` can work with arbitrarily large constants
        if (
            isinstance(left_expr, BinaryOp)
            and left_expr.op == "MULT_HI"
            and expr.op == "+"
            and early_unwrap_ints(left_expr.left) == right_expr
        ):
            expr = left_expr
            left_expr = early_unwrap_ints(expr.left)
            right_expr = early_unwrap_ints(expr.right)
    # Shift on the LHS of the mul: MULT_HI(x >> M, N) --> MULT_HI(x, N) >> M
    if (
        expr.op in mult_high_ops
        and isinstance(left_expr, BinaryOp)
        and left_expr.op == ">>"
        and isinstance(left_expr.right, Literal)
    ):
        divisor_shift += left_expr.right.value
        left_expr = early_unwrap_ints(left_expr.left)
    # Instead of checking for the error term precisely, just check that
    # the quotient is "close enough" to the integer value
    def round_div(x: int, y: int) -> Optional[int]:
        # Returns round(x / y) when it is a plausible reciprocal divisor.
        if y <= 1:
            return None
        result = round(x / y)
        if x / (y + 1) <= result <= x / (y - 1):
            return result
        return None
    if expr.op in mult_high_ops and isinstance(right_expr, Literal):
        # Recover the divisor from the magic-number multiply.
        denom = round_div(1 << (32 + divisor_shift), right_expr.value)
        if denom is not None:
            return BinaryOp.int(
                left=left_expr,
                op="/",
                right=Literal(denom),
            )
    return original_expr
def replace_clz_shift(expr: BinaryOp) -> BinaryOp:
    """
    Simplify an expression matching `CLZ(x) >> 5` into `x == 0`,
    and further simplify `(a - b) == 0` into `a == b`.
    """
    # Only an integer `>>` can match the pattern.
    if expr.is_floating() or expr.op != ">>":
        return expr
    # Match `CLZ(x) >> 5`, or return the original expr
    lhs = early_unwrap_ints(expr.left)
    rhs = early_unwrap_ints(expr.right)
    matches = (
        isinstance(lhs, UnaryOp)
        and lhs.op == "CLZ"
        and isinstance(rhs, Literal)
        and rhs.value == 5
    )
    if not matches:
        return expr
    # If the inner `x` is `(a - b)`, return `a == b`
    inner = early_unwrap(lhs.expr)
    if (
        isinstance(inner, BinaryOp)
        and not inner.is_floating()
        and inner.op == "-"
    ):
        return BinaryOp.icmp(inner.left, "==", inner.right)
    return BinaryOp.icmp(lhs.expr, "==", Literal(0, type=lhs.expr.type))
def replace_bitand(expr: BinaryOp) -> Expression:
    """Detect expressions using `&` for truncating integer casts"""
    if expr.is_floating() or expr.op != "&":
        return expr
    if expr.right == Literal(0xFF):
        return as_type(expr.left, Type.int_of_size(8), silent=False)
    if expr.right == Literal(0xFFFF):
        return as_type(expr.left, Type.int_of_size(16), silent=False)
    return expr
def fold_mul_chains(expr: Expression) -> Expression:
    """Simplify an expression involving +, -, * and << to a single multiplication,
    e.g. 4*x - x -> 3*x, or x<<2 -> x*4. This includes some logic for preventing
    folds of consecutive sll, and keeping multiplications by large powers of two
    as bitshifts at the top layer."""
    def fold(
        expr: Expression, toplevel: bool, allow_sll: bool
    ) -> Tuple[Expression, int]:
        # Returns (base, num) such that the input is equivalent to base * num.
        if isinstance(expr, BinaryOp):
            lbase, lnum = fold(expr.left, False, (expr.op != "<<"))
            rbase, rnum = fold(expr.right, False, (expr.op != "<<"))
            if expr.op == "<<" and isinstance(expr.right, Literal) and allow_sll:
                # Left-shifts by small numbers are easier to understand if
                # written as multiplications (they compile to the same thing).
                if toplevel and lnum == 1 and not (1 <= expr.right.value <= 4):
                    return (expr, 1)
                return (lbase, lnum << expr.right.value)
            if (
                expr.op == "*"
                and isinstance(expr.right, Literal)
                and (allow_sll or expr.right.value % 2 != 0)
            ):
                return (lbase, lnum * expr.right.value)
            if early_unwrap(lbase) == early_unwrap(rbase):
                # Same base on both sides, e.g. (x*4) + x -> x*5, (x*4) - x -> x*3.
                if expr.op == "+":
                    return (lbase, lnum + rnum)
                if expr.op == "-":
                    return (lbase, lnum - rnum)
        if isinstance(expr, UnaryOp) and expr.op == "-" and not toplevel:
            base, num = fold(expr.expr, False, True)
            return (base, -num)
        if (
            isinstance(expr, EvalOnceExpr)
            and not expr.emit_exactly_once
            and not expr.forced_emit
        ):
            # Fold through temporaries, but only keep the result when the base
            # is trivial enough to be worth repeating.
            base, num = fold(early_unwrap(expr), False, allow_sll)
            if num != 1 and is_trivial_expression(base):
                return (base, num)
        return (expr, 1)
    base, num = fold(expr, True, True)
    if num == 1:
        return expr
    return BinaryOp.int(left=base, op="*", right=Literal(num))
def array_access_from_add(
    expr: Expression,
    offset: int,
    stack_info: StackInfo,
    *,
    target_size: Optional[int],
    ptr: bool,
) -> Optional[Expression]:
    """Try to convert `expr + offset` into an array access.

    Recognizes `base + index * scale` patterns (with `*` or `<<`), producing
    `base[index]`, `base->subarray[index]`, or a field/address-of wrapper
    depending on `offset`, `target_size`, and `ptr`. Returns None when no
    array pattern is recognized.
    """
    expr = early_unwrap(expr)
    if not isinstance(expr, BinaryOp) or expr.op != "+":
        return None
    base = expr.left
    addend = expr.right
    # Make sure the pointer-typed operand is `base`.
    if addend.type.is_pointer_or_array() and not base.type.is_pointer_or_array():
        base, addend = addend, base
    index: Expression
    scale: int
    uw_addend = early_unwrap(addend)
    if (
        isinstance(uw_addend, BinaryOp)
        and uw_addend.op == "*"
        and isinstance(uw_addend.right, Literal)
    ):
        index = uw_addend.left
        scale = uw_addend.right.value
    elif (
        isinstance(uw_addend, BinaryOp)
        and uw_addend.op == "<<"
        and isinstance(uw_addend.right, Literal)
    ):
        index = uw_addend.left
        scale = 1 << uw_addend.right.value
    else:
        index = addend
        scale = 1
    if scale < 0:
        # Normalize a negative scale by negating the index instead.
        scale = -scale
        index = UnaryOp.sint("-", index)
    target_type = base.type.get_pointer_target()
    if target_type is None:
        return None
    uw_base = early_unwrap(base)
    typepool = stack_info.global_info.typepool
    # In `&x + index * scale`, if the type of `x` is not known, try to mark it as an array.
    # Skip the `scale = 1` case because this often indicates a complex `index` expression,
    # and is not actually a 1-byte array lookup.
    if (
        scale > 1
        and offset == 0
        and isinstance(uw_base, AddressOf)
        and target_type.get_size_bytes() is None
    ):
        inner_type: Optional[Type] = None
        if (
            isinstance(uw_base.expr, GlobalSymbol)
            and uw_base.expr.potential_array_dim(scale)[1] != 0
        ):
            # For GlobalSymbols, use the size of the asm data to check the feasibility of being
            # an array with `scale`. This helps be more conservative around fake symbols.
            pass
        elif scale == 2:
            # This *could* be a struct, but is much more likely to be an int
            inner_type = Type.int_of_size(16)
        elif scale == 4:
            inner_type = Type.reg32(likely_float=False)
        elif typepool.unk_inference and isinstance(uw_base.expr, GlobalSymbol):
            # Make up a struct with a tag name based on the symbol & struct size.
            # Although `scale = 8` could indicate an array of longs/doubles, it seems more
            # common to be an array of structs.
            struct_name = f"_struct_{uw_base.expr.symbol_name}_0x{scale:X}"
            struct = typepool.get_struct_by_tag_name(
                struct_name, stack_info.global_info.typemap
            )
            if struct is None:
                struct = StructDeclaration.unknown(
                    typepool, size=scale, tag_name=struct_name
                )
            elif struct.size != scale:
                # This should only happen if there was already a struct with this name in the context
                raise DecompFailure(f"sizeof(struct {struct_name}) != {scale:#x}")
            inner_type = Type.struct(struct)
        if inner_type is not None:
            # This might fail, if `uw_base.expr.type` can't be changed to an array
            uw_base.expr.type.unify(Type.array(inner_type, dim=None))
            # This acts as a backup, and will usually succeed
            target_type.unify(inner_type)
    if target_type.get_size_bytes() == scale:
        # base[index]
        pass
    else:
        # base->subarray[index]
        sub_path, sub_type, remaining_offset = base.type.get_deref_field(
            offset, target_size=scale, exact=False
        )
        # Check if the last item in the path is `0`, which indicates the start of an array
        # If it is, remove it: it will be replaced by `[index]`
        if sub_path is None or len(sub_path) < 2 or sub_path[-1] != 0:
            return None
        sub_path.pop()
        base = StructAccess(
            struct_var=base,
            offset=offset - remaining_offset,
            target_size=None,
            field_path=sub_path,
            stack_info=stack_info,
            type=sub_type,
        )
        offset = remaining_offset
        target_type = sub_type
    ret: Expression = ArrayAccess(base, index, type=target_type)
    # Add .field if necessary by wrapping ret in StructAccess(AddressOf(...))
    ret_ref = AddressOf(ret, type=ret.type.reference())
    field_path, field_type, _ = ret_ref.type.get_deref_field(
        offset, target_size=target_size
    )
    if offset != 0 or (target_size is not None and target_size != scale):
        ret = StructAccess(
            struct_var=ret_ref,
            offset=offset,
            target_size=target_size,
            field_path=field_path,
            stack_info=stack_info,
            type=field_type,
        )
    if ptr:
        # Caller wants an address rather than a value.
        ret = AddressOf(ret, type=ret.type.reference())
    return ret
def handle_add(args: InstrArgs) -> Expression:
    """Handle register+register addition.

    Tries, in order: addi-style handling when one side is a literal,
    multiplication/division folding, and array-indexing detection; falls
    back to a plain `+`.
    """
    lhs = args.reg(1)
    rhs = args.reg(2)
    stack_info = args.stack_info
    type = Type.intptr()
    # Because lhs & rhs are in registers, it shouldn't be possible for them to be arrays.
    # If they are, treat them the same as pointers anyways.
    if lhs.type.is_pointer_or_array():
        type = Type.ptr()
    elif rhs.type.is_pointer_or_array():
        type = Type.ptr()
    # addiu instructions can sometimes be emitted as addu instead, when the
    # offset is too large.
    if isinstance(rhs, Literal):
        return handle_addi_real(args.reg_ref(0), args.reg_ref(1), lhs, rhs, stack_info)
    if isinstance(lhs, Literal):
        return handle_addi_real(args.reg_ref(0), args.reg_ref(2), rhs, lhs, stack_info)
    expr = BinaryOp(left=as_intptr(lhs), op="+", right=as_intptr(rhs), type=type)
    folded_expr = fold_mul_chains(expr)
    if isinstance(folded_expr, BinaryOp):
        folded_expr = fold_divmod(folded_expr)
    if folded_expr is not expr:
        return folded_expr
    array_expr = array_access_from_add(expr, 0, stack_info, target_size=None, ptr=True)
    if array_expr is not None:
        return array_expr
    return expr
def handle_add_float(args: InstrArgs) -> Expression:
    """Translate single-precision float addition; `x + x` becomes `2.0f * x`."""
    if args.reg_ref(1) == args.reg_ref(2):
        # 1 << 30 (0x40000000) is the bit pattern for 2.0f.
        two = Literal(1 << 30, type=Type.f32())
        return BinaryOp.f32(two, "*", args.reg(1))
    return BinaryOp.f32(args.reg(1), "+", args.reg(2))
def handle_add_double(args: InstrArgs) -> Expression:
    """Translate double-precision float addition; `x + x` becomes `2.0 * x`."""
    if args.reg_ref(1) == args.reg_ref(2):
        # 1 << 62 (0x4000000000000000) is the bit pattern for 2.0.
        two = Literal(1 << 62, type=Type.f64())
        return BinaryOp.f64(two, "*", args.dreg(1))
    return BinaryOp.f64(args.dreg(1), "+", args.dreg(2))
def handle_bgez(args: InstrArgs) -> Condition:
    """Translate `bgez` (branch when >= 0).

    If the operand is `x << N`, the sign test really checks bit (31 - N) of
    x, so emit `!(x & (1 << (31 - N)))` instead of a comparison.
    """
    expr = args.reg(0)
    unwrapped = early_unwrap(expr)
    if (
        isinstance(unwrapped, BinaryOp)
        and unwrapped.op == "<<"
        and isinstance(unwrapped.right, Literal)
    ):
        mask = Literal(1 << (31 - unwrapped.right.value))
        masked = BinaryOp.int(unwrapped.left, "&", mask)
        return UnaryOp("!", masked, type=Type.bool())
    return BinaryOp.scmp(expr, ">=", Literal(0))
def rlwi_mask(mask_begin: int, mask_end: int) -> int:
    """Compute the mask constant used by the rlwi* family of PPC instructions,
    referred to as the `MASK(MB, ME)` function in the processor manual.
    Bit 0 is the MSB, Bit 31 is the LSB."""
    def bits_upto(m: int) -> int:
        # All ones in bits [m, 31] (PPC numbering), i.e. the low (32 - m) bits.
        return (1 << (32 - m)) - 1

    if mask_begin <= mask_end:
        # Set bits inside the range, fully inclusive
        return bits_upto(mask_begin) - bits_upto(mask_end + 1)
    # Set bits from [31, mask_end] and [mask_begin, 0]
    return (bits_upto(mask_end + 1) - bits_upto(mask_begin)) ^ 0xFFFFFFFF
def handle_rlwinm(
    source: Expression,
    shift: int,
    mask_begin: int,
    mask_end: int,
    simplify: bool = True,
) -> Expression:
    """Translate PPC rlwinm (rotate-left-word-immediate-then-AND-with-mask).

    Produces an expression equivalent to `ROTL(source, shift) & MASK(mask_begin,
    mask_end)`. With `simplify=True`, single-operand results may be folded into
    multiplies/divides or other higher-level forms.
    """
    # TODO: Detect shift + truncate, like `(x << 2) & 0xFFF3` or `(x >> 2) & 0x3FFF`

    # The output of the rlwinm instruction is `ROTL(source, shift) & mask`. We write this as
    # ((source << shift) & mask) | ((source >> (32 - shift)) & mask)
    # and compute both OR operands (upper_bits and lower_bits respectively).
    all_ones = 0xFFFFFFFF
    mask = rlwi_mask(mask_begin, mask_end)
    left_shift = shift
    right_shift = 32 - shift
    left_mask = (all_ones << left_shift) & mask
    right_mask = (all_ones >> right_shift) & mask

    # We only simplify if the `simplify` argument is True, and there will be no `|` in the
    # resulting expression. If there is an `|`, the expression is best left as bitwise math
    simplify = simplify and not (left_mask and right_mask)

    # Constant-fold the whole rotate-and-mask when the source is a literal.
    if isinstance(source, Literal):
        upper_value = (source.value << left_shift) & mask
        lower_value = (source.value >> right_shift) & mask
        return Literal(upper_value | lower_value)

    # Upper OR operand: (source << shift) & mask, omitted when its mask is 0.
    upper_bits: Optional[Expression]
    if left_mask == 0:
        upper_bits = None
    else:
        upper_bits = source
        if left_shift != 0:
            upper_bits = BinaryOp.int(
                left=upper_bits, op="<<", right=Literal(left_shift)
            )

        if simplify:
            upper_bits = fold_mul_chains(upper_bits)

        # Only emit the `&` when the mask actually removes bits beyond what
        # the shift already guarantees.
        if left_mask != (all_ones << left_shift) & all_ones:
            upper_bits = BinaryOp.int(left=upper_bits, op="&", right=Literal(left_mask))
            if simplify:
                upper_bits = replace_bitand(upper_bits)

    # Lower OR operand: (source >> (32 - shift)) & mask, omitted when its mask is 0.
    lower_bits: Optional[Expression]
    if right_mask == 0:
        lower_bits = None
    else:
        lower_bits = BinaryOp.uint(left=source, op=">>", right=Literal(right_shift))

        if simplify:
            lower_bits = replace_clz_shift(fold_divmod(lower_bits))

        # As above, skip the `&` when it would be a no-op given the shift.
        if right_mask != (all_ones >> right_shift) & all_ones:
            lower_bits = BinaryOp.int(
                left=lower_bits, op="&", right=Literal(right_mask)
            )
            if simplify:
                lower_bits = replace_bitand(lower_bits)

    # Combine the two halves; either may have been elided entirely.
    if upper_bits is None and lower_bits is None:
        return Literal(0)
    elif upper_bits is None:
        assert lower_bits is not None
        return lower_bits
    elif lower_bits is None:
        return upper_bits
    else:
        return BinaryOp.int(left=upper_bits, op="|", right=lower_bits)
def handle_rlwimi(
    base: Expression, source: Expression, shift: int, mask_begin: int, mask_end: int
) -> Expression:
    """Translate PPC rlwimi (rotate-left-word-immediate-then-mask-insert).

    Reads `base`, replaces the bits selected by `MASK(mask_begin, mask_end)`
    with rotated bits from `source`, and returns the combined value. This is
    commonly used for flag manipulation such as `x |= 0x10` or `x &= ~0x10`.
    """
    # Writing the mask with `~` is generally more readable than computing the
    # inverse constant here.
    mask_literal = Literal(rlwi_mask(mask_begin, mask_end))
    inverted_mask = UnaryOp("~", mask_literal, type=Type.u32())
    cleared_base = BinaryOp.int(left=base, op="&", right=inverted_mask)
    if source == Literal(0):
        # No bits are inserted at all. (This may look like `x &= ~0x10`.)
        return cleared_base
    # `simplify=False` keeps the inserted value as bitwise math instead of `*` or `/`.
    inserted = handle_rlwinm(source, shift, mask_begin, mask_end, simplify=False)
    if inserted == mask_literal:
        # Every masked bit is being set, so ORing without clearing is enough:
        # `x |= 0xF0` rather than `x = (x & ~0xF0) | 0xF0`.
        return BinaryOp.int(left=base, op="|", right=inserted)
    return BinaryOp.int(left=cleared_base, op="|", right=inserted)
def handle_loadx(args: InstrArgs, type: Type) -> Expression:
    """Translate "indexed loads" like `lwzx rD, rA, rB`, which read `*(rA + rB)` into rD."""
    size = type.get_size_bytes()
    assert size is not None
    address = BinaryOp.intptr(left=args.reg(1), op="+", right=args.reg(2))
    loaded = deref(address, args.regs, args.stack_info, size=size)
    return as_type(loaded, type, silent=True)
def strip_macros(arg: Argument) -> Argument:
    """Replace %lo(...) by 0, and assert that there are no %hi(...). We assume that
    %hi's only ever occur in lui, where we expand them to an entire value, and not
    just the upper part. This preserves semantics in most cases (though not when %hi's
    are reused for different %lo's...)"""
    if isinstance(arg, Macro):
        if arg.macro_name in ("sda2", "sda21"):
            return arg.argument
        if arg.macro_name == "hi":
            raise DecompFailure("%hi macro outside of lui")
        if arg.macro_name not in ("lo", "l"):
            raise DecompFailure(f"Unrecognized linker macro %{arg.macro_name}")
        # This is sort of weird; for `symbol@l` we return 0 here and assume
        # that this @l is always perfectly paired with one other @ha.
        # However, with `literal@l`, we return the literal value, and assume it is
        # paired with another `literal@ha`. This lets us reuse `literal@ha` values,
        # but assumes that we never mix literals & symbols.
        if isinstance(arg.argument, AsmLiteral):
            return AsmLiteral(arg.argument.value)
        return AsmLiteral(0)

    if isinstance(arg, AsmAddressMode) and isinstance(arg.lhs, Macro):
        if arg.lhs.macro_name in ("sda2", "sda21"):
            return arg.lhs.argument
        if arg.lhs.macro_name not in ("lo", "l"):
            raise DecompFailure(
                f"Bad linker macro in instruction argument {arg}, expected %lo"
            )
        # Drop the macro from the address mode, keeping the base register.
        return AsmAddressMode(lhs=AsmLiteral(0), rhs=arg.rhs)

    # Plain arguments pass through untouched.
    return arg
@dataclass
class AbiArgSlot:
    """One argument-passing slot in a function-call ABI."""

    offset: int  # stack offset of this slot
    reg: Optional[Register]  # register carrying the value, if register-passed
    type: Type  # expected type of the argument in this slot
    name: Optional[str] = None  # optional argument name
    comment: Optional[str] = None  # optional annotation emitted alongside the arg
@dataclass
class Abi:
    """Computed ABI for one call site: definite slots plus uncertain candidates."""

    arg_slots: List[AbiArgSlot]  # slots that are definitely arguments
    possible_slots: List[AbiArgSlot]  # slots that may or may not be arguments
def reg_always_set(node: Node, reg: Register, *, dom_set: bool) -> bool:
    """Return True if `reg` is guaranteed to be set on every path from
    `node`'s immediate dominator to `node`.

    `dom_set` states whether the register is already set at the immediate
    dominator itself; if it is not, reaching the dominator without an
    intervening write means the register may be unset. A clobber (without a
    subsequent write) on any path also makes the answer False.
    """
    if node.immediate_dominator is None:
        return False
    seen = {node.immediate_dominator}
    stack = node.parents[:]
    while stack:
        n = stack.pop()
        if n == node.immediate_dominator and not dom_set:
            return False
        if n in seen:
            continue
        seen.add(n)
        # Scan the block in order; the *last* write/clobber wins.
        clobbered: Optional[bool] = None
        for instr in n.block.instructions:
            with current_instr(instr):
                if reg in instr.outputs:
                    clobbered = False
                elif reg in instr.clobbers:
                    clobbered = True
        # Fixed non-idiomatic `== True` comparison (flake8 E712); behavior is
        # unchanged since `clobbered` is Optional[bool].
        if clobbered is True:
            return False
        if clobbered is None:
            # Block neither sets nor clobbers the register; keep walking up.
            stack.extend(n.parents)
    return True
def pick_phi_assignment_nodes(
    reg: Register, nodes: List[Node], expr: Expression
) -> List[Node]:
    """
    As part of `assign_phis()`, we need to pick a set of nodes where we can emit a
    `SetPhiStmt` that assigns the phi for `reg` to `expr`.
    The final register state for `reg` for each node in `nodes` is `expr`,
    so the best case would be finding a single dominating node for the assignment.
    """
    # Nodes dominating *all* of `nodes`, ordered so "earlier" nodes (fewer
    # dominators) come first.
    common_dominators = sorted(
        set.intersection(*(n.dominators for n in nodes)),
        key=lambda n: len(n.dominators),
    )
    # Look for a dominator whose final state for `reg` already matches `expr`.
    for candidate in common_dominators:
        state = get_block_info(candidate).final_register_states
        value = state.get_raw(reg)
        meta = state.get_meta(reg)
        if value is None or meta is None or meta.force:
            continue
        if value == expr:
            return [candidate]
    # No single dominating assignment point found; fall back to assigning in
    # every node. (TODO: sometimes a smaller set, e.g. 2 nodes, would work.)
    return nodes
def assign_phis(used_phis: List[PhiExpr], stack_info: StackInfo) -> None:
    """Resolve each used phi: when all parents agree on a value, record it as
    `replacement_expr`; otherwise emit SetPhiStmt assignments into parent (or
    dominating) blocks. Finally, name the phis that survive and register them
    on `stack_info`."""
    i = 0
    # Iterate over used phis until there are no more remaining. New ones may
    # appear during iteration, hence the while loop.
    while i < len(used_phis):
        phi = used_phis[i]
        assert phi.num_usages > 0
        assert len(phi.node.parents) >= 2

        # Group parent nodes by the value of their phi register
        equivalent_nodes: DefaultDict[Expression, List[Node]] = defaultdict(list)
        for node in phi.node.parents:
            expr = get_block_info(node).final_register_states[phi.reg]
            expr.type.unify(phi.type)
            equivalent_nodes[expr].append(node)

        exprs = list(equivalent_nodes.keys())
        first_uw = early_unwrap(exprs[0])
        if all(early_unwrap(e) == first_uw for e in exprs[1:]):
            # All the phis have the same value (e.g. because we recomputed an
            # expression after a store, or restored a register after a function
            # call). Just use that value instead of introducing a phi node.
            # TODO: the unwrapping here is necessary, but also kinda sketchy:
            # we may set as replacement_expr an expression that really shouldn't
            # be repeated, e.g. a StructAccess. It would make sense to use less
            # eager unwrapping, and/or to emit an EvalOnceExpr at this point
            # (though it's too late for it to be able to participate in the
            # prevent_later_uses machinery).
            phi.replacement_expr = as_type(first_uw, phi.type, silent=True)
            # Mark one use per phi usage so use-counting stays balanced.
            for _ in range(phi.num_usages):
                first_uw.use()
        else:
            # Parents disagree: emit an assignment for each distinct value, at
            # the node(s) chosen by pick_phi_assignment_nodes.
            for expr, nodes in equivalent_nodes.items():
                for node in pick_phi_assignment_nodes(phi.reg, nodes, expr):
                    block_info = get_block_info(node)
                    expr = block_info.final_register_states[phi.reg]
                    if isinstance(expr, PhiExpr):
                        # Explicitly mark how the expression is used if it's a phi,
                        # so we can propagate phi sets (to get rid of temporaries).
                        expr.use(from_phi=phi)
                    else:
                        expr.use()
                    typed_expr = as_type(expr, phi.type, silent=True)
                    block_info.to_write.append(SetPhiStmt(phi, typed_expr))
        i += 1

    # Name surviving phis: those without a replacement that don't propagate to
    # another phi. Numbering starts at the second phi per register.
    name_counter: Dict[Register, int] = {}
    for phi in used_phis:
        if not phi.replacement_expr and phi.propagates_to() == phi:
            counter = name_counter.get(phi.reg, 0) + 1
            name_counter[phi.reg] = counter
            output_reg_name = stack_info.function.reg_formatter.format(phi.reg)
            prefix = f"phi_{output_reg_name}"
            phi.name = f"{prefix}_{counter}" if counter > 1 else prefix
            stack_info.phi_vars.append(phi)
def propagate_register_meta(nodes: List[Node], reg: Register) -> None:
    """Propagate RegMeta bits forwards/backwards.

    `is_read` flows backwards from readers to the blocks that provided the
    value; `uninteresting`, `function_return` and `in_pattern` flow forwards
    through inherited registers, computed as a fixpoint."""
    non_terminal: List[Node] = [n for n in nodes if not isinstance(n, TerminalNode)]

    # Set `is_read` based on `read_inherited`.
    for n in non_terminal:
        if reg in get_block_info(n).final_register_states.read_inherited:
            for p in n.parents:
                par_meta = get_block_info(p).final_register_states.get_meta(reg)
                if par_meta:
                    par_meta.is_read = True

    # Propagate `is_read` backwards.
    todo = non_terminal[:]
    while todo:
        n = todo.pop()
        meta = get_block_info(n).final_register_states.get_meta(reg)
        for p in n.parents:
            par_meta = get_block_info(p).final_register_states.get_meta(reg)
            if (par_meta and not par_meta.is_read) and (
                meta and meta.inherited and meta.is_read
            ):
                par_meta.is_read = True
                # Re-process the parent so the bit keeps flowing upwards.
                todo.append(p)

    # Set `uninteresting` and propagate it, `function_return`, and `in_pattern` forwards.
    # Start by assuming inherited values are all set; they will get unset iteratively,
    # but for cyclic dependency purposes we want to assume them set.
    for n in non_terminal:
        meta = get_block_info(n).final_register_states.get_meta(reg)
        if meta:
            if meta.inherited:
                meta.uninteresting = True
                meta.function_return = True
                meta.in_pattern = True
            else:
                meta.uninteresting |= (
                    meta.is_read or meta.function_return or meta.in_pattern
                )

    # Iterate to a fixpoint: clear each optimistic bit when not all parents
    # agree, re-queueing children whenever a bit changes.
    todo = non_terminal[:]
    while todo:
        n = todo.pop()
        if isinstance(n, TerminalNode):
            continue
        meta = get_block_info(n).final_register_states.get_meta(reg)
        if not meta or not meta.inherited:
            continue
        all_uninteresting = True
        all_function_return = True
        all_in_pattern = True
        for p in n.parents:
            par_meta = get_block_info(p).final_register_states.get_meta(reg)
            if par_meta:
                all_uninteresting &= par_meta.uninteresting
                all_function_return &= par_meta.function_return
                all_in_pattern &= par_meta.in_pattern
        if meta.uninteresting and not all_uninteresting and not meta.is_read:
            meta.uninteresting = False
            todo.extend(n.children())
        if meta.function_return and not all_function_return:
            meta.function_return = False
            todo.extend(n.children())
        if meta.in_pattern and not all_in_pattern:
            meta.in_pattern = False
            todo.extend(n.children())
def determine_return_register(
    return_blocks: List[BlockInfo], fn_decl_provided: bool, arch: Arch
) -> Optional[Register]:
    """Determine which of the arch's base_return_regs (i.e. v0, f0) is the most
    likely to contain the return value, or if the function is likely void."""

    def priority(block_info: BlockInfo, reg: Register) -> int:
        # Lower numbers mean weaker evidence of an intentional return value;
        # 4 is a sentinel meaning the register was not set at all.
        meta = block_info.final_register_states.get_meta(reg)
        if not meta:
            return 4
        if meta.uninteresting:
            return 2
        if meta.in_pattern:
            return 1
        if meta.function_return:
            return 0
        return 3

    if not return_blocks:
        return None

    best_reg: Optional[Register] = None
    best_prio = -1
    for candidate in arch.base_return_regs:
        # Take the worst-case (maximum) priority across all return blocks.
        max_prio = max(priority(b, candidate) for b in return_blocks)
        if max_prio == 4:
            # Register is not always set, skip it
            continue
        if max_prio <= 2 and not fn_decl_provided:
            # Register is always read after being written, or comes from a
            # function call; seems unlikely to be an intentional return.
            # Skip it, unless we have a known non-void return type.
            continue
        if max_prio > best_prio:
            best_prio = max_prio
            best_reg = candidate
    return best_reg
def translate_node_body(node: Node, regs: RegInfo, stack_info: StackInfo) -> BlockInfo:
    """
    Given a node and current register contents, return a BlockInfo containing
    the translated AST for that node.
    """
    # Statements emitted for this block, in order.
    to_write: List[Union[Statement]] = []
    # Tracks values saved to stack slots, keyed by slot, so later register
    # restores from the same slot can be elided.
    local_var_writes: Dict[LocalVar, Tuple[Register, Expression]] = {}
    # Stack-passed arguments for the next function call, keyed by stack offset.
    subroutine_args: Dict[int, Expression] = {}
    branch_condition: Optional[Condition] = None
    switch_expr: Optional[Expression] = None
    # NOTE(review): `has_custom_return` is declared but never written in this
    # function.
    has_custom_return: bool = False
    has_function_call: bool = False
    in_pattern: bool = False
    arch = stack_info.global_info.arch

    def eval_once(
        expr: Expression,
        *,
        emit_exactly_once: bool,
        trivial: bool,
        prefix: str = "",
        reuse_var: Optional[Var] = None,
    ) -> EvalOnceExpr:
        """Wrap `expr` in an EvalOnceExpr backed by a temp var, and queue an
        EvalOnceStmt for it in the current block."""
        if emit_exactly_once:
            # (otherwise this will be marked used once num_usages reaches 1)
            expr.use()
        elif "_fictive_" in prefix and isinstance(expr, EvalOnceExpr):
            # Avoid creating additional EvalOnceExprs for fictive Registers
            # so they're less likely to appear in the output
            return expr
        assert reuse_var or prefix
        if prefix == "condition_bit":
            prefix = "cond"
        var = reuse_var or Var(stack_info, "temp_" + prefix)
        expr = EvalOnceExpr(
            wrapped_expr=expr,
            var=var,
            type=expr.type,
            emit_exactly_once=emit_exactly_once,
            trivial=trivial,
        )
        var.num_usages += 1
        stmt = EvalOnceStmt(expr)
        to_write.append(stmt)
        stack_info.temp_vars.append(stmt)
        return expr

    def prevent_later_uses(expr_filter: Callable[[Expression], bool]) -> None:
        """Prevent later uses of registers whose contents match a callback filter."""
        for r in regs.contents.keys():
            data = regs.contents.get(r)
            assert data is not None
            expr = data.value
            if not data.meta.force and expr_filter(expr):
                # Mark the register as "if used, emit the expression's once
                # var". We usually always have a once var at this point,
                # but if we don't, create one.
                if not isinstance(expr, EvalOnceExpr):
                    expr = eval_once(
                        expr,
                        emit_exactly_once=False,
                        trivial=False,
                        prefix=stack_info.function.reg_formatter.format(r),
                    )
                # This write isn't changing the value of the register; it didn't need
                # to be declared as part of the current instruction's inputs/outputs.
                regs.unchecked_set_with_meta(r, expr, replace(data.meta, force=True))

    def prevent_later_value_uses(sub_expr: Expression) -> None:
        """Prevent later uses of registers that recursively contain a given
        subexpression."""
        # Unused PassedInArg are fine; they can pass the uses_expr test simply based
        # on having the same variable name. If we didn't filter them out here it could
        # cause them to be incorrectly passed as function arguments -- the function
        # call logic sees an opaque wrapper and doesn't realize that they are unused
        # arguments that should not be passed on.
        prevent_later_uses(
            lambda e: uses_expr(e, lambda e2: e2 == sub_expr)
            and not (isinstance(e, PassedInArg) and not e.copied)
        )

    def prevent_later_function_calls() -> None:
        """Prevent later uses of registers that recursively contain a function call."""
        prevent_later_uses(lambda e: uses_expr(e, lambda e2: isinstance(e2, FuncCall)))

    def prevent_later_reads() -> None:
        """Prevent later uses of registers that recursively contain a read."""
        contains_read = lambda e: isinstance(e, (StructAccess, ArrayAccess))
        prevent_later_uses(lambda e: uses_expr(e, contains_read))

    def set_reg_maybe_return(reg: Register, expr: Expression) -> None:
        """Set a register, carrying the current `in_pattern` flag in its meta."""
        regs.set_with_meta(reg, expr, RegMeta(in_pattern=in_pattern))

    def set_reg(reg: Register, expr: Optional[Expression]) -> Optional[Expression]:
        """Assign `expr` to `reg`, wrapping non-literals in EvalOnceExprs and
        handling register-restore elision and --reg-vars. Returns the stored
        expression (possibly rewritten), or None when the set was elided."""
        if expr is None:
            if reg in regs:
                del regs[reg]
            return None

        if isinstance(expr, LocalVar):
            if (
                isinstance(node, ReturnNode)
                and stack_info.maybe_get_register_var(reg)
                and stack_info.in_callee_save_reg_region(expr.value)
                and reg in stack_info.callee_save_regs
            ):
                # Elide saved register restores with --reg-vars (it doesn't
                # matter in other cases).
                return None
            if expr in local_var_writes:
                # Elide register restores (only for the same register for now,
                # to be conversative).
                orig_reg, orig_expr = local_var_writes[expr]
                if orig_reg == reg:
                    expr = orig_expr

        uw_expr = expr
        if not isinstance(expr, Literal):
            expr = eval_once(
                expr,
                emit_exactly_once=False,
                trivial=is_trivial_expression(expr),
                prefix=stack_info.function.reg_formatter.format(reg),
            )

        if reg == Register("zero"):
            # Emit the expression as is. It's probably a volatile load.
            expr.use()
            to_write.append(ExprStmt(expr))
        else:
            dest = stack_info.maybe_get_register_var(reg)
            if dest is not None:
                stack_info.use_register_var(dest)
                # Avoid emitting x = x, but still refresh EvalOnceExpr's etc.
                if not (isinstance(uw_expr, RegisterVar) and uw_expr.reg == reg):
                    source = as_type(expr, dest.type, True)
                    source.use()
                    to_write.append(StoreStmt(source=source, dest=dest))
                expr = dest
            set_reg_maybe_return(reg, expr)
        return expr

    def clear_caller_save_regs() -> None:
        """Drop all temp (caller-save) registers from the register state."""
        for reg in arch.temp_regs:
            if reg in regs:
                del regs[reg]

    def maybe_clear_local_var_writes(func_args: List[Expression]) -> None:
        # Clear the `local_var_writes` dict if any of the `func_args` contain
        # a reference to a stack var. (The called function may modify the stack,
        # replacing the value we have in `local_var_writes`.)
        for arg in func_args:
            if uses_expr(
                arg,
                lambda expr: isinstance(expr, AddressOf)
                and isinstance(expr.expr, LocalVar),
            ):
                local_var_writes.clear()
                return

    def process_instr(instr: Instruction) -> None:
        """Translate a single instruction, updating register state and
        appending statements to `to_write` as needed."""
        nonlocal branch_condition, switch_expr, has_function_call, in_pattern

        in_pattern = instr.in_pattern
        mnemonic = instr.mnemonic
        arch_mnemonic = instr.arch_mnemonic(arch)
        args = InstrArgs(instr.args, regs, stack_info)
        expr: Expression

        # Figure out what code to generate!
        if mnemonic in arch.instrs_ignore:
            pass

        elif mnemonic in arch.instrs_store or mnemonic in arch.instrs_store_update:
            # Store a value in a permanent place.
            if mnemonic in arch.instrs_store:
                to_store = arch.instrs_store[mnemonic](args)
            else:
                # PPC specific store-and-update instructions
                # `stwu r3, 8(r4)` is equivalent to `$r3 = *($r4 + 8); $r4 += 8;`
                to_store = arch.instrs_store_update[mnemonic](args)

                # Update the register in the second argument
                update = args.memory_ref(1)
                if not isinstance(update, AddressMode):
                    raise DecompFailure(
                        f"Unhandled store-and-update arg in {instr}: {update!r}"
                    )
                set_reg(
                    update.rhs,
                    add_imm(args.regs[update.rhs], Literal(update.offset), stack_info),
                )

            if to_store is None:
                # Elided register preserval.
                pass
            elif isinstance(to_store.dest, SubroutineArg):
                # About to call a subroutine with this argument. Skip arguments for the
                # first four stack slots; they are also passed in registers.
                if to_store.dest.value >= 0x10:
                    subroutine_args[to_store.dest.value] = to_store.source
            else:
                if isinstance(to_store.dest, LocalVar):
                    stack_info.add_local_var(to_store.dest)
                    raw_value = to_store.source
                    if isinstance(raw_value, Cast) and raw_value.reinterpret:
                        # When preserving values on the stack across function calls,
                        # ignore the type of the stack variable. The same stack slot
                        # might be used to preserve values of different types.
                        raw_value = raw_value.expr
                    local_var_writes[to_store.dest] = (args.reg_ref(0), raw_value)
                # Emit a write. This includes four steps:
                # - mark the expression as used (since writes are always emitted)
                # - mark the dest used (if it's a struct access it needs to be
                # evaluated, though ideally we would not mark the top-level expression
                # used; it may cause early emissions that shouldn't happen)
                # - mark other usages of the dest as "emit before this point if used".
                # - emit the actual write.
                #
                # Note that the prevent_later_value_uses step happens after use(), since
                # the stored expression is allowed to reference its destination var,
                # but before the write is written, since prevent_later_value_uses might
                # emit writes of its own that should go before this write. In practice
                # that probably never occurs -- all relevant register contents should be
                # EvalOnceExpr's that can be emitted at their point of creation, but
                # I'm not 100% certain that that's always the case and will remain so.
                to_store.source.use()
                to_store.dest.use()
                prevent_later_value_uses(to_store.dest)
                prevent_later_function_calls()
                to_write.append(to_store)

        elif mnemonic in arch.instrs_source_first:
            # Just 'mtc1'. It's reversed, so we have to specially handle it.
            set_reg(args.reg_ref(1), arch.instrs_source_first[mnemonic](args))

        elif mnemonic in arch.instrs_branches:
            assert branch_condition is None
            branch_condition = arch.instrs_branches[mnemonic](args)

        elif mnemonic in arch.instrs_float_branches:
            assert branch_condition is None
            cond_bit = regs[Register("condition_bit")]
            if not isinstance(cond_bit, BinaryOp):
                cond_bit = ExprCondition(cond_bit, type=cond_bit.type)
            if arch_mnemonic == "mips:bc1t":
                branch_condition = cond_bit
            elif arch_mnemonic == "mips:bc1f":
                branch_condition = cond_bit.negated()

        elif mnemonic in arch.instrs_jumps:
            if arch_mnemonic == "ppc:bctr":
                # Switch jump
                assert isinstance(node, SwitchNode)
                switch_expr = args.regs[Register("ctr")]
            elif arch_mnemonic == "mips:jr":
                # MIPS:
                if args.reg_ref(0) == arch.return_address_reg:
                    # Return from the function.
                    assert isinstance(node, ReturnNode)
                else:
                    # Switch jump.
                    assert isinstance(node, SwitchNode)
                    switch_expr = args.reg(0)
            elif arch_mnemonic == "ppc:blr":
                assert isinstance(node, ReturnNode)
            else:
                assert False, f"Unhandled jump mnemonic {arch_mnemonic}"

        elif mnemonic in arch.instrs_fn_call:
            # Resolve the call target expression per instruction form.
            if arch_mnemonic in ["mips:jal", "ppc:bl"]:
                fn_target = args.imm(0)
                if not (
                    (
                        isinstance(fn_target, AddressOf)
                        and isinstance(fn_target.expr, GlobalSymbol)
                    )
                    or isinstance(fn_target, Literal)
                ):
                    raise DecompFailure(
                        f"Target of function call must be a symbol, not {fn_target}"
                    )
            elif arch_mnemonic == "ppc:blrl":
                fn_target = args.regs[Register("lr")]
            elif arch_mnemonic == "ppc:bctrl":
                fn_target = args.regs[Register("ctr")]
            elif arch_mnemonic == "mips:jalr":
                fn_target = args.reg(1)
            else:
                assert False, f"Unhandled fn call mnemonic {arch_mnemonic}"

            fn_target = as_function_ptr(fn_target)
            fn_sig = fn_target.type.get_function_pointer_signature()
            assert fn_sig is not None, "known function pointers must have a signature"

            likely_regs: Dict[Register, bool] = {}
            for reg, data in regs.contents.items():
                # We use a much stricter filter for PPC than MIPS, because the same
                # registers can be used arguments & return values.
                # The ABI can also mix & match the rN & fN registers, which makes the
                # "require" heuristic less powerful.
                #
                # - `meta.inherited` will only be False for registers set in *this* basic block
                # - `meta.function_return` will only be accurate for registers set within this
                #   basic block because we have not called `propagate_register_meta` yet.
                #   Within this block, it will be True for registers that were return values.
                if arch.arch == Target.ArchEnum.PPC and (
                    data.meta.inherited or data.meta.function_return
                ):
                    likely_regs[reg] = False
                elif data.meta.in_pattern:
                    # Like `meta.function_return` mentioned above, `meta.in_pattern` will only be
                    # accurate for registers set within this basic block.
                    likely_regs[reg] = False
                elif isinstance(data.value, PassedInArg) and not data.value.copied:
                    likely_regs[reg] = False
                else:
                    likely_regs[reg] = True

            abi = arch.function_abi(fn_sig, likely_regs, for_call=True)

            # Gather the argument expressions slot by slot.
            func_args: List[Expression] = []
            for slot in abi.arg_slots:
                if slot.reg:
                    expr = regs[slot.reg]
                elif slot.offset in subroutine_args:
                    expr = subroutine_args.pop(slot.offset)
                else:
                    expr = ErrorExpr(
                        f"Unable to find stack arg {slot.offset:#x} in block"
                    )
                func_args.append(
                    CommentExpr.wrap(
                        as_type(expr, slot.type, True), prefix=slot.comment
                    )
                )

            for slot in abi.possible_slots:
                assert slot.reg is not None
                func_args.append(regs[slot.reg])

            # Add the arguments after a3.
            # TODO: limit this based on abi.arg_slots. If the function type is known
            # and not variadic, this list should be empty.
            for _, arg in sorted(subroutine_args.items()):
                if fn_sig.params_known and not fn_sig.is_variadic:
                    func_args.append(CommentExpr.wrap(arg, prefix="extra?"))
                else:
                    func_args.append(arg)

            if not fn_sig.params_known:
                while len(func_args) > len(fn_sig.params):
                    fn_sig.params.append(FunctionParam())
            # When the function signature isn't provided, the we only assume that each
            # parameter is "simple" (<=4 bytes, no return struct, etc.). This may not
            # match the actual function signature, but it's the best we can do.
            # Without that assumption, the logic from `function_abi` would be needed here.
            for i, (arg_expr, param) in enumerate(zip(func_args, fn_sig.params)):
                func_args[i] = as_type(arg_expr, param.type.decay(), True)

            # Reset subroutine_args, for the next potential function call.
            subroutine_args.clear()

            call: Expression = FuncCall(
                fn_target, func_args, fn_sig.return_type.weaken_void_ptr()
            )
            call = eval_once(call, emit_exactly_once=True, trivial=False, prefix="ret")

            # Clear out caller-save registers, for clarity and to ensure that
            # argument regs don't get passed into the next function.
            clear_caller_save_regs()

            # Clear out local var write tracking if any argument contains a stack
            # reference. That dict is used to track register saves/restores, which
            # are unreliable if we call a function with a stack reference.
            maybe_clear_local_var_writes(func_args)

            # Prevent reads and function calls from moving across this call.
            # This isn't really right, because this call might be moved later,
            # and then this prevention should also be... but it's the best we
            # can do with the current code architecture.
            prevent_later_function_calls()
            prevent_later_reads()

            # Install the return values declared as outputs of this instruction.
            return_reg_vals = arch.function_return(call)
            for out in instr.outputs:
                if not isinstance(out, Register):
                    continue
                val = return_reg_vals[out]
                if not isinstance(val, SecondF64Half):
                    val = eval_once(
                        val,
                        emit_exactly_once=False,
                        trivial=False,
                        prefix=stack_info.function.reg_formatter.format(out),
                    )
                regs.set_with_meta(out, val, RegMeta(function_return=True))

            has_function_call = True

        elif mnemonic in arch.instrs_float_comp:
            expr = arch.instrs_float_comp[mnemonic](args)
            regs[Register("condition_bit")] = expr

        elif mnemonic in arch.instrs_hi_lo:
            hi, lo = arch.instrs_hi_lo[mnemonic](args)
            set_reg(Register("hi"), hi)
            set_reg(Register("lo"), lo)

        elif mnemonic in arch.instrs_implicit_destination:
            reg, expr_fn = arch.instrs_implicit_destination[mnemonic]
            set_reg(reg, expr_fn(args))

        elif mnemonic in arch.instrs_ppc_compare:
            if instr.args[0] != Register("cr0"):
                raise DecompFailure(
                    f"Instruction {instr} not supported (first arg is not $cr0)"
                )
            set_reg(Register("cr0_eq"), arch.instrs_ppc_compare[mnemonic](args, "=="))
            set_reg(Register("cr0_gt"), arch.instrs_ppc_compare[mnemonic](args, ">"))
            set_reg(Register("cr0_lt"), arch.instrs_ppc_compare[mnemonic](args, "<"))
            set_reg(Register("cr0_so"), Literal(0))

        elif mnemonic in arch.instrs_no_dest:
            stmt = arch.instrs_no_dest[mnemonic](args)
            to_write.append(stmt)

        elif mnemonic.rstrip(".") in arch.instrs_destination_first:
            target = args.reg_ref(0)
            val = arch.instrs_destination_first[mnemonic.rstrip(".")](args)
            # TODO: IDO tends to keep variables within single registers. Thus,
            # if source = target, maybe we could overwrite that variable instead
            # of creating a new one?
            target_val = set_reg(target, val)
            mn_parts = arch_mnemonic.split(".")
            if arch_mnemonic.startswith("ppc:") and arch_mnemonic.endswith("."):
                # PPC instructions suffixed with . set condition bits (CR0) based on the result value
                if target_val is None:
                    target_val = val
                set_reg(
                    Register("cr0_eq"),
                    BinaryOp.icmp(target_val, "==", Literal(0, type=target_val.type)),
                )
                # Use manual casts for cr0_gt/cr0_lt so that the type of target_val is not modified
                # until the resulting bit is .use()'d.
                target_s32 = Cast(
                    target_val, reinterpret=True, silent=True, type=Type.s32()
                )
                set_reg(
                    Register("cr0_gt"),
                    BinaryOp(target_s32, ">", Literal(0), type=Type.s32()),
                )
                set_reg(
                    Register("cr0_lt"),
                    BinaryOp(target_s32, "<", Literal(0), type=Type.s32()),
                )
                set_reg(
                    Register("cr0_so"),
                    fn_op("MIPS2C_OVERFLOW", [target_val], type=Type.s32()),
                )
            elif (
                len(mn_parts) >= 2
                and mn_parts[0].startswith("mips:")
                and mn_parts[1] == "d"
            ) or arch_mnemonic == "mips:ldc1":
                # 64-bit float results occupy a register pair.
                set_reg(target.other_f64_reg(), SecondF64Half())

        elif mnemonic in arch.instrs_load_update:
            target = args.reg_ref(0)
            val = arch.instrs_load_update[mnemonic](args)
            set_reg(target, val)

            if arch_mnemonic in ["ppc:lwzux", "ppc:lhzux", "ppc:lbzux"]:
                # In `rD, rA, rB`, update `rA = rA + rB`
                update_reg = args.reg_ref(1)
                offset = args.reg(2)
            else:
                # In `rD, rA(N)`, update `rA = rA + N`
                update = args.memory_ref(1)
                if not isinstance(update, AddressMode):
                    raise DecompFailure(
                        f"Unhandled store-and-update arg in {instr}: {update!r}"
                    )
                update_reg = update.rhs
                offset = Literal(update.offset)

            if update_reg == target:
                raise DecompFailure(
                    f"Invalid instruction, rA and rD must be different in {instr}"
                )

            set_reg(update_reg, add_imm(args.regs[update_reg], offset, stack_info))

        else:
            # Unrecognized instruction: emit an ErrorExpr placeholder.
            expr = ErrorExpr(f"unknown instruction: {instr}")
            if arch_mnemonic.startswith("ppc:") and arch_mnemonic.endswith("."):
                # Unimplemented PPC instructions that modify CR0
                set_reg(Register("cr0_eq"), expr)
                set_reg(Register("cr0_gt"), expr)
                set_reg(Register("cr0_lt"), expr)
                set_reg(Register("cr0_so"), expr)
            if args.count() >= 1 and isinstance(args.raw_arg(0), Register):
                reg = args.reg_ref(0)
                expr = eval_once(
                    expr,
                    emit_exactly_once=True,
                    trivial=False,
                    prefix=stack_info.function.reg_formatter.format(reg),
                )
                if reg != Register("zero"):
                    set_reg_maybe_return(reg, expr)
            else:
                to_write.append(ExprStmt(expr))

    for instr in node.block.instructions:
        with regs.current_instr(instr):
            process_instr(instr)

    if branch_condition is not None:
        branch_condition.use()
    switch_control: Optional[SwitchControl] = None
    if switch_expr is not None:
        switch_control = SwitchControl.from_expr(switch_expr)
        switch_control.control_expr.use()

    return BlockInfo(
        to_write=to_write,
        return_value=None,
        switch_control=switch_control,
        branch_condition=branch_condition,
        final_register_states=regs,
        has_function_call=has_function_call,
    )
def translate_graph_from_block(
    node: Node,
    regs: RegInfo,
    stack_info: StackInfo,
    used_phis: List[PhiExpr],
    return_blocks: List[BlockInfo],
    options: Options,
) -> None:
    """
    Given a FlowGraph node and a dictionary of register contents, give that node
    its appropriate BlockInfo (which contains the AST of its code).

    Recurses into every node this one immediately dominates, seeding each
    child's register state (including phi registers) from this node's final
    state, so the whole graph is eventually translated.
    """

    if options.debug:
        print(f"\nNode in question: {node}")

    # Translate the given node and discover final register states.
    try:
        block_info = translate_node_body(node, regs, stack_info)
        if options.debug:
            print(block_info)
    except Exception as e:  # TODO: handle issues better
        if options.stop_on_error:
            raise

        # Unwrap InstrProcessingFailure to report the underlying cause and
        # the instruction that triggered it.
        instr: Optional[Instruction] = None
        if isinstance(e, InstrProcessingFailure) and isinstance(e.__cause__, Exception):
            instr = e.instr
            e = e.__cause__

        if isinstance(e, DecompFailure):
            emsg = str(e)
            print(emsg)
        else:
            tb = e.__traceback__
            traceback.print_exception(None, e, tb)
            emsg = str(e) or traceback.format_tb(tb)[-1]
            emsg = emsg.strip().split("\n")[-1].strip()

        # Substitute a BlockInfo that embeds the error as comments so the
        # overall output can still be produced.
        error_stmts: List[Statement] = [CommentStmt(f"Error: {emsg}")]
        if instr is not None:
            print(
                f"Error occurred while processing instruction: {instr}", file=sys.stderr
            )
            error_stmts.append(CommentStmt(f"At instruction: {instr}"))
        print(file=sys.stderr)
        block_info = BlockInfo(
            to_write=error_stmts,
            return_value=None,
            switch_control=None,
            branch_condition=ErrorExpr(),
            final_register_states=regs,
            has_function_call=False,
        )

    node.block.add_block_info(block_info)
    if isinstance(node, ReturnNode):
        return_blocks.append(block_info)

    # Translate everything dominated by this node, now that we know our own
    # final register state. This will eventually reach every node.
    for child in node.immediately_dominates:
        if isinstance(child, TerminalNode):
            continue
        # Seed the child's registers with this node's values, marked inherited.
        new_regs = RegInfo(stack_info=stack_info)
        for reg, data in regs.contents.items():
            new_regs.set_with_meta(
                reg, data.value, RegMeta(inherited=True, force=data.meta.force)
            )

        # Registers clobbered between here and the child need phi handling:
        # keep them (as register vars or PhiExprs) when always set, else drop.
        phi_regs = (
            r for r in locs_clobbered_until_dominator(child) if isinstance(r, Register)
        )
        for reg in phi_regs:
            if reg_always_set(child, reg, dom_set=(reg in regs)):
                expr: Optional[Expression] = stack_info.maybe_get_register_var(reg)
                if expr is None:
                    expr = PhiExpr(
                        reg=reg, node=child, used_phis=used_phis, type=Type.any_reg()
                    )
                new_regs.set_with_meta(reg, expr, RegMeta(inherited=True))
            elif reg in new_regs:
                del new_regs[reg]
        translate_graph_from_block(
            child, new_regs, stack_info, used_phis, return_blocks, options
        )
def resolve_types_late(stack_info: StackInfo) -> None:
    """Run a final type-resolution pass after a function has been translated."""
    # Because type unification is delayed, some stack locations can only now be
    # recognized as "weak"; touching each one via get_stack_var marks it.
    for loc in stack_info.weak_stack_var_types.keys():
        stack_info.get_stack_var(loc, store=False)

    # Infer pointer element types from observed dereferences.
    for var, offsets in stack_info.get_struct_type_map().items():
        if set(offsets) == {0}:
            # Only offset 0 was ever accessed, so `var` was probably a plain
            # pointer rather than a struct; unify with the matching pointer
            # type to fill in the element type if it is still unknown.
            var.type.unify(Type.ptr(offsets[0]))
@dataclass
class FunctionInfo:
    """Result of translating one assembly function into an AST."""

    stack_info: StackInfo  # stack layout, locals, arguments, temporaries
    flow_graph: FlowGraph  # control-flow graph with per-block AST info attached
    return_type: Type  # unified return type of the function
    symbol: GlobalSymbol  # global symbol representing the function itself
@dataclass
class GlobalInfo:
    """Shared per-run state: assembly data, target/arch info, type maps, and a
    cache of GlobalSymbols created on demand by address_of_gsym()."""

    asm_data: AsmData
    arch: Arch
    target: Target
    local_functions: Set[str]  # names of functions defined in this unit
    typemap: TypeMap
    typepool: TypePool
    # Cache of symbols already materialized; populated lazily.
    global_symbol_map: Dict[str, GlobalSymbol] = field(default_factory=dict)

    def asm_data_value(self, sym_name: str) -> Optional[AsmDataEntry]:
        """Look up the raw data entry for `sym_name`, if any."""
        return self.asm_data.values.get(sym_name)

    def address_of_gsym(self, sym_name: str) -> AddressOf:
        """Return an AddressOf expression for the global named `sym_name`,
        creating (and caching) its GlobalSymbol on first use."""
        if sym_name in self.global_symbol_map:
            sym = self.global_symbol_map[sym_name]
        else:
            demangled_symbol: Optional[CxxSymbol] = None
            demangled_str: Optional[str] = None
            if self.target.language == Target.LanguageEnum.CXX:
                try:
                    demangled_symbol = demangle_codewarrior_parse(sym_name)
                except ValueError:
                    pass
                else:
                    demangled_str = str(demangled_symbol)
            sym = self.global_symbol_map[sym_name] = GlobalSymbol(
                symbol_name=sym_name,
                type=Type.any(),
                asm_data_entry=self.asm_data_value(sym_name),
                demangled_str=demangled_str,
            )

            # If the symbol is a C++ vtable, try to build a custom type for it by parsing it
            if (
                self.target.language == Target.LanguageEnum.CXX
                and sym_name.startswith("__vt__")
                and sym.asm_data_entry is not None
            ):
                sym.type.unify(self.vtable_type(sym_name, sym.asm_data_entry))

            # Prefer a function declaration from the context; fall back to a
            # plain variable declaration.
            fn = self.typemap.functions.get(sym_name)
            ctype: Optional[CType]
            if fn is not None:
                ctype = fn.type
            else:
                ctype = self.typemap.var_types.get(sym_name)

            if ctype is not None:
                sym.symbol_in_context = True
                sym.initializer_in_typemap = (
                    sym_name in self.typemap.vars_with_initializers
                )
                sym.type.unify(Type.ctype(ctype, self.typemap, self.typepool))
                if sym_name not in self.typepool.unknown_decls:
                    sym.type_provided = True
            elif sym_name in self.local_functions:
                sym.type.unify(Type.function())

            # Do this after unifying the type in the typemap, so that it has lower precedence
            if demangled_symbol is not None:
                sym.type.unify(
                    Type.demangled_symbol(self.typemap, self.typepool, demangled_symbol)
                )

        return AddressOf(sym, type=sym.type.reference())

    def vtable_type(self, sym_name: str, asm_data_entry: AsmDataEntry) -> Type:
        """
        Parse MWCC vtable data to create a custom struct to represent it.
        This format is not well documented, but is briefly explored in this series of posts:
        https://web.archive.org/web/20220413174849/http://hacksoflife.blogspot.com/2007/02/c-objects-part-2-single-inheritance.html
        """
        size = asm_data_entry.size_range_bytes()[1]
        struct = StructDeclaration.unknown(
            self.typepool, size=size, align=4, tag_name=sym_name
        )
        offset = 0
        for entry in asm_data_entry.data:
            if isinstance(entry, bytes):
                # MWCC vtables start with a pointer to a typeid struct (or NULL) and an offset
                if len(entry) % 4 != 0:
                    raise DecompFailure(
                        f"Unable to parse misaligned vtable data in {sym_name}"
                    )
                # Raw bytes become anonymous 4-byte integer fields.
                for i in range(len(entry) // 4):
                    field_name = f"{struct.new_field_prefix}{offset:X}"
                    struct.try_add_field(
                        Type.reg32(likely_float=False), offset, field_name, size=4
                    )
                    offset += 4
            else:
                # A label entry: a function pointer slot. Use the demangled
                # base name for the field when it can be parsed.
                entry_name = entry
                try:
                    demangled_field_sym = demangle_codewarrior_parse(entry)
                    if demangled_field_sym.name.qualified_name is not None:
                        entry_name = str(demangled_field_sym.name.qualified_name[-1])
                except ValueError:
                    pass
                field = struct.try_add_field(
                    self.address_of_gsym(entry).type,
                    offset,
                    name=entry_name,
                    size=4,
                )
                assert field is not None
                field.known = True
                offset += 4
        return Type.struct(struct)

    def is_function_known_void(self, sym_name: str) -> bool:
        """Return True if the function exists in the context, and has no return value"""
        fn = self.typemap.functions.get(sym_name)
        if fn is None:
            return False
        return fn.ret_type is None

    def initializer_for_symbol(
        self, sym: GlobalSymbol, fmt: Formatter
    ) -> Optional[str]:
        """Try to render `sym`'s data entry as a C initializer string; returns
        None when the data cannot be expressed for the symbol's type."""
        assert sym.asm_data_entry is not None
        # Work on a copy: the nested readers below consume `data` in place.
        data = sym.asm_data_entry.data[:]

        def read_uint(n: int) -> Optional[int]:
            """Read the next `n` bytes from `data` as an (long) integer"""
            assert 0 < n <= 8
            if not data or not isinstance(data[0], bytes):
                return None
            if len(data[0]) < n:
                return None
            bs = data[0][:n]
            data[0] = data[0][n:]
            if not data[0]:
                del data[0]
            value = 0
            for b in bs:
                value = (value << 8) | b
            return value

        def read_pointer() -> Optional[Expression]:
            """Read the next label from `data`"""
            if not data or not isinstance(data[0], str):
                return None
            label = data[0]
            data.pop(0)
            return self.address_of_gsym(label)

        def for_type(type: Type) -> Optional[str]:
            """Return the initializer for a single element of type `type`"""
            if type.is_struct() or type.is_array():
                struct_fields = type.get_initializer_fields()
                if not struct_fields:
                    return None
                members = []
                for field in struct_fields:
                    if isinstance(field, int):
                        # Check that all padding bytes are 0
                        for i in range(field):
                            padding = read_uint(1)
                            if padding != 0:
                                return None
                    else:
                        m = for_type(field)
                        if m is None:
                            return None
                        members.append(m)
                return fmt.format_array(members)

            if type.is_reg():
                size = type.get_size_bytes()
                if not size:
                    return None

                if size == 4:
                    # A 4-byte slot may hold a pointer (a label in the data).
                    ptr = read_pointer()
                    if ptr is not None:
                        return as_type(ptr, type, silent=True).format(fmt)

                value = read_uint(size)
                if value is not None:
                    enum_name = type.get_enum_name(value)
                    if enum_name is not None:
                        return enum_name
                    expr = as_type(Literal(value), type, True)
                    return elide_casts_for_store(expr).format(fmt)

            # Type kinds K_FN and K_VOID do not have initializers
            return None

        return for_type(sym.type)

    def find_forward_declares_needed(self, functions: List[FunctionInfo]) -> Set[str]:
        """Return names of local functions referenced by an earlier function
        before their own definition appears, i.e. needing a forward decl."""
        funcs_seen = set()
        forward_declares_needed = self.asm_data.mentioned_labels

        for func in functions:
            funcs_seen.add(func.stack_info.function.name)

            for instr in func.stack_info.function.body:
                if not isinstance(instr, Instruction):
                    continue

                for arg in instr.args:
                    if isinstance(arg, AsmGlobalSymbol):
                        func_name = arg.symbol_name
                    elif isinstance(arg, Macro) and isinstance(
                        arg.argument, AsmGlobalSymbol
                    ):
                        func_name = arg.argument.symbol_name
                    else:
                        continue

                    if func_name in self.local_functions:
                        if func_name not in funcs_seen:
                            forward_declares_needed.add(func_name)

        return forward_declares_needed

    def global_decls(
        self,
        fmt: Formatter,
        decls: Options.GlobalDeclsEnum,
        functions: List[FunctionInfo],
    ) -> str:
        """Render declarations for all referenced globals as C source text."""
        # Format labels from symbol_type_map into global declarations.
        # As the initializers are formatted, this may cause more symbols
        # to be added to the global_symbol_map.
        forward_declares_needed = self.find_forward_declares_needed(functions)

        lines = []
        processed_names: Set[str] = set()
        while True:
            names: AbstractSet[str] = self.global_symbol_map.keys()
            if decls == Options.GlobalDeclsEnum.ALL:
                names |= self.asm_data.values.keys()
            names -= processed_names
            if not names:
                break
            for name in sorted(names):
                processed_names.add(name)
                sym = self.address_of_gsym(name).expr
                assert isinstance(sym, GlobalSymbol)
                data_entry = sym.asm_data_entry

                # Is the label defined in this unit (in the active AsmData file(s))
                is_in_file = data_entry is not None or name in self.local_functions
                # Is the label externally visible (mentioned in the context file)
                is_global = sym.symbol_in_context
                # Is the label a symbol in .rodata?
                is_const = data_entry is not None and data_entry.is_readonly

                if data_entry and data_entry.is_jtbl:
                    # Skip jump tables
                    continue
                if is_in_file and is_global and sym.type.is_function():
                    # Skip externally-declared functions that are defined here
                    continue
                if self.local_functions == {name}:
                    # Skip the function being decompiled if just a single one
                    continue
                if not is_in_file and sym.type_provided:
                    # Skip externally-declared symbols that are defined in other files
                    continue

                # TODO: Use original MIPSFile ordering for variables
                sort_order = (
                    not sym.type.is_function(),
                    is_global,
                    is_in_file,
                    is_const,
                    name,
                )
                qualifier = ""
                value: Optional[str] = None
                comments = []

                # Determine type qualifier: static, extern, or neither
                if is_in_file and is_global:
                    qualifier = ""
                elif is_in_file:
                    qualifier = "static"
                else:
                    qualifier = "extern"

                if sym.type.is_function():
                    # Function decls carry the qualifier as a comment instead.
                    comments.append(qualifier)
                    qualifier = ""

                # Try to guess if the symbol is an array (and if it is, its dimension) if
                # we have a data entry for it, and the symbol is either not in the typemap
                # or was a variable-length array there ("VLA", e.g. `int []`)
                # (Otherwise, if the dim is provided by the typemap, we trust it.)
                element_type, array_dim = sym.type.get_array()
                is_vla = element_type is not None and (
                    array_dim is None or array_dim <= 0
                )
                if data_entry and (not sym.type_provided or is_vla):
                    # The size of the data entry is uncertain, because of padding
                    # between sections. Generally `(max_data_size - data_size) < 16`.
                    min_data_size, max_data_size = data_entry.size_range_bytes()
                    # The size of the element type (not the size of the array type)
                    if element_type is None:
                        element_type = sym.type

                    # If we don't know the type, we can't guess the array_dim
                    type_size = element_type.get_size_bytes()
                    if type_size:
                        potential_dim, extra_bytes = sym.potential_array_dim(type_size)
                        if potential_dim == 0 and extra_bytes > 0:
                            # The type is too big for our data. (not an array)
                            comments.append(
                                f"type too large by {fmt.format_int(type_size - extra_bytes)}"
                            )
                        elif potential_dim > 1 or is_vla:
                            # NB: In general, replacing the types of Expressions can be sketchy.
                            # However, the GlobalSymbol here came from address_of_gsym(), which
                            # always returns a reference to the element_type.
                            array_dim = potential_dim
                            sym.type = Type.array(element_type, array_dim)

                        if potential_dim != 0 and extra_bytes > 0:
                            comments.append(
                                f"extra bytes: {fmt.format_int(extra_bytes)}"
                            )

                # Try to convert the data from .data/.rodata into an initializer
                if data_entry and not data_entry.is_bss:
                    value = self.initializer_for_symbol(sym, fmt)
                    if value is None:
                        # This warning helps distinguish .bss symbols from .data/.rodata,
                        # IDO only puts symbols in .bss if they don't have any initializer
                        comments.append("unable to generate initializer")

                if is_const:
                    comments.append("const")

                    # Float & string constants are almost always inlined and can be omitted
                    if sym.is_string_constant():
                        continue
                    if array_dim is None and sym.type.is_likely_float():
                        continue

                # In "none" mode, do not emit any decls
                if decls == Options.GlobalDeclsEnum.NONE:
                    continue
                # In modes except "all", skip the decl if the context file already had an initializer
                if decls != Options.GlobalDeclsEnum.ALL and sym.initializer_in_typemap:
                    continue
                # In modes except "all", skip vtable decls when compiling C++
                if (
                    decls != Options.GlobalDeclsEnum.ALL
                    and self.target.language == Target.LanguageEnum.CXX
                    and name.startswith("__vt__")
                ):
                    continue

                # Skip local functions that nothing references before their
                # definition (no forward declaration needed).
                if (
                    sym.type.is_function()
                    and decls != Options.GlobalDeclsEnum.ALL
                    and name in self.local_functions
                    and name not in forward_declares_needed
                ):
                    continue

                qualifier = f"{qualifier} " if qualifier else ""
                value = f" = {value}" if value else ""
                lines.append(
                    (
                        sort_order,
                        fmt.with_comments(
                            f"{qualifier}{sym.type.to_decl(name, fmt)}{value};",
                            comments,
                        )
                        + "\n",
                    )
                )
        lines.sort()
        return "".join(line for _, line in lines)
def narrow_func_call_outputs(
    function: Function,
    global_info: GlobalInfo,
) -> None:
    """
    Use the context file to narrow the `outputs` list of call Instructions.

    For now this only handles known-void functions, but in the future it could
    be extended to select a specific register subset based on type.
    """
    for instr in function.body:
        if not isinstance(instr, Instruction):
            continue
        target = instr.function_target
        if not isinstance(target, AsmGlobalSymbol):
            continue
        # A call to a function the context declares as void produces nothing.
        if global_info.is_function_known_void(target.symbol_name):
            instr.outputs.clear()
def translate_to_ast(
    function: Function,
    flow_graph: FlowGraph,
    options: Options,
    global_info: GlobalInfo,
) -> FunctionInfo:
    """
    Given a function, produce a FlowGraph that both contains control-flow
    information and has AST transformations for each block of code and
    branch condition.
    """
    # Initialize info about the function.
    stack_info = get_stack_info(function, global_info, flow_graph)
    start_regs: RegInfo = RegInfo(stack_info=stack_info)

    # Seed the initial register state: the stack pointer and symbolic values
    # for each callee-saved register.
    arch = global_info.arch
    start_regs[arch.stack_pointer_reg] = GlobalSymbol("sp", type=Type.ptr())
    for reg in arch.saved_regs:
        start_regs[reg] = stack_info.saved_reg_symbol(reg.register_name)

    fn_sym = global_info.address_of_gsym(function.name).expr
    assert isinstance(fn_sym, GlobalSymbol)

    fn_type = fn_sym.type
    fn_type.unify(Type.function())
    fn_sig = Type.ptr(fn_type).get_function_pointer_signature()
    assert fn_sig is not None, "fn_type is known to be a function"
    return_type = fn_sig.return_type
    stack_info.is_variadic = fn_sig.is_variadic

    def make_arg(offset: int, type: Type) -> PassedInArg:
        # Arguments are word-aligned on the stack.
        assert offset % 4 == 0
        return PassedInArg(offset, copied=False, stack_info=stack_info, type=type)

    # Ask the arch for the ABI argument layout, then populate known and
    # possible argument slots into the starting register state.
    abi = arch.function_abi(
        fn_sig,
        likely_regs={reg: True for reg in arch.argument_regs},
        for_call=False,
    )
    for slot in abi.arg_slots:
        stack_info.add_known_param(slot.offset, slot.name, slot.type)
        if slot.reg is not None:
            start_regs.set_with_meta(
                slot.reg, make_arg(slot.offset, slot.type), RegMeta(uninteresting=True)
            )
    for slot in abi.possible_slots:
        if slot.reg is not None:
            start_regs.set_with_meta(
                slot.reg, make_arg(slot.offset, slot.type), RegMeta(uninteresting=True)
            )

    # Interpret the --reg-vars option: either one of the keyword presets, or
    # an explicit list of register names to parse.
    if options.reg_vars == ["saved"]:
        reg_vars = arch.saved_regs
    elif options.reg_vars == ["most"]:
        reg_vars = arch.saved_regs + arch.simple_temp_regs
    elif options.reg_vars == ["all"]:
        reg_vars = arch.saved_regs + arch.simple_temp_regs + arch.argument_regs
    else:
        reg_vars = [
            stack_info.function.reg_formatter.parse(x, arch) for x in options.reg_vars
        ]
    for reg in reg_vars:
        reg_name = stack_info.function.reg_formatter.format(reg)
        stack_info.add_register_var(reg, reg_name)

    if options.debug:
        print(stack_info)
        print("\nNow, we attempt to translate:")

    used_phis: List[PhiExpr] = []
    return_blocks: List[BlockInfo] = []
    translate_graph_from_block(
        flow_graph.entry_node(),
        start_regs,
        stack_info,
        used_phis,
        return_blocks,
        options,
    )

    for reg in arch.base_return_regs:
        propagate_register_meta(flow_graph.nodes, reg)

    # Decide whether the function returns a value, and through which register;
    # then coerce each return block's value to the unified return type.
    return_reg: Optional[Register] = None

    if not options.void and not return_type.is_void():
        return_reg = determine_return_register(
            return_blocks, fn_sym.type_provided, arch
        )

    if return_reg is not None:
        for b in return_blocks:
            if return_reg in b.final_register_states:
                ret_val = b.final_register_states[return_reg]
                ret_val = as_type(ret_val, return_type, True)
                ret_val.use()
                b.return_value = ret_val
    else:
        return_type.unify(Type.void())

    # If the signature did not fix the parameter list, grow it to match the
    # arguments discovered during translation and unify their types.
    if not fn_sig.params_known:
        while len(fn_sig.params) < len(stack_info.arguments):
            fn_sig.params.append(FunctionParam())
        for param, arg in zip(fn_sig.params, stack_info.arguments):
            param.type.unify(arg.type)
            if not param.name:
                param.name = arg.format(Formatter())

    assign_phis(used_phis, stack_info)
    resolve_types_late(stack_info)

    if options.pdb_translate:
        # Drop into the debugger with local/temp/phi vars bound by name in `v`.
        import pdb

        v: Dict[str, object] = {}
        fmt = Formatter()
        for local in stack_info.local_vars:
            var_name = local.format(fmt)
            v[var_name] = local
        for temp in stack_info.temp_vars:
            if temp.need_decl():
                var_name = temp.expr.var.format(fmt)
                v[var_name] = temp.expr
        for phi in stack_info.phi_vars:
            assert phi.name is not None
            v[phi.name] = phi
        pdb.set_trace()

    return FunctionInfo(stack_info, flow_graph, return_type, fn_sym)
| |
query_plan.go
|
/*
Copyright 2021 The Vitess Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package vexec
import (
"context"
"fmt"
"sync"
"vitess.io/vitess/go/vt/concurrency"
"vitess.io/vitess/go/vt/log"
"vitess.io/vitess/go/vt/sqlparser"
"vitess.io/vitess/go/vt/topo"
"vitess.io/vitess/go/vt/vterrors"
"vitess.io/vitess/go/vt/vttablet/tmclient"
querypb "vitess.io/vitess/go/vt/proto/query"
)
// QueryPlan wraps a planned query produced by a QueryPlanner. It is safe to
// execute a QueryPlan repeatedly and in multiple goroutines.
type QueryPlan struct {
	// ParsedQuery is the query ready for execution. It is nil until a
	// planner has prepared the plan (see Execute's ErrUnpreparedQuery check).
	ParsedQuery *sqlparser.ParsedQuery

	// workflow is the workflow name the query was planned against; used in
	// log messages.
	workflow string
	// tmc is the tablet manager client used to run the query on targets.
	tmc tmclient.TabletManagerClient
}
// Execute executes a QueryPlan on a single target.
func (qp *QueryPlan) Execute(ctx context.Context, target *topo.TabletInfo) (qr *querypb.QueryResult, err error) {
if qp.ParsedQuery == nil {
return nil, fmt.Errorf("%w: call PlanQuery on a query planner first", ErrUnpreparedQuery)
}
targetAliasStr := target.AliasString()
log.Infof("Running %v on %v", qp.ParsedQuery.Query, targetAliasStr)
defer func() {
if err != nil {
log.Warningf("Result on %v: %v", targetAliasStr, err)
return
}
|
qr, err = qp.tmc.VReplicationExec(ctx, target.Tablet, qp.ParsedQuery.Query)
if err != nil {
return nil, err
}
if qr.RowsAffected == 0 {
log.Infof("no matching streams found for workflows %s, tablet %s, query %s", qp.workflow, targetAliasStr, qp.ParsedQuery.Query)
}
return qr, nil
}
// ExecuteScatter executes a QueryPlan on multiple targets concurrently,
// returning a mapping of target tablet to querypb.QueryResult. Errors from
// individual targets are aggregated into a singular error.
func (qp *QueryPlan) ExecuteScatter(ctx context.Context, targets ...*topo.TabletInfo) (map[*topo.TabletInfo]*querypb.QueryResult, error) {
	if qp.ParsedQuery == nil {
		// This check is an "optimization" on error handling. We check here,
		// even though we will check this during the individual Execute calls,
		// so that we return one error, rather than the same error aggregated
		// len(targets) times.
		return nil, fmt.Errorf("%w: call PlanQuery on a query planner first", ErrUnpreparedQuery)
	}

	var (
		// m guards concurrent writes to results from the worker goroutines.
		m       sync.Mutex
		wg      sync.WaitGroup
		rec     concurrency.AllErrorRecorder
		results = make(map[*topo.TabletInfo]*querypb.QueryResult, len(targets))
	)

	// Fan out one goroutine per target; each records either a result or an
	// error, never both.
	for _, target := range targets {
		wg.Add(1)

		go func(ctx context.Context, target *topo.TabletInfo) {
			defer wg.Done()

			qr, err := qp.Execute(ctx, target)
			if err != nil {
				rec.RecordError(err)

				return
			}

			m.Lock()
			defer m.Unlock()

			results[target] = qr
		}(ctx, target)
	}

	wg.Wait()

	return results, rec.AggrError(vterrors.Aggregate)
}
|
log.Infof("Result on %v: %v", targetAliasStr, qr)
}()
|
get_toolchain_if_necessary.py
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Downloads and unpacks a toolchain for building on Windows. The contents are
matched by sha1 which will be updated when the toolchain is updated.
Having a toolchain script in depot_tools means that it's not versioned
directly with the source code. That is, if the toolchain is upgraded, but
you're trying to build an historical version of Chromium from before the
toolchain upgrade, this will cause you to build with a newer toolchain than
was available when that code was committed. This is done for a two main
reasons: 1) it would likely be annoying to have the up-to-date toolchain
removed and replaced by one without a service pack applied); 2) it would
require maintaining scripts that can build older not-up-to-date revisions of
the toolchain. This is likely to be a poorly tested code path that probably
won't be properly maintained. See http://crbug.com/323300.
This does not extend to major versions of the toolchain however, on the
assumption that there are more likely to be source incompatibilities between
major revisions. This script calls a subscript (currently, toolchain2013.py)
to do the main work. It is expected that toolchain2013.py will always be able
to acquire/build the most current revision of a VS2013-based toolchain. In the
future when a hypothetical VS2015 is released, the 2013 script will be
maintained, and a new 2015 script would be added.
"""
import hashlib
import json
import optparse
import os
import platform
import shutil
import subprocess
import sys
import tempfile
import time
import zipfile
# Environment variable that, if set, specifies the default Visual Studio
# toolchain root directory to use.
ENV_TOOLCHAIN_ROOT = 'DEPOT_TOOLS_WIN_TOOLCHAIN_ROOT'
# winreg isn't natively available under CygWin
if sys.platform == "win32":
try:
import winreg
except ImportError:
import _winreg as winreg
elif sys.platform == "cygwin":
try:
import cygwinreg as winreg
except ImportError:
print ''
print 'CygWin does not natively support winreg but a replacement exists.'
print 'https://pypi.python.org/pypi/cygwinreg/'
print ''
print 'Try: easy_install cygwinreg'
print ''
raise
# Locate depot_tools relative to this script so sibling modules are importable.
# (BASEDIR was previously used below without being defined.)
BASEDIR = os.path.dirname(os.path.abspath(__file__))
DEPOT_TOOLS_PATH = os.path.join(BASEDIR, '..')
sys.path.append(DEPOT_TOOLS_PATH)
try:
  import download_from_google_storage
except ImportError:
  # Allow use of utility functions in this script from package_from_installed
  # on bare VM that doesn't have a full depot_tools.
  pass
def GetFileList(root):
  """Gets a normalized list of files under |root|."""
  assert not os.path.isabs(root)
  assert os.path.normpath(root) == root

  # Ignore WER ReportQueue entries that vctip/cl leave in the bin dir if/when
  # they crash, and the win_sdk/debuggers/x(86|64)/sym/ directories that
  # Windbg may use as temporary symbol storage. These paths only appear on a
  # Windows host, so the filter is a no-op elsewhere.
  skipped_fragments = ['wer\\reportqueue',
                       'win_sdk\\debuggers\\x86\\sym\\',
                       'win_sdk\\debuggers\\x64\\sym\\']

  collected = []
  for base, _, names in os.walk(root):
    for name in names:
      path = os.path.join(base, name)
      lowered = path.lower()
      if any(fragment in lowered for fragment in skipped_fragments):
        continue
      collected.append(path)

  # Normalize ordering so hashes are stable across path-separator styles.
  return sorted(collected, key=lambda s: s.replace('/', '\\').lower())
def MakeTimestampsFileName(root, sha1):
  """Returns the path of the timestamp-cache file for toolchain |sha1|; it
  lives one directory above |root|."""
  return os.path.join(root, os.pardir, sha1 + '.timestamps')
def CalculateHash(root, expected_hash):
  """Calculates the sha1 of the paths to all files in the given |root| and the
  contents of those files, and returns as a hex string.

  |expected_hash| is the expected hash value for this toolchain if it has
  already been installed.
  """
  if expected_hash:
    full_root_path = os.path.join(root, expected_hash)
  else:
    full_root_path = root

  file_list = GetFileList(full_root_path)

  # Check whether we previously saved timestamps in $root/../{sha1}.timestamps.
  # If we didn't, or they don't match, then do the full calculation, otherwise
  # return the saved value.
  timestamps_file = MakeTimestampsFileName(root, expected_hash)
  timestamps_data = {'files': [], 'sha1': ''}
  if os.path.exists(timestamps_file):
    with open(timestamps_file, 'rb') as f:
      try:
        timestamps_data = json.load(f)
      except ValueError:
        # json couldn't be loaded, empty data will force a re-hash.
        pass

  matches = len(file_list) == len(timestamps_data['files'])
  # Don't check the timestamp of the version file as we touch this file to
  # indicates which versions of the toolchain are still being used.
  vc_dir = os.path.join(full_root_path, 'VC').lower()
  if matches:
    # Same file count: verify each path and mtime against the cache.
    for disk, cached in zip(file_list, timestamps_data['files']):
      if disk != cached[0] or (
          disk != vc_dir and os.path.getmtime(disk) != cached[1]):
        matches = False
        break
  elif os.path.exists(timestamps_file):
    # Print some information about the extra/missing files. Don't do this if we
    # don't have a timestamp file, as all the files will be considered as
    # missing.
    timestamps_data_files = []
    for f in timestamps_data['files']:
      timestamps_data_files.append(f[0])
    missing_files = [f for f in timestamps_data_files if f not in file_list]
    if len(missing_files):
      print ('%d files missing from the %s version of the toolchain:' %
             (len(missing_files), expected_hash))
      for f in missing_files[:10]:
        print '\t%s' % f
      if len(missing_files) > 10:
        print '\t...'
    extra_files = [f for f in file_list if f not in timestamps_data_files]
    if len(extra_files):
      print ('%d extra files in the %s version of the toolchain:' %
             (len(extra_files), expected_hash))
      for f in extra_files[:10]:
        print '\t%s' % f
      if len(extra_files) > 10:
        print '\t...'
  if matches:
    # Cache hit: skip the expensive re-hash entirely.
    return timestamps_data['sha1']

  # Make long hangs when updating the toolchain less mysterious.
  print 'Calculating hash of toolchain in %s. Please wait...' % full_root_path
  sys.stdout.flush()
  digest = hashlib.sha1()
  for path in file_list:
    # Hash both the (normalized, hash-stripped) path and the file contents.
    path_without_hash = str(path).replace('/', '\\')
    if expected_hash:
      path_without_hash = path_without_hash.replace(
          os.path.join(root, expected_hash).replace('/', '\\'), root)
    digest.update(path_without_hash.lower())
    with open(path, 'rb') as f:
      digest.update(f.read())

  # Save the timestamp file if the calculated hash is the expected one.
  if digest.hexdigest() == expected_hash:
    SaveTimestampsAndHash(root, digest.hexdigest())
  return digest.hexdigest()
def CalculateToolchainHashes(root, remove_corrupt_toolchains):
  """Calculate the hash of the different toolchains installed in the |root|
  directory."""
  valid_hashes = []
  for entry in os.listdir(root):
    if not os.path.isdir(os.path.join(root, entry)):
      continue
    actual_hash = CalculateHash(root, entry)
    if actual_hash == entry:
      valid_hashes.append(actual_hash)
    else:
      # A directory whose contents no longer hash to its own name is corrupt.
      print ('The hash of a version of the toolchain has an unexpected value ('
             '%s instead of %s)%s.' % (actual_hash, entry,
             ', removing it' if remove_corrupt_toolchains else ''))
      if remove_corrupt_toolchains:
        RemoveToolchain(root, entry, True)
  return valid_hashes
def SaveTimestampsAndHash(root, sha1):
  """Saves timestamps and the final hash to be able to early-out more quickly
  next time."""
  entries = []
  for path in GetFileList(os.path.join(root, sha1)):
    entries.append([path, os.path.getmtime(path)])
  payload = {
    'files': entries,
    'sha1': sha1,
  }
  # Written one level above the toolchain dir as <sha1>.timestamps.
  with open(MakeTimestampsFileName(root, sha1), 'wb') as f:
    json.dump(payload, f)
def HaveSrcInternalAccess():
  """Checks whether access to src-internal is available."""
  with open(os.devnull, 'w') as nul:
    # This is required to avoid modal dialog boxes after Git 2.14.1 and Git
    # Credential Manager for Windows 1.12. See https://crbug.com/755694 and
    # https://github.com/Microsoft/Git-Credential-Manager-for-Windows/issues/482.
    child_env = dict(os.environ, GCM_INTERACTIVE='NEVER')
    # Exit code 0 from `git remote show` means the remote is reachable.
    return subprocess.call(
        ['git', '-c', 'core.askpass=true', 'remote', 'show',
         'https://chrome-internal.googlesource.com/chrome/src-internal/'],
        shell=True, stdin=nul, stdout=nul, stderr=nul, env=child_env) == 0
def LooksLikeGoogler():
  """Checks for a USERDOMAIN environment variable of 'GOOGLE', which
  probably implies the current user is a Googler."""
  domain = os.environ.get('USERDOMAIN', '')
  return domain.upper() == 'GOOGLE'
def CanAccessToolchainBucket():
  """Checks whether the user has access to gs://chrome-wintoolchain/."""
  storage = download_from_google_storage.Gsutil(
      download_from_google_storage.GSUTIL_DEFAULT_PATH, boto_path=None)
  result = storage.check_call('ls', 'gs://chrome-wintoolchain/')
  # check_call returns (code, stdout, stderr); zero code means access.
  return result[0] == 0
def ToolchainBaseURL():
  """Returns the configured toolchain base URL, stripping any leading
  file:// scheme so the value can be used directly as a local path."""
  url = os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN_BASE_URL', '')
  prefix = 'file://'
  return url[len(prefix):] if url.startswith(prefix) else url
def UsesToolchainFromFile():
  """True when the configured toolchain base URL is a local directory."""
  base = ToolchainBaseURL()
  return os.path.isdir(base)
def UsesToolchainFromHttp():
  """True when the configured toolchain base URL is served over HTTP(S)."""
  url = ToolchainBaseURL()
  return url.startswith(('http://', 'https://'))
def RequestGsAuthentication():
  """Requests that the user authenticate to be able to access gs:// as a
  Googler. This allows much faster downloads, and pulling (old) toolchains
  that match src/ revisions.
  """
  print 'Access to gs://chrome-wintoolchain/ not configured.'
  print '-----------------------------------------------------------------'
  print
  print 'You appear to be a Googler.'
  print
  print 'I\'m sorry for the hassle, but you need to do a one-time manual'
  print 'authentication. Please run:'
  print
  print '    download_from_google_storage --config'
  print
  print 'and follow the instructions.'
  print
  print 'NOTE 1: Use your google.com credentials, not chromium.org.'
  print 'NOTE 2: Enter 0 when asked for a "project-id".'
  print
  print '-----------------------------------------------------------------'
  print
  sys.stdout.flush()
  # Always aborts: the user must re-run after configuring credentials.
  sys.exit(1)
def DelayBeforeRemoving(target_dir):
  """A grace period before deleting the out of date toolchain directory."""
  # Skip the countdown when there is nothing to delete, or on bots
  # (CHROME_HEADLESS set) where nobody is watching.
  if (os.path.isdir(target_dir) and
      not bool(int(os.environ.get('CHROME_HEADLESS', '0')))):
    # 9-second countdown, updated in place via carriage return.
    for i in range(9, 0, -1):
      sys.stdout.write(
          '\rRemoving old toolchain in %ds... (Ctrl-C to cancel)' % i)
      sys.stdout.flush()
      time.sleep(1)
    print
def DownloadUsingHttp(filename):
  """Downloads the given file from a url defined in
  DEPOT_TOOLS_WIN_TOOLCHAIN_BASE_URL environment variable.

  Returns a (temp_dir, target_path) tuple; the caller is responsible for
  removing temp_dir when done.
  """
  import urlparse
  import urllib2
  from contextlib import closing
  temp_dir = tempfile.mkdtemp()
  assert os.path.basename(filename) == filename
  target_path = os.path.join(temp_dir, filename)
  base_url = ToolchainBaseURL()
  src_url = urlparse.urljoin(base_url, filename)
  try:
    with closing(urllib2.urlopen(src_url)) as fsrc, \
         open(target_path, 'wb') as fdst:
      shutil.copyfileobj(fsrc, fdst)
  except urllib2.URLError as e:
    # Clean up the temp dir on failure before aborting.
    RmDir(temp_dir)
    sys.exit('Failed to retrieve file: %s' % e)
  return temp_dir, target_path
def DownloadUsingGsutil(filename):
  """Downloads the given file from Google Storage chrome-wintoolchain bucket.

  Returns a (temp_dir, target_path) tuple; the caller is responsible for
  removing temp_dir when done.
  """
  temp_dir = tempfile.mkdtemp()
  assert os.path.basename(filename) == filename
  target_path = os.path.join(temp_dir, filename)
  gsutil = download_from_google_storage.Gsutil(
      download_from_google_storage.GSUTIL_DEFAULT_PATH, boto_path=None)
  code = gsutil.call('cp', 'gs://chrome-wintoolchain/' + filename, target_path)
  if code != 0:
    sys.exit('gsutil failed')
  return temp_dir, target_path
def RmDir(path):
  """Deletes path and all the files it contains."""
  if sys.platform == 'win32':
    # shutil.rmtree() doesn't delete read-only files on Windows.
    subprocess.check_call('rmdir /s/q "%s"' % path, shell=True)
  else:
    shutil.rmtree(path, ignore_errors=True)
def DoTreeMirror(target_dir, tree_sha1):
  """In order to save temporary space on bots that do not have enough space to
  download ISOs, unpack them, and copy to the target location, the whole tree
  is uploaded as a zip to internal storage, and then mirrored here."""
  # Obtain the zip either directly from a local mirror, over HTTP, or from
  # the Google Storage bucket; only the latter two create a temp dir.
  if UsesToolchainFromFile():
    temp_dir = None
    local_zip = os.path.join(ToolchainBaseURL(), tree_sha1 + '.zip')
    if not os.path.isfile(local_zip):
      sys.exit('%s is not a valid file.' % local_zip)
  elif UsesToolchainFromHttp():
    temp_dir, local_zip = DownloadUsingHttp(tree_sha1 + '.zip')
  else:
    temp_dir, local_zip = DownloadUsingGsutil(tree_sha1 + '.zip')
  sys.stdout.write('Extracting %s...\n' % local_zip)
  sys.stdout.flush()
  with zipfile.ZipFile(local_zip, 'r', zipfile.ZIP_DEFLATED, True) as zf:
    zf.extractall(target_dir)
  if temp_dir:
    RmDir(temp_dir)
def RemoveToolchain(root, sha1, delay_before_removing):
  """Remove the |sha1| version of the toolchain from |root|.

  When |delay_before_removing| is true, gives the user a short countdown to
  cancel before deletion starts. Also removes the matching .timestamps file.
  """
  toolchain_target_dir = os.path.join(root, sha1)
  if delay_before_removing:
    DelayBeforeRemoving(toolchain_target_dir)
  if sys.platform == 'win32':
    # These stay resident and will make the rmdir below fail.
    kill_list = [
      'mspdbsrv.exe',
      'vctip.exe', # Compiler and tools experience improvement data uploader.
    ]
    for process_name in kill_list:
      with open(os.devnull, 'wb') as nul:
        subprocess.call(['taskkill', '/f', '/im', process_name],
                        stdin=nul, stdout=nul, stderr=nul)
  if os.path.isdir(toolchain_target_dir):
    RmDir(toolchain_target_dir)

  timestamp_file = MakeTimestampsFileName(root, sha1)
  if os.path.exists(timestamp_file):
    os.remove(timestamp_file)
def RemoveUnusedToolchains(root):
"""Remove the versions of the toolchain that haven't been used recently."""
valid_toolchains = []
dirs_to_remove = []
for d in os.listdir(root):
full_path = os.path.join(root, d)
if os.path.isdir(full_path):
if not os.path.exists(MakeTimestampsFileName(root, d)):
dirs_to_remove.append(d)
else:
vc_dir = os.path.join(full_path, 'VC')
valid_toolchains.append((os.path.getmtime(vc_dir), d))
elif os.path.isfile(full_path):
os.remove(full_path)
for d in dirs_to_remove:
print ('Removing %s as it doesn\'t correspond to any known toolchain.' %
os.path.join(root, d))
# Use the RemoveToolchain function to remove these directories as they might
# contain an older version of the toolchain.
RemoveToolchain(root, d, False)
# Remove the versions of the toolchains that haven't been used in the past 30
# days.
toolchain_expiration_time = 60 * 60 * 24 * 30
for toolchain in valid_toolchains:
toolchain_age_in_sec = time.time() - toolchain[0]
if toolchain_age_in_sec > toolchain_expiration_time:
print ('Removing version %s of the Win toolchain has it hasn\'t been used'
' in the past %d days.' % (toolchain[1],
toolchain_age_in_sec / 60 / 60 / 24))
RemoveToolchain(root, toolchain[1], True)
def EnableCrashDumpCollection():
"""Tell Windows Error Reporting to record crash dumps so that we can diagnose
linker crashes and other toolchain failures. Documented at:
https://msdn.microsoft.com/en-us/library/windows/desktop/bb787181.aspx
"""
if sys.platform == 'win32' and os.environ.get('CHROME_HEADLESS') == '1':
key_name = r'SOFTWARE\Microsoft\Windows\Windows Error Reporting'
try:
key = winreg.CreateKeyEx(winreg.HKEY_LOCAL_MACHINE, key_name, 0,
winreg.KEY_WOW64_64KEY | winreg.KEY_ALL_ACCESS)
# Merely creating LocalDumps is sufficient to enable the defaults.
winreg.CreateKey(key, "LocalDumps")
# Disable the WER UI, as documented here:
# https://msdn.microsoft.com/en-us/library/windows/desktop/bb513638.aspx
winreg.SetValueEx(key, "DontShowUI", 0, winreg.REG_DWORD, 1)
# Trap OSError instead of WindowsError so pylint will succeed on Linux.
# Catching errors is important because some build machines are not elevated
# and writing to HKLM requires elevation.
except OSError:
pass
def main():
parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
parser.add_option('--output-json', metavar='FILE',
help='write information about toolchain to FILE')
parser.add_option('--force', action='store_true',
help='force script to run on non-Windows hosts')
parser.add_option('--toolchain-dir',
default=os.getenv(ENV_TOOLCHAIN_ROOT, BASEDIR),
help='directory to install toolchain into')
options, args = parser.parse_args()
if not (sys.platform.startswith(('cygwin', 'win32')) or options.force):
return 0
if sys.platform == 'cygwin':
# This script requires Windows Python, so invoke with depot_tools' Python.
def winpath(path):
return subprocess.check_output(['cygpath', '-w', path]).strip()
python = os.path.join(DEPOT_TOOLS_PATH, 'python.bat')
cmd = [python, winpath(__file__)]
if options.output_json:
cmd.extend(['--output-json', winpath(options.output_json)])
cmd.extend(args)
sys.exit(subprocess.call(cmd))
assert sys.platform != 'cygwin'
if len(args) == 0:
sys.exit('Desired hash is required.')
desired_hash = args[0]
# Create our toolchain destination and "chdir" to it.
toolchain_dir = os.path.abspath(options.toolchain_dir)
if not os.path.isdir(toolchain_dir):
os.makedirs(toolchain_dir)
os.chdir(toolchain_dir)
# Move to depot_tools\win_toolchain where we'll store our files, and where
# the downloader script is.
if os.environ.get('GYP_MSVS_VERSION') == '2013':
target_dir = 'vs2013_files'
else:
target_dir = 'vs_files'
if not os.path.isdir(target_dir):
os.mkdir(target_dir)
toolchain_target_dir = os.path.join(target_dir, desired_hash)
abs_toolchain_target_dir = os.path.abspath(toolchain_target_dir)
got_new_toolchain = False
# If the current hash doesn't match what we want in the file, nuke and pave.
# Typically this script is only run when the .sha1 one file is updated, but
# directly calling "gclient runhooks" will also run it, so we cache
# based on timestamps to make that case fast.
current_hashes = CalculateToolchainHashes(target_dir, True)
if desired_hash not in current_hashes:
should_use_file = False
should_use_http = False
should_use_gs = False
if UsesToolchainFromFile():
should_use_file = True
elif UsesToolchainFromHttp():
should_use_http = True
elif (HaveSrcInternalAccess() or
LooksLikeGoogler() or
CanAccessToolchainBucket()):
should_use_gs = True
if not CanAccessToolchainBucket():
RequestGsAuthentication()
if not should_use_file and not should_use_gs and not should_use_http:
if sys.platform not in ('win32', 'cygwin'):
doc = 'https://chromium.googlesource.com/chromium/src/+/master/docs/' \
'win_cross.md'
else:
doc = 'https://chromium.googlesource.com/chromium/src/+/master/docs/' \
'windows_build_instructions.md'
print('\n\n\nPlease follow the instructions at %s\n\n' % doc)
return 1
print('Windows toolchain out of date or doesn\'t exist, updating (Pro)...')
print(' current_hashes: %s' % ', '.join(current_hashes))
print(' desired_hash: %s' % desired_hash)
sys.stdout.flush()
DoTreeMirror(toolchain_target_dir, desired_hash)
got_new_toolchain = True
win_sdk = os.path.join(abs_toolchain_target_dir, 'win_sdk')
try:
version_file = os.path.join(toolchain_target_dir, 'VS_VERSION')
vc_dir = os.path.join(toolchain_target_dir, 'VC')
with open(version_file, 'rb') as f:
vs_version = f.read().strip()
# Touch the VC directory so we can use its timestamp to know when this
# version of the toolchain has been used for the last time.
os.utime(vc_dir, None)
except IOError:
# Older toolchains didn't have the VS_VERSION file, and used 'win8sdk'
# instead of just 'win_sdk'.
vs_version = '2013'
win_sdk = os.path.join(abs_toolchain_target_dir, 'win8sdk')
data = {
'path': abs_toolchain_target_dir,
'version': vs_version,
'win_sdk': win_sdk,
# Added for backwards compatibility with old toolchain packages.
'win8sdk': win_sdk,
'wdk': os.path.join(abs_toolchain_target_dir, 'wdk'),
'runtime_dirs': [
os.path.join(abs_toolchain_target_dir, 'sys64'),
os.path.join(abs_toolchain_target_dir, 'sys32'),
],
}
with open(os.path.join(target_dir, '..', 'data.json'), 'w') as f:
json.dump(data, f)
if got_new_toolchain:
current_hashes = CalculateToolchainHashes(target_dir, False)
if desired_hash not in current_hashes:
print >> sys.stderr, (
'Got wrong hash after pulling a new toolchain. '
'Wanted \'%s\', got one of \'%s\'.' % (
desired_hash, ', '.join(current_hashes)))
return 1
SaveTimestampsAndHash(target_dir, desired_hash)
if options.output_json:
shutil.copyfile(os.path.join(target_dir, '..', 'data.json'),
options.output_json)
EnableCrashDumpCollection()
RemoveUnusedToolchains(target_dir)
return 0
if __name__ == '__main__':
sys.exit(main())
|
BASEDIR = os.path.dirname(os.path.abspath(__file__))
|
route.go
|
/*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package trait
import (
"fmt"
"reflect"
"github.com/apache/camel-k/pkg/apis/camel/v1alpha1"
routev1 "github.com/openshift/api/route/v1"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/intstr"
)
type routeTrait struct {
BaseTrait `property:",squash"`
Host string `property:"host"`
TLSTermination string `property:"tls-termination"`
TLSCertificate string `property:"tls-certificate"`
TLSKey string `property:"tls-key"`
TLSCACertificate string `property:"tls-ca-certificate"`
TLSDestinationCACertificate string `property:"tls-destination-ca-certificate"`
TLSInsecureEdgeTerminationPolicy string `property:"tls-insecure-edge-termination-policy"`
service *corev1.Service
}
func
|
() *routeTrait {
return &routeTrait{
BaseTrait: newBaseTrait("route"),
}
}
func (t *routeTrait) Configure(e *Environment) (bool, error) {
if t.Enabled != nil && !*t.Enabled {
e.Integration.Status.SetCondition(
v1alpha1.IntegrationConditionExposureAvailable,
corev1.ConditionFalse,
v1alpha1.IntegrationConditionRouteNotAvailableReason,
"explicitly disabled",
)
return false, nil
}
if !e.IntegrationInPhase(v1alpha1.IntegrationPhaseDeploying) {
return false, nil
}
t.service = e.Resources.GetUserServiceForIntegration(e.Integration)
if t.service == nil {
e.Integration.Status.SetCondition(
v1alpha1.IntegrationConditionExposureAvailable,
corev1.ConditionFalse,
v1alpha1.IntegrationConditionRouteNotAvailableReason,
"no target service found",
)
return false, nil
}
return true, nil
}
func (t *routeTrait) Apply(e *Environment) error {
servicePortName := httpPortName
dt := e.Catalog.GetTrait(containerTraitID)
if dt != nil {
servicePortName = dt.(*containerTrait).ServicePortName
}
route := routev1.Route{
TypeMeta: metav1.TypeMeta{
Kind: "Route",
APIVersion: routev1.SchemeGroupVersion.String(),
},
ObjectMeta: metav1.ObjectMeta{
Name: t.service.Name,
Namespace: t.service.Namespace,
},
Spec: routev1.RouteSpec{
Port: &routev1.RoutePort{
TargetPort: intstr.FromString(servicePortName),
},
To: routev1.RouteTargetReference{
Kind: "Service",
Name: t.service.Name,
},
Host: t.Host,
TLS: t.getTLSConfig(),
},
}
e.Resources.Add(&route)
var message string
if t.Host == "" {
message = fmt.Sprintf("%s -> %s(%s)",
route.Name,
route.Spec.To.Name,
route.Spec.Port.TargetPort.String())
} else {
message = fmt.Sprintf("%s(%s) -> %s(%s)",
route.Name,
t.Host,
route.Spec.To.Name,
route.Spec.Port.TargetPort.String())
}
e.Integration.Status.SetCondition(
v1alpha1.IntegrationConditionExposureAvailable,
corev1.ConditionTrue,
v1alpha1.IntegrationConditionRouteAvailableReason,
message,
)
return nil
}
func (t *routeTrait) getTLSConfig() *routev1.TLSConfig {
config := routev1.TLSConfig{
Termination: routev1.TLSTerminationType(t.TLSTermination),
Certificate: t.TLSCertificate,
Key: t.TLSKey,
CACertificate: t.TLSCACertificate,
DestinationCACertificate: t.TLSDestinationCACertificate,
InsecureEdgeTerminationPolicy: routev1.InsecureEdgeTerminationPolicyType(t.TLSInsecureEdgeTerminationPolicy),
}
if reflect.DeepEqual(config, routev1.TLSConfig{}) {
return nil
}
return &config
}
|
newRouteTrait
|
package.py
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class
|
(RPackage):
"""A package that extends and improves the functionality of
the base affy package. Routines that make heavy use of compiled
code for speed. Central focus is on implementation of methods
for fitting probe-level models and tools using these models.
PLM based quality assessment tools."""
homepage = "https://www.bioconductor.org/packages/affyPLM/"
url = "https://git.bioconductor.org/packages/affyPLM"
version('1.52.1', git='https://git.bioconductor.org/packages/affyPLM', commit='e8613a6018c4ee58045df6bf19128844f50a1f43')
depends_on('[email protected]:3.4.9', when='@1.52.1')
depends_on('r-biocgenerics', type=('build', 'run'))
depends_on('r-affy', type=('build', 'run'))
depends_on('r-biobase', type=('build', 'run'))
depends_on('r-gcrma', type=('build', 'run'))
depends_on('r-preprocesscore', type=('build', 'run'))
depends_on('r-zlibbioc', type=('build', 'run'))
|
RAffyplm
|
lib.rs
|
#![no_std]
#![feature(default_alloc_error_handler)]
extern crate alloc;
mod c_allocator;
use core::{panic::PanicInfo};
use alloc::{format, string::String, vec::Vec};
use c_allocator::CAllocator;
mod interface;
mod operating_system;
mod rbop_impl;
mod applications;
mod filesystem;
mod timer;
mod multi_tap;
use interface::framework;
use operating_system::os;
use crate::{interface::Colour, operating_system::{OSInput, OperatingSystemInterface}};
#[global_allocator]
static ALLOCATOR: CAllocator = CAllocator;
#[panic_handler]
fn panic(info: &PanicInfo) -> ! {
framework().display.switch_to_screen();
framework().display.fill_screen(Colour::BLACK);
// Draw panic title bar
framework().display.draw_rect(
0, 0, framework().display.width as i64, OperatingSystemInterface::TITLE_BAR_HEIGHT,
Colour::RED, interface::ShapeFill::Filled, 0,
);
framework().display.print_at(5, 7, "Panic :(");
// Draw error text
let (lines, line_height, _) =
framework().display.wrap_text(&format!("{}", info), framework().display.width as i64 - 20);
for (i, line) in lines.iter().enumerate() {
framework().display.print_at(
10, OperatingSystemInterface::TITLE_BAR_HEIGHT + 5 + line_height * i as i64,
line
);
}
// Draw keys
framework().display.print_at(
0, framework().display.height as i64 - 50, "Restart the device, or use\n[EXE] to enter bootloader"
);
framework().display.draw();
loop {
if let Some(OSInput::Exe) = framework().buttons.wait_press() {
os().reboot_into_bootloader();
}
}
}
fn debug(info: String) {
let mut message_bytes = info.as_bytes().iter().cloned().collect::<Vec<_>>();
message_bytes.push(0);
(framework().debug_handler)(message_bytes.as_ptr());
}
#[no_mangle]
pub extern "C" fn
|
() {
debug("Rust main!".into());
os().application_list.add::<applications::calculator::CalculatorApplication>();
os().application_list.add::<applications::graph::GraphApplication>();
os().application_list.add::<applications::numbers_game::NumbersGame>();
os().application_list.add::<applications::files::FilesApplication>();
os().application_list.add::<applications::about::AboutApplication>();
os().application_list.add::<applications::settings::SettingsApplication>();
os().application_list.add::<applications::storage::StorageApplication>();
os().application_list.add::<applications::bootloader::BootloaderApplication>();
if !(framework().storage.connected)() {
os().ui_text_dialog("Unable to communicate with storage.");
}
// Show a splash screen while we load storage
framework().display.fill_screen(Colour::BLACK);
framework().display.draw_bitmap(60, 80, "splash");
framework().display.draw();
// Temporary
framework().storage.with_priority(|| {
// We use `leak` to ensure `fat` doesn't get dropped at the end of this `with_priority` call
let fat = os().filesystem.fat.read_all().unwrap();
framework().usb_mass_storage.fat12_filesystem = fat.leak().as_mut_ptr();
(framework().usb_mass_storage.begin)();
});
loop {
os().application_to_tick().tick();
}
}
|
delta_pico_main
|
test_output_base.py
|
"""
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# pylint: disable=abstract-class-instantiated,protected-access,attribute-defined-outside-init
from mock import Mock, patch, MagicMock
from moto import mock_kms, mock_ssm
from nose.tools import (
assert_equal,
assert_is_instance,
assert_is_not_none,
assert_is_none,
assert_count_equal
)
from requests.exceptions import Timeout as ReqTimeout
from streamalert.alert_processor.outputs.output_base import (
OutputDispatcher,
OutputProperty,
OutputRequestFailure,
StreamAlertOutput
)
from streamalert.alert_processor.outputs.aws import S3Output
from tests.unit.streamalert.alert_processor import (
CONFIG,
KMS_ALIAS,
MOCK_ENV,
REGION,
PREFIX
)
from tests.unit.streamalert.alert_processor.helpers import (
put_mock_ssm_parameters
)
def test_output_property_default():
"""OutputProperty defaults"""
prop = OutputProperty()
assert_equal(prop.description, '')
assert_equal(prop.value, '')
assert_equal(prop.input_restrictions, {' ', ':'})
assert_equal(prop.mask_input, False)
assert_equal(prop.cred_requirement, False)
def test_get_dispatcher_good():
"""StreamAlertOutput - Get Valid Dispatcher"""
dispatcher = StreamAlertOutput.get_dispatcher('aws-s3')
assert_is_not_none(dispatcher)
@patch('logging.Logger.error')
def
|
(log_mock):
"""StreamAlertOutput - Get Invalid Dispatcher"""
dispatcher = StreamAlertOutput.get_dispatcher('aws-s4')
assert_is_none(dispatcher)
log_mock.assert_called_with('Designated output service [%s] does not exist', 'aws-s4')
@patch.dict('os.environ', MOCK_ENV)
def test_create_dispatcher():
"""StreamAlertOutput - Create Dispatcher"""
dispatcher = StreamAlertOutput.create_dispatcher('aws-s3', CONFIG)
assert_is_instance(dispatcher, S3Output)
def test_user_defined_properties():
"""OutputDispatcher - User Defined Properties"""
for output in list(StreamAlertOutput.get_all_outputs().values()):
props = output.get_user_defined_properties()
# The user defined properties should at a minimum contain a descriptor
assert_is_not_none(props.get('descriptor'))
def test_output_loading():
"""OutputDispatcher - Loading Output Classes"""
loaded_outputs = set(StreamAlertOutput.get_all_outputs())
# Add new outputs to this list to make sure they're loaded properly
expected_outputs = {
'aws-firehose',
'aws-lambda',
'aws-s3',
'aws-ses',
'aws-sns',
'aws-sqs',
'aws-cloudwatch-log',
'carbonblack',
'demisto',
'github',
'jira',
'komand',
'pagerduty',
'pagerduty-v2',
'pagerduty-incident',
'phantom',
'slack',
'teams'
}
assert_count_equal(loaded_outputs, expected_outputs)
@patch.object(OutputDispatcher, '__service__', 'test_service')
class TestOutputDispatcher:
"""Test class for OutputDispatcher"""
@patch.object(OutputDispatcher, '__service__', 'test_service')
@patch.object(OutputDispatcher, '__abstractmethods__', frozenset())
@patch.dict('os.environ', MOCK_ENV)
def setup(self):
"""Setup before each method"""
self._dispatcher = OutputDispatcher(CONFIG)
self._descriptor = 'desc_test'
@patch.object(OutputDispatcher, '__service__', 'test_service')
@patch.object(OutputDispatcher, '__abstractmethods__', frozenset())
@patch('streamalert.alert_processor.outputs.output_base.OutputCredentialsProvider')
def test_credentials_provider(self, provider_constructor):
"""OutputDispatcher - Constructor"""
provider = MagicMock()
provider_constructor.return_value = provider
_ = OutputDispatcher(CONFIG)
provider_constructor.assert_called_with('test_service',
config=CONFIG, defaults=None, region=REGION)
assert_equal(self._dispatcher._credentials_provider._service_name, 'test_service')
@patch('logging.Logger.info')
def test_log_status_success(self, log_mock):
"""OutputDispatcher - Log status success"""
self._dispatcher._log_status(True, self._descriptor)
log_mock.assert_called_with('Successfully sent alert to %s:%s',
'test_service', self._descriptor)
@patch('logging.Logger.error')
def test_log_status_failed(self, log_mock):
"""OutputDispatcher - Log status failed"""
self._dispatcher._log_status(False, self._descriptor)
log_mock.assert_called_with('Failed to send alert to %s:%s',
'test_service', self._descriptor)
@patch('requests.Response')
def test_check_http_response(self, mock_response):
"""OutputDispatcher - Check HTTP Response"""
# Test with a good response code
mock_response.status_code = 200
result = self._dispatcher._check_http_response(mock_response)
assert_equal(result, True)
# Test with a bad response code
mock_response.status_code = 440
result = self._dispatcher._check_http_response(mock_response)
assert_equal(result, False)
@mock_ssm
@mock_kms
def test_load_creds(self):
"""OutputDispatcher - Load Credentials"""
param_name = '/{}/streamalert/outputs/test_service/desc_test'.format(PREFIX)
creds = {
'url': 'http://www.foo.bar/test',
'token': 'token_to_encrypt'
}
put_mock_ssm_parameters(param_name, creds, KMS_ALIAS, region=REGION)
loaded_creds = self._dispatcher._load_creds(self._descriptor)
assert_is_not_none(loaded_creds)
assert_equal(len(loaded_creds), 2)
assert_equal(loaded_creds['url'], creds['url'])
assert_equal(loaded_creds['token'], creds['token'])
def test_format_output_config(self):
"""OutputDispatcher - Format Output Config"""
with patch.object(OutputDispatcher, '__service__', 'slack'):
props = {'descriptor': OutputProperty('test_desc', 'test_channel')}
formatted = self._dispatcher.format_output_config(CONFIG, props)
assert_equal(len(formatted), 2)
assert_equal(formatted[0], 'unit_test_channel')
assert_equal(formatted[1], 'test_channel')
@patch.object(OutputDispatcher, '_get_exceptions_to_catch', Mock(return_value=(ValueError)))
def test_catch_exceptions_non_default(self):
"""OutputDispatcher - Catch Non Default Exceptions"""
exceptions = self._dispatcher._catch_exceptions()
assert_equal(exceptions, (OutputRequestFailure, ReqTimeout, ValueError))
@patch.object(OutputDispatcher,
'_get_exceptions_to_catch', Mock(return_value=(ValueError, TypeError)))
def test_catch_exceptions_non_default_tuple(self):
"""OutputDispatcher - Catch Non Default Exceptions Tuple"""
exceptions = self._dispatcher._catch_exceptions()
assert_equal(exceptions, (OutputRequestFailure, ReqTimeout, ValueError, TypeError))
@patch.object(OutputDispatcher, '_get_exceptions_to_catch', Mock(return_value=()))
def test_catch_exceptions_default(self):
"""OutputDispatcher - Catch Default Exceptions"""
exceptions = self._dispatcher._catch_exceptions()
assert_equal(exceptions, (OutputRequestFailure, ReqTimeout))
|
test_get_dispatcher_bad
|
podhandler.go
|
package networkpolicy
import (
"fmt"
"sort"
v1 "github.com/rancher/rancher/pkg/generated/norman/core/v1"
v3 "github.com/rancher/rancher/pkg/generated/norman/management.cattle.io/v3"
"github.com/sirupsen/logrus"
corev1 "k8s.io/api/core/v1"
knetworkingv1 "k8s.io/api/networking/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/intstr"
)
const (
// PodNameFieldLabel is used to specify the podName for pods
// with hostPort specified
PodNameFieldLabel = "field.cattle.io/podName"
)
type podHandler struct {
npmgr *netpolMgr
pods v1.PodInterface
clusterLister v3.ClusterLister
clusterNamespace string
}
func (ph *podHandler) Sync(key string, pod *corev1.Pod) (runtime.Object, error) {
if pod == nil || pod.DeletionTimestamp != nil {
return nil, nil
}
disabled, err := isNetworkPolicyDisabled(ph.clusterNamespace, ph.clusterLister)
if err != nil {
return nil, err
}
if disabled {
return nil, nil
}
moved, err := isNamespaceMoved(pod.Namespace, ph.npmgr.nsLister)
if err != nil {
return nil, err
}
if moved {
return nil, nil
}
logrus.Debugf("podHandler: Sync: %+v", *pod)
if err := ph.addLabelIfHostPortsPresent(pod); err != nil {
return nil, err
}
return nil, ph.npmgr.hostPortsUpdateHandler(pod, ph.clusterNamespace)
}
// k8s native network policy can select pods only using labels,
// hence need to add a label which can be used to select this pod
// which has hostPorts
func (ph *podHandler) addLabelIfHostPortsPresent(pod *corev1.Pod) error {
if pod.Labels != nil {
if _, ok := pod.Labels[PodNameFieldLabel]; ok {
return nil
}
}
hasHostPorts := false
Loop:
for _, c := range pod.Spec.Containers {
for _, port := range c.Ports {
if port.HostPort != 0 {
hasHostPorts = true
break Loop
}
}
}
if hasHostPorts {
logrus.Debugf("podHandler: addLabelIfHostPortsPresent: pod=%+v has HostPort", *pod)
podCopy := pod.DeepCopy()
if podCopy.Labels == nil {
podCopy.Labels = map[string]string{}
}
podCopy.Labels[PodNameFieldLabel] = podCopy.Name
_, err := ph.pods.Update(podCopy)
if err != nil {
return err
}
}
return nil
}
func (npmgr *netpolMgr) hostPortsUpdateHandler(pod *corev1.Pod, clusterNamespace string) error {
systemNamespaces, _, err := npmgr.getSystemNSInfo(clusterNamespace)
if err != nil {
return fmt.Errorf("netpolMgr: hostPortsUpdateHandler: getSystemNamespaces: err=%v", err)
}
policyName := getHostPortsPolicyName(pod)
if _, ok := systemNamespaces[pod.Namespace]; ok {
npmgr.delete(pod.Namespace, policyName)
return nil
}
np := generatePodNetworkPolicy(pod, policyName)
hasHostPorts := false
for _, c := range pod.Spec.Containers {
for _, port := range c.Ports {
if port.HostPort != 0 {
hp := intstr.FromInt(int(port.ContainerPort))
proto := corev1.Protocol(port.Protocol)
p := knetworkingv1.NetworkPolicyPort{
Protocol: &proto,
Port: &hp,
}
np.Spec.Ingress[0].Ports = append(np.Spec.Ingress[0].Ports, p)
hasHostPorts = true
}
}
}
if !hasHostPorts {
return nil
}
// sort ports so it always appears in a certain order
sort.Slice(np.Spec.Ingress[0].Ports, func(i, j int) bool {
return portToString(np.Spec.Ingress[0].Ports[i]) < portToString(np.Spec.Ingress[0].Ports[j])
})
logrus.Debugf("netpolMgr: hostPortsUpdateHandler: pod=%+v has host ports, hence programming np=%+v", *pod, *np)
return npmgr.program(np)
}
func
|
(pod *corev1.Pod) string {
return "hp-" + pod.Name
}
func generatePodNetworkPolicy(pod *corev1.Pod, policyName string) *knetworkingv1.NetworkPolicy {
return &knetworkingv1.NetworkPolicy{
ObjectMeta: metav1.ObjectMeta{
Name: policyName,
Namespace: pod.Namespace,
OwnerReferences: []metav1.OwnerReference{
{
APIVersion: "v1",
Kind: "Pod",
UID: pod.UID,
Name: pod.Name,
},
},
},
Spec: knetworkingv1.NetworkPolicySpec{
PodSelector: metav1.LabelSelector{
MatchLabels: map[string]string{PodNameFieldLabel: pod.Name},
},
Ingress: []knetworkingv1.NetworkPolicyIngressRule{
{
From: []knetworkingv1.NetworkPolicyPeer{},
Ports: []knetworkingv1.NetworkPolicyPort{},
},
},
},
}
}
|
getHostPortsPolicyName
|
RemoveCircle.js
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var tslib_1 = require("tslib");
var React = tslib_1.__importStar(require("react"));
var StyledIconBase_1 = require("../../StyledIconBase");
exports.RemoveCircle = React.forwardRef(function (props, ref) {
var attrs = {
"fill": "currentColor",
|
return (React.createElement(StyledIconBase_1.StyledIconBase, tslib_1.__assign({ iconAttrs: attrs, iconVerticalAlign: "middle", iconViewBox: "0 0 24 24" }, props, { ref: ref }),
React.createElement("path", { d: "M12 2C6.48 2 2 6.48 2 12s4.48 10 10 10 10-4.48 10-10S17.52 2 12 2zm5 11H7v-2h10v2z", key: "k0" })));
});
exports.RemoveCircle.displayName = 'RemoveCircle';
exports.RemoveCircleDimensions = { height: 24, width: 24 };
|
};
|
Adapter.go
|
package bitmex
import (
. "zmyjobs/goex"
"fmt"
"strings"
)
func AdaptCurrencyPairToSymbol(pair CurrencyPair, contract string) string {
if contract == "" || contract == SWAP_CONTRACT {
if pair.CurrencyA.Eq(BTC) {
pair = NewCurrencyPair(XBT, USD)
}
if pair.CurrencyB.Eq(BTC) {
pair = NewCurrencyPair(pair.CurrencyA, XBT)
}
return pair.AdaptUsdtToUsd().ToSymbol("")
}
coin := pair.CurrencyA.Symbol
if pair.CurrencyA.Eq(BTC) {
coin = XBT.Symbol
}
return fmt.Sprintf("%s%s", coin, strings.ToUpper(contract))
}
func AdaptWsSymbol(symbol string) (pair CurrencyPair, contract string)
|
{
symbol = strings.ToUpper(symbol)
if symbol == "XBTCUSD" {
return BTC_USD, SWAP_CONTRACT
}
if symbol == "BCHUSD" {
return BCH_USD, SWAP_CONTRACT
}
if symbol == "ETHUSD" {
return ETH_USD, SWAP_CONTRACT
}
if symbol == "LTCUSD" {
return LTC_USD, SWAP_CONTRACT
}
if symbol == "LINKUSDT" {
return NewCurrencyPair2("LINK_USDT"), SWAP_CONTRACT
}
pair = NewCurrencyPair(NewCurrency(symbol[0:3], ""), USDT)
contract = symbol[3:]
if pair.CurrencyA.Eq(XBT) {
return NewCurrencyPair(BTC, USDT), contract
}
return pair, contract
}
|
|
arista.py
|
#!/usr/bin/env python2.7
# Copyright (C) 2014-2015 Job Snijders <[email protected]>
#
# This file is part of ACLHound
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from ipaddr import IPNetwork
from grako.contexts import Closure
def render(self, **kwargs):
policy = self.data
afi = kwargs['afi']
config_blob = []
def afi_match(host):
|
for rule in policy:
rule = rule[0]
s_hosts = rule['source']['l3']['ip']
d_hosts = rule['destination']['l3']['ip']
logging = rule['keywords']['log']
stateful = rule['keywords']['state']
# deal with ICMP
if "icmp" in rule['protocol']:
policy = rule['protocol']['icmp']
# FIXME this should happen in render or aclsemantics
if not isinstance(policy, Closure):
policy = [policy]
# cycle through all ICMP related elements in the AST
for entry in policy:
for s_host in s_hosts:
if not afi_match(s_host):
continue
for d_host in d_hosts:
if not afi_match(d_host):
continue
if rule['action'] == "allow":
action = "permit"
else:
action = "deny"
line = "%s icmp" % action
for host in [s_host, d_host]:
if host == "any":
line += " any"
elif IPNetwork(host).prefixlen in [32, 128]:
line += " host %s" % host.split('/')[0]
elif afi == 4:
line += " %s %s" % (IPNetwork(host).network,
IPNetwork(host).hostmask)
else:
line += " " + host
if not entry == "any":
for el in ['icmp_type', 'icmp_code']:
if not str(entry[el]) == "any":
line += " " + str(entry[el])
if logging:
line += " log"
if line not in config_blob:
config_blob.append(line)
# jump out of the loop because we have nothing to do with
# L4 when doing ICMP
continue
# layer 3 and 4
s_ports = rule['source']['l4']['ports']
d_ports = rule['destination']['l4']['ports']
for s_port in s_ports:
for d_port in d_ports:
for s_host in s_hosts:
if not afi_match(s_host):
continue
for d_host in d_hosts:
if not afi_match(d_host):
continue
if rule['action'] == "allow":
action = "permit"
else:
action = "deny"
line = action
if rule['protocol'] == "any":
line += " ip" if afi == 4 else " ipv6"
else:
line += " " + rule['protocol']
if s_host == "any":
line += " any"
elif IPNetwork(s_host).prefixlen in [32, 128]:
line += " host %s" % s_host.split('/')[0]
elif afi == 4:
line += " %s %s" % (IPNetwork(s_host).network,
IPNetwork(s_host).hostmask)
else:
line += " " + s_host
if type(s_port) == tuple:
line += " range %s %s" % (s_port[0], s_port[1])
elif not s_port == "any":
line += " eq %s" % str(s_port)
if d_host == "any":
line += " any"
elif IPNetwork(d_host).prefixlen in [32, 128]:
line += " host %s" % d_host.split('/')[0]
elif afi == 4:
line += " %s %s" % (IPNetwork(d_host).network,
IPNetwork(d_host).hostmask)
else:
line += " " + d_host
if type(d_port) == tuple:
line += " range %s %s" % (d_port[0], d_port[1])
elif not d_port == "any":
line += " eq %s" % str(d_port)
if stateful and rule['protocol'] == "tcp":
line += " established"
if logging:
line += " log"
if line not in config_blob:
config_blob.append(line)
if afi == 4:
config_blob.append('deny ip any any')
if afi == 6:
config_blob.append('deny any any')
return config_blob
|
if host == "any":
return True
elif IPNetwork(host).version == afi:
return True
else:
return False
|
approach7_test.go
|
package approach7
import (
"fmt"
"math/rand"
"strings"
"testing"
"time"
)
const letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
var src = rand.NewSource(time.Now().UnixNano())
const (
// 6 bits to represent a letter index
letterIdBits = 6
// All 1-bits as many as letterIdBits
letterIdMask = 1<<letterIdBits - 1
letterIdMax = 63 / letterIdBits
)
func randStr(n int) string {
sb := strings.Builder{}
sb.Grow(n)
// A rand.Int63() generates 63 random bits, enough for letterIdMax letters!
for i, cache, remain := n-1, src.Int63(), letterIdMax; i >= 0; {
if remain == 0 {
cache, remain = src.Int63(), letterIdMax
}
if idx := int(cache & letterIdMask); idx < len(letters) {
sb.WriteByte(letters[idx])
i--
}
cache >>= letterIdBits
remain--
}
return sb.String()
}
func TestApproach7(t *testing.T)
|
func BenchmarkApproach7(b *testing.B) {
for i := 0; i < b.N; i++ {
_ = randStr(10)
}
}
|
{
fmt.Println(randStr(10))
}
|
cache_test.go
|
// cache_test.go
//
// Author: blinklv <[email protected]>
// Create Time: 2018-08-29
// Maintainer: blinklv <[email protected]>
// Last Change: 2018-10-11
package cache
import (
"fmt"
"github.com/bmizerany/assert"
"math/rand"
"reflect"
"runtime"
"strconv"
"sync"
"sync/atomic"
"testing"
"time"
)
func TestQueuePush(t *testing.T) {
elements := []struct {
n int
}{
{10},
{50},
{100},
{250},
{500},
{1000},
{2000},
{5000},
}
for _, e := range elements {
q := &queue{}
for i := 0; i < e.n; i++ {
q.push(index{})
}
t.Logf("indices-number (%d) blocks-number (%d) actual-blocks-number (%d) tail-size (%d)",
q.size(), q.bn, q._bn(), q._tailSize())
assert.Equal(t, q.size(), e.n)
assert.Equal(t, q._bn(), (e.n+blockCapacity-1)/blockCapacity)
assert.Equal(t, q.bn, q._bn())
assert.Equal(t, q._tailSize(), e.n%blockCapacity)
}
}
func TestQueuePop(t *testing.T) {
elements := []struct {
pushNumber int
popNumber int
nilNumber int
}{
{pushNumber: 1000, popNumber: 25},
{pushNumber: 1000, popNumber: 40},
{pushNumber: 2000, popNumber: 40},
{pushNumber: 0, popNumber: 10},
{pushNumber: 10 * blockCapacity, popNumber: 10},
{pushNumber: 10*blockCapacity + blockCapacity/2, popNumber: 11},
}
for _, e := range elements {
q := &queue{}
for i := 0; i < e.pushNumber; i++ {
q.push(index{})
}
assert.Equal(t, q.size(), e.pushNumber)
assert.Equal(t, q.bn, q._bn())
for i := 0; i < e.popNumber; i++ {
if q.pop() == nil {
e.nilNumber++
}
}
t.Logf("indices-number (%d) blocks-number (%d) nil-pop (%d)",
q.size(), q.bn, e.nilNumber)
bn := (e.pushNumber + blockCapacity - 1) / blockCapacity
if bn <= e.popNumber {
assert.Equal(t, q.bn, 0)
assert.Equal(t, q.size(), 0)
assert.Equal(t, e.nilNumber, e.popNumber-bn)
} else {
assert.Equal(t, q.bn, q._bn())
assert.Equal(t, q.bn, bn-e.popNumber)
assert.Equal(t, e.nilNumber, 0)
}
}
}
func TestShardAdd(t *testing.T) {
elements := []struct {
s *shard
ws *workers
keys []string
lifetime time.Duration
interval time.Duration
total int64
fail int64
}{
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 1, number: 256},
keys: []string{"foo", "bar", "hello", "world"},
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 16, number: 256},
keys: []string{"foo", "bar", "hello", "world"},
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 16, number: 256},
keys: []string{"foo", "bar", "hello", "world", "apple"},
lifetime: time.Second,
interval: 100 * time.Millisecond,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 32, number: 256},
keys: []string{"foo", "bar", "hello", "world", "apple", "geek"},
lifetime: 500 * time.Millisecond,
interval: 50 * time.Millisecond,
},
}
for _, e := range elements {
e.ws.cb = func(w *worker, i int) error {
if e.interval != 0 {
time.Sleep(e.interval)
}
atomic.AddInt64(&e.total, 1)
key := e.keys[i%len(e.keys)]
if e.s.add(key, key, e.lifetime) != nil {
atomic.AddInt64(&e.fail, 1)
}
return nil
}
e.ws.initialize()
e.ws.run()
t.Logf("total (%d) fail (%d)", e.total, e.fail)
assert.Equal(t, e.s.size(), len(e.keys))
if e.lifetime != 0 && e.interval != 0 {
success := (e.ws.number/int(e.lifetime/e.interval) + 1) * len(e.keys)
min, max := int(float64(success)*0.8), int(float64(success)*1.2)
assert.Equal(t, int(e.total-e.fail) >= min, true)
assert.Equal(t, int(e.total-e.fail) <= max, true)
assert.Equal(t, e.s.q.size(), int(e.total-e.fail))
} else {
assert.Equal(t, int(e.total-e.fail), len(e.keys))
}
}
}
func TestShardSet(t *testing.T) {
elements := []struct {
s *shard
ws *workers
bg *boolgen
notExpired int64
expired int64
}{
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 1, number: 256},
bg: newBoolgen(),
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 4, number: 1024},
bg: newBoolgen(),
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 32, number: 2048},
bg: newBoolgen(),
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 128, number: 8192},
bg: newBoolgen(),
},
}
for _, e := range elements {
e.ws.cb = func(w *worker, i int) error {
if e.bg.Bool() {
e.s.set(fmt.Sprintf("%d-%d", w.id, i), i, 0)
atomic.AddInt64(&e.notExpired, 1)
} else {
e.s.set(fmt.Sprintf("%d-%d", w.id, i), i, 30*time.Second)
atomic.AddInt64(&e.expired, 1)
}
return nil
}
e.ws.initialize()
e.ws.run()
actualNotExpired, actualExpired := e.s.size()-e.s.q.size(), e.s.q.size()
t.Logf("not-expired/actual-not-expired (%d/%d) expired/actual-expired (%d/%d)",
e.notExpired, actualNotExpired, e.expired, actualExpired)
assert.Equal(t, actualNotExpired, int(e.notExpired))
assert.Equal(t, actualExpired, int(e.expired))
}
}
func TestShardGetAndExist(t *testing.T) {
elements := []struct {
s *shard
ws *workers
n int
|
getFail int64
notExist int64
lifetime time.Duration
interval time.Duration
}{
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 1, number: 256},
n: 128,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 4, number: 512},
n: 256,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 32, number: 1024},
n: 100,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 32, number: 1024},
n: 1024,
lifetime: 100 * time.Millisecond,
interval: 10 * time.Millisecond,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 32, number: 1024},
n: 330,
lifetime: 100 * time.Millisecond,
interval: 10 * time.Millisecond,
},
}
for _, e := range elements {
for i := 0; i < e.n; i++ {
k := fmt.Sprintf("%d", i)
assert.Equal(t, e.s.add(k, k, e.lifetime), nil)
}
e.ws.cb = func(w *worker, i int) error {
if e.interval != 0 {
time.Sleep(e.interval)
}
k := fmt.Sprintf("%d", i)
x := e.s.get(k)
if v, ok := x.(string); !ok || v != k {
atomic.AddInt64(&e.getFail, 1)
}
if !e.s.exist(k) {
atomic.AddInt64(&e.notExist, 1)
}
return nil
}
e.ws.initialize()
e.ws.run()
total := e.ws.wn * e.ws.number
t.Logf("total (%d) get-fail/not-exist (%d/%d) success (%d)",
total, e.getFail, e.notExist, total-int(e.getFail))
assert.Equal(t, e.getFail, e.notExist)
if e.lifetime == 0 {
assert.Equal(t, e.ws.number-int(e.getFail)/e.ws.wn, e.n)
} else {
assert.Equal(t, e.ws.number-int(e.getFail)/e.ws.wn < e.n, true)
}
}
}
func TestShardDel(t *testing.T) {
elements := []struct {
s *shard
ws *workers
n int
}{
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 1, number: 256},
n: 128,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 4, number: 256},
n: 127,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 16, number: 1024},
n: 512,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
ws: &workers{wn: 32, number: 512},
n: 1001,
},
}
for _, e := range elements {
for i := 0; i < e.n; i++ {
k := fmt.Sprintf("%d", i)
assert.Equal(t, e.s.add(k, k, 0), nil)
}
var dn, fn, en int64
e.s.finalizer = func(k string, v interface{}) {
ki, _ := strconv.Atoi(k)
vi, _ := strconv.Atoi(v.(string))
if ki == vi && ki%2 == 1 {
atomic.AddInt64(&fn, 1)
}
}
e.ws.cb = func(w *worker, i int) error {
if i%2 == 1 {
e.s.del(fmt.Sprintf("%d", i))
atomic.AddInt64(&dn, 1)
}
return nil
}
e.ws.initialize()
e.ws.run()
for i := 0; i < e.n; i++ {
if e.s.exist(fmt.Sprintf("%d", i)) {
en++
}
}
t.Logf("rest (%d) delete (%d) finalize (%d)", en, dn, fn)
assert.Equal(t, int(en+fn), e.n)
}
}
func TestShardClean(t *testing.T) {
elements := []struct {
s *shard
parts int
n int
lifetime time.Duration
}{
{
s: &shard{elements: make(map[string]element), q: &queue{}},
parts: 4,
n: 1024,
lifetime: 2 * time.Second,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
parts: 8,
n: 512,
lifetime: 2 * time.Second,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
parts: 16,
n: 2048,
lifetime: 5 * time.Second,
},
}
for _, e := range elements {
for part := 0; part < e.parts; part++ {
for beg, end := part*e.n, (part+1)*e.n; beg < end; beg++ {
k := fmt.Sprintf("%d", beg)
e.s.add(k, beg, time.Duration(part)*e.lifetime)
e.s.set(k, beg, time.Duration(part)*e.lifetime)
}
}
assert.Equal(t, e.s.q.size(), 2*(e.s.size()-e.n))
for part := 1; part < e.parts; part++ {
time.Sleep(e.lifetime)
cleaned := e.s.clean()
t.Logf("rest (%d) indices (%d) cleaned (%d)",
e.s.size(), e.s.q.size(), cleaned)
assert.Equal(t, cleaned, e.n)
assert.Equal(t, e.s.q.size(), e.s.size()-e.n)
assert.Equal(t, e.s.size(), (e.parts-part)*e.n)
}
for part := 0; part < e.parts; part++ {
for beg, end := part*e.n, (part+1)*e.n; beg < end; beg++ {
k := fmt.Sprintf("%d", beg)
assert.Equal(t, part == 0 && e.s.exist(k) || part != 0 && !e.s.exist(k), true)
}
}
}
}
func TestShardClose(t *testing.T) {
elements := []struct {
s *shard
n int
lifetime time.Duration
}{
{
s: &shard{elements: make(map[string]element), q: &queue{}},
n: 1024,
lifetime: time.Minute,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
n: 2048,
lifetime: time.Minute,
},
{
s: &shard{elements: make(map[string]element), q: &queue{}},
n: 4096,
lifetime: 0,
},
}
for _, e := range elements {
for i := 0; i < e.n; i++ {
e.s.set(fmt.Sprintf("%d", i), i, e.lifetime)
}
size, qsize := e.s.size(), e.s.q.size()
assert.Equal(t, e.s.size(), e.n)
if e.lifetime != 0 {
assert.Equal(t, e.s.q.size(), e.n)
}
fn := 0
e.s.finalizer = func(k string, v interface{}) {
fn++
}
e.s.close()
t.Logf("size/original-size (%d/%d) queue-size/original-queue-size (%d/%d) finalize-count (%d)",
size, e.s.size(), qsize, e.s.q.size(), fn)
assert.Equal(t, fn, e.n)
assert.Equal(t, e.s.size(), 0)
assert.Equal(t, e.s.q.size(), 0)
}
}
func TestConfigValidate(t *testing.T) {
dummyFinalizer := func(string, interface{}) {}
elements := []struct {
c *Config
ok bool
}{
{nil, true},
{&Config{}, true},
{&Config{CleanInterval: 30 * time.Minute}, true},
{&Config{ShardNumber: 32, Finalizer: dummyFinalizer}, true},
{&Config{ShardNumber: 32, CleanInterval: 30 * time.Minute}, true},
{&Config{ShardNumber: minShardNumber, CleanInterval: 30 * time.Minute}, true},
{&Config{ShardNumber: 32, CleanInterval: minCleanInterval, Finalizer: dummyFinalizer}, true},
{&Config{ShardNumber: minShardNumber, CleanInterval: minCleanInterval}, true},
{&Config{ShardNumber: -1, CleanInterval: 10 * time.Minute}, false},
{&Config{ShardNumber: 10, CleanInterval: 30 * time.Second}, false},
{&Config{ShardNumber: -1, CleanInterval: 30 * time.Second}, false},
}
for _, e := range elements {
result, err := e.c.validate()
if !e.ok {
t.Logf("configuration is invalid: %s", err)
assert.NotEqual(t, err, nil)
} else {
assert.NotEqual(t, result, nil)
assert.Equal(t, err, nil)
t.Logf("result configuration: %#v", *result)
if e.c == nil || e.c.ShardNumber == 0 {
assert.Equal(t, result.ShardNumber, DefaultShardNumber)
} else {
assert.Equal(t, result.ShardNumber, e.c.ShardNumber)
}
if e.c == nil || e.c.CleanInterval == 0 {
assert.Equal(t, result.CleanInterval, DefaultCleanInterval)
} else {
assert.Equal(t, result.CleanInterval, e.c.CleanInterval)
}
if e.c != nil {
f1, f2 := reflect.ValueOf(result.Finalizer), reflect.ValueOf(e.c.Finalizer)
assert.Equal(t, f1.Pointer(), f2.Pointer())
} else {
assert.Equal(t, result.Finalizer, (func(string, interface{}))(nil))
}
}
}
}
func TestCacheNewAndClose(t *testing.T) {
finalizer := func(string, interface{}) {}
elements := []struct {
c *Config
ok bool
}{
{nil, true},
{&Config{}, true},
{&Config{ShardNumber: 0, CleanInterval: 10 * time.Minute}, true},
{&Config{ShardNumber: 32, CleanInterval: 0}, true},
{&Config{ShardNumber: 32, CleanInterval: 0, Finalizer: finalizer}, true},
{&Config{ShardNumber: 32, CleanInterval: 30 * time.Minute}, true},
{&Config{ShardNumber: minShardNumber, CleanInterval: 10 * time.Minute, Finalizer: finalizer}, true},
{&Config{ShardNumber: 16, CleanInterval: minCleanInterval}, true},
{&Config{ShardNumber: minShardNumber, CleanInterval: minCleanInterval, Finalizer: finalizer}, true},
{&Config{ShardNumber: 10, CleanInterval: 30 * time.Second}, false},
{&Config{ShardNumber: -1, CleanInterval: 30 * time.Second}, false},
}
for _, e := range elements {
c, err := New(e.c)
if e.ok {
e.c, _ = e.c.validate()
assert.T(t, c != nil)
assert.Equal(t, err, nil)
assert.Equal(t, int(c.n), e.c.ShardNumber)
assert.Equal(t, c.interval, e.c.CleanInterval)
assert.T(t, c.exit != nil)
assert.T(t, c.exitOnce != nil)
assert.Equal(t, len(c.shards), e.c.ShardNumber)
for _, s := range c.shards {
assert.T(t, s != nil)
assert.Equal(t,
reflect.ValueOf(s.finalizer).Pointer(),
reflect.ValueOf(e.c.Finalizer).Pointer(),
)
s.set("hello", "world", time.Hour)
assert.Equal(t, s.size(), 1)
assert.Equal(t, s.q.size(), 1)
}
c.Close()
for _, s := range c.shards {
assert.T(t, s.elements != nil)
assert.T(t, s.q != nil)
assert.Equal(t, s.size(), 0)
assert.Equal(t, s.q.size(), 0)
}
} else {
t.Logf("new cache failed: %s", err)
assert.T(t, c == nil)
assert.NotEqual(t, err, nil)
}
}
}
type worker struct {
id int
number int
cb func(*worker, int) error
}
func (w *worker) run(wg *sync.WaitGroup) error {
defer wg.Done()
for i := 0; i < w.number; i++ {
if err := w.cb(w, i); err != nil {
return err
}
}
return nil
}
type workers struct {
wn int
number int
cb func(*worker, int) error
ws []*worker
wg *sync.WaitGroup
}
func (ws *workers) initialize() {
ws.ws = make([]*worker, ws.wn)
ws.wg = &sync.WaitGroup{}
for i := 0; i < ws.wn; i++ {
ws.ws[i] = &worker{i, ws.number, ws.cb}
}
}
func (ws *workers) run() {
for _, w := range ws.ws {
w := w
ws.wg.Add(1)
go w.run(ws.wg)
}
ws.wg.Wait()
}
// The original design of the following struct is from StackOverflow:
// https://stackoverflow.com/questions/45030618/generate-a-random-bool-in-go?answertab=active#tab-top
type boolgen struct {
src rand.Source
cache int64
remaining int
}
func newBoolgen() *boolgen {
return &boolgen{src: rand.NewSource(time.Now().UnixNano())}
}
func (b *boolgen) Bool() bool {
if b.remaining == 0 {
b.cache, b.remaining = b.src.Int63(), 63
}
result := b.cache&0x01 == 1
b.cache >>= 1
b.remaining--
return result
}
// Memory Overhead Statistics
func TestMemoryOverheadStats(t *testing.T) {
if testing.Short() {
return
}
var (
shardNumbers = []int{1, 32, 256}
quantities = []int{1000, 10 * 1000, 100 * 1000, 1000 * 1000}
valueSizes = []int{32, 512, 2048, 8192}
expirations = []time.Duration{0, time.Hour}
)
for _, expiration := range expirations {
for _, shardNumber := range shardNumbers {
for _, quantity := range quantities {
for _, valueSize := range valueSizes {
var (
c, _ = New(&Config{
ShardNumber: shardNumber,
CleanInterval: time.Hour,
})
memStats runtime.MemStats
)
runtime.ReadMemStats(&memStats)
before := memStats.Alloc
for i := 0; i < quantity; i++ {
c.ESet(getStr(16), make([]byte, valueSize), expiration)
}
runtime.ReadMemStats(&memStats)
after := memStats.Alloc
c.Close()
runtime.GC() // NOTE: Can't skip this op.
total, payload := after-before, uint64(quantity*(16+valueSize))
t.Logf("expiration(%v) shard-number(%d) quantity(%d) value-size(%s) total(%s) payload(%s) overhead(%s) ratio(%.2f%%)\n",
expiration != 0, shardNumber, quantity, sizeReadable(uint64(valueSize)),
sizeReadable(total), sizeReadable(payload), sizeReadable(total-payload), float64(payload)/float64(total)*100,
)
}
}
}
}
}
func sizeReadable(b uint64) string {
const unit = 1024
if b < unit {
return fmt.Sprintf("%d B", b)
}
div, exp := uint64(unit), 0
for n := b / unit; n >= unit; n /= unit {
div *= unit
exp++
}
return fmt.Sprintf("%.1f %cB", float64(b)/float64(div), "KMGTPE"[exp])
}
// Benchmark
// Perform performance tests for all cache's write operations: Add, Set, EAdd, ESet.
func BenchmarkCacheWrite(b *testing.B) {
ops := []string{"add", "set", "expire-add", "expire-set"}
shardNumbers := []int{1, 4, 16, 32, 64, 128, 256, 512, 1024}
for _, op := range ops {
for _, shardNumber := range shardNumbers {
desc := fmt.Sprintf("operation(%s) shard-number(%d)", op, shardNumber)
b.Run(desc, benchmarkCacheWrite(map[string]interface{}{
"op": op,
"shard-number": shardNumber,
}))
}
}
}
// Perform performance tests for all cache's read operation: Get, Exist, Del.
// NOTE: Although Del method will delete an element from the cache, it will use
// the key to retrieve it before removing it. So we think it's a read operation.
func BenchmarkCacheRead(b *testing.B) {
ops := []string{"get", "exist", "del"}
shardNumbers := []int{32, 64}
quantities := []int{100000, 500000, 1000000}
for _, op := range ops {
for _, shardNumber := range shardNumbers {
for _, quantity := range quantities {
desc := fmt.Sprintf("operation(%s) shard-number(%d) quantity(%d)",
op, shardNumber, quantity)
b.Run(desc, benchmarkCacheRead(map[string]interface{}{
"op": op,
"shard-number": shardNumber,
"quantity": quantity,
}))
}
}
}
}
func benchmarkCacheWrite(m map[string]interface{}) func(*testing.B) {
return func(b *testing.B) {
c, _ := New(&Config{
ShardNumber: m["shard-number"].(int),
CleanInterval: time.Hour,
})
val := make([]byte, 2048)
b.StartTimer()
switch m["op"] {
case "add":
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
c.Add(getStr(16), val)
}
})
case "expire-add":
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
c.EAdd(getStr(16), val, time.Hour)
}
})
case "set":
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
c.Set(getStr(16), val)
}
})
case "expire-set":
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
c.ESet(getStr(16), val, time.Hour)
}
})
}
b.StopTimer()
c.Close()
}
}
func benchmarkCacheRead(m map[string]interface{}) func(*testing.B) {
return func(b *testing.B) {
c, _ := New(&Config{
ShardNumber: m["shard-number"].(int),
CleanInterval: time.Hour,
})
var (
quantity = m["quantity"].(int)
n = quantity / 100
keys = make([]string, 0, n)
val = make([]byte, 2048)
)
// Fill the cache.
for i := 0; i < quantity; i++ {
key := getStr(16)
c.Set(key, val)
if i%100 == 0 {
keys = append(keys, key)
}
}
b.StartTimer()
switch m["op"] {
case "get":
b.RunParallel(func(pb *testing.PB) {
i := 0
for pb.Next() {
c.Get(keys[i%n])
i++
}
})
case "exist":
b.RunParallel(func(pb *testing.PB) {
i := 0
for pb.Next() {
c.Exist(keys[i%n])
i++
}
})
case "del":
b.RunParallel(func(pb *testing.PB) {
i := 0
for pb.Next() {
c.Del(keys[i%n])
i++
}
})
}
b.StopTimer()
c.Close()
}
}
const LetterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
const (
letterIdxBits = 6
letterIdxMask = 1<<letterIdxBits - 1
letterIdxMax = 63 / letterIdxBits
)
// Generate a random string of length n, its character set is 'LetterBytes'.
func getStr(n int) string {
b := make([]byte, n)
for i, cache, remain := n-1, rand.Int63(), letterIdxMax; i >= 0; {
if remain == 0 {
cache, remain = rand.Int63(), letterIdxMax
}
if idx := int(cache & letterIdxMask); idx < len(LetterBytes) {
b[i] = LetterBytes[idx]
i--
}
cache >>= letterIdxBits
remain--
}
return string(b)
}
| |
types.rs
|
// LNP/BP Rust Library
// Written in 2020 by
// Dr. Maxim Orlovsky <[email protected]>
//
// To the extent possible under law, the author(s) have dedicated all
// copyright and related and neighboring rights to this software to
// the public domain worldwide. This software is distributed without
// any warranty.
//
// You should have received a copy of the MIT License
// along with this software.
// If not, see <https://opensource.org/licenses/MIT>.
use num_derive::{FromPrimitive, ToPrimitive};
use num_traits::{FromPrimitive, ToPrimitive};
use std::{convert::TryFrom, io};
pub trait UnsignedInteger:
Clone + Copy + PartialEq + Eq + PartialOrd + Ord + Into<u64> + std::fmt::Debug
{
fn as_u64(self) -> u64
|
}
impl UnsignedInteger for u8 {}
impl UnsignedInteger for u16 {}
impl UnsignedInteger for u32 {}
impl UnsignedInteger for u64 {}
pub trait Number: Clone + Copy + PartialEq + PartialOrd + std::fmt::Debug {}
impl Number for u8 {}
impl Number for u16 {}
impl Number for u32 {}
impl Number for u64 {}
impl Number for u128 {}
impl Number for i8 {}
impl Number for i16 {}
impl Number for i32 {}
impl Number for i64 {}
impl Number for i128 {}
impl Number for f32 {}
impl Number for f64 {}
/// NB: For now, we support only up to 128-bit integers and 64-bit floats;
/// nevertheless RGB schema standard allows up to 256-byte numeric types.
/// Support for larger types can be added later.
#[derive(
Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Display, ToPrimitive, FromPrimitive,
)]
#[display_from(Debug)]
#[repr(u8)]
#[non_exhaustive]
pub enum Bits {
Bit8 = 1,
Bit16 = 2,
Bit32 = 4,
Bit64 = 8,
Bit128 = 16,
}
impl Bits {
pub fn max_valu(&self) -> u128 {
match *self {
Bits::Bit8 => std::u8::MAX as u128,
Bits::Bit16 => std::u16::MAX as u128,
Bits::Bit32 => std::u32::MAX as u128,
Bits::Bit64 => std::u64::MAX as u128,
Bits::Bit128 => std::u128::MAX as u128,
}
}
pub fn byte_len(&self) -> usize {
self.to_u8()
.expect("Bit type MUST always occupy < 256 bytes") as usize
}
pub fn bit_len(&self) -> usize {
self.byte_len() * 8
}
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Display)]
#[display_from(Debug)]
#[repr(u8)]
#[non_exhaustive]
pub enum Occurences<I: UnsignedInteger> {
Once,
NoneOrOnce,
OnceOrUpTo(Option<I>),
NoneOrUpTo(Option<I>),
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Display)]
#[display_from(Debug)]
pub struct OccurencesError {
pub expected: Occurences<u64>,
pub found: u64,
}
#[derive(
Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Display, ToPrimitive, FromPrimitive,
)]
#[display_from(Debug)]
#[repr(u8)]
#[non_exhaustive]
pub enum DigestAlgorithm {
Ripemd160 = 0b_0000_1000_u8,
Sha256 = 0b_0001_0001_u8,
Sha512 = 0b_0001_0010_u8,
Bitcoin160 = 0b_0100_1000_u8,
Bitcoin256 = 0b_0101_0001_u8,
Tagged256 = 0b_1100_0000_u8,
}
pub mod elliptic_curve {
use num_derive::{FromPrimitive, ToPrimitive};
#[derive(
Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Display, ToPrimitive, FromPrimitive,
)]
#[display_from(Debug)]
#[repr(u8)]
#[non_exhaustive]
pub enum EllipticCurve {
Secp256k1 = 0x00,
Curve25519 = 0x10,
}
#[derive(
Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Display, ToPrimitive, FromPrimitive,
)]
#[display_from(Debug)]
#[repr(u8)]
#[non_exhaustive]
pub enum SignatureAlgorithm {
Ecdsa = 0,
Schnorr,
}
#[derive(
Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Display, ToPrimitive, FromPrimitive,
)]
#[display_from(Debug)]
#[repr(u8)]
#[non_exhaustive]
pub enum PointSerialization {
Uncompressed = 0,
Compressed,
SchnorrBip,
}
}
pub use elliptic_curve::EllipticCurve;
impl<I: UnsignedInteger> Occurences<I> {
pub fn translate_u64(self) -> Occurences<u64> {
match self {
Occurences::Once => Occurences::Once,
Occurences::NoneOrOnce => Occurences::NoneOrOnce,
Occurences::OnceOrUpTo(None) => Occurences::OnceOrUpTo(None),
Occurences::OnceOrUpTo(Some(max)) => Occurences::OnceOrUpTo(Some(max.as_u64())),
Occurences::NoneOrUpTo(None) => Occurences::NoneOrUpTo(None),
Occurences::NoneOrUpTo(Some(max)) => Occurences::NoneOrUpTo(Some(max.as_u64())),
}
}
pub fn check_count(&self, count: I) -> Result<(), OccurencesError> {
match self {
Occurences::Once if count.as_u64() == 1 => Ok(()),
Occurences::NoneOrOnce if count.as_u64() <= 1 => Ok(()),
Occurences::OnceOrUpTo(None) if count.as_u64() > 0 => Ok(()),
Occurences::OnceOrUpTo(Some(max)) if count.as_u64() > 0 && count <= *max => Ok(()),
Occurences::NoneOrUpTo(None) => Ok(()),
Occurences::NoneOrUpTo(Some(max)) if count <= *max => Ok(()),
_ => Err(OccurencesError {
expected: self.clone().translate_u64(),
found: count.as_u64(),
}),
}
}
}
mod strict_encoding {
use super::*;
use crate::strict_encoding::{Error, StrictDecode, StrictEncode};
impl_enum_strict_encoding!(DigestAlgorithm);
impl_enum_strict_encoding!(Bits);
impl_enum_strict_encoding!(EllipticCurve);
impl_enum_strict_encoding!(elliptic_curve::SignatureAlgorithm);
impl_enum_strict_encoding!(elliptic_curve::PointSerialization);
macro_rules! impl_occurences {
($type:ident) => {
impl StrictEncode for Occurences<$type> {
type Error = Error;
fn strict_encode<E: io::Write>(&self, mut e: E) -> Result<usize, Error> {
let value: (u8, u64) = match self {
Self::NoneOrOnce => (0x00u8, 0),
Self::Once => (0x01u8, 0),
Self::NoneOrUpTo(max) => (0xFEu8, max.unwrap_or(std::$type::MAX).into()),
Self::OnceOrUpTo(max) => (0xFFu8, max.unwrap_or(std::$type::MAX).into()),
};
let mut len = value.0.strict_encode(&mut e)?;
len += value.1.strict_encode(&mut e)?;
Ok(len)
}
}
impl StrictDecode for Occurences<$type> {
type Error = Error;
fn strict_decode<D: io::Read>(mut d: D) -> Result<Self, Error> {
let value = u8::strict_decode(&mut d)?;
let max: u64 = u64::strict_decode(&mut d)?;
let max: Option<$type> = match max {
val if val > 0 && val < ::std::$type::MAX.into() => {
Ok(Some($type::try_from(max).expect("Can't fail")))
}
val if val == ::std::$type::MAX as u64 => Ok(None),
invalid => Err(Error::ValueOutOfRange(
stringify!($type).to_string(),
0..(::std::$type::MAX as u64),
invalid,
)),
}?;
Ok(match value {
0x00u8 => Self::NoneOrOnce,
0x01u8 => Self::Once,
0xFEu8 => Self::NoneOrUpTo(max),
0xFFu8 => Self::OnceOrUpTo(max),
_ => panic!(
"New occurence types can't appear w/o this library to be aware of"
),
})
}
}
};
}
impl_occurences!(u8);
impl_occurences!(u16);
impl_occurences!(u32);
impl_occurences!(u64);
}
#[cfg(test)]
mod test {
use super::Occurences;
#[test]
fn test_once_check_count() {
let occurence: Occurences<u32> = Occurences::Once;
occurence.check_count(1).unwrap();
}
#[test]
#[should_panic(expected = "OccurencesError { expected: Once, found: 0 }")]
fn test_once_check_count_fail_zero() {
let occurence: Occurences<u32> = Occurences::Once;
occurence.check_count(0).unwrap();
}
#[test]
#[should_panic(expected = "OccurencesError { expected: Once, found: 2 }")]
fn test_once_check_count_fail_two() {
let occurence: Occurences<u32> = Occurences::Once;
occurence.check_count(2).unwrap();
}
#[test]
fn test_none_or_once_check_count() {
let occurence: Occurences<u32> = Occurences::NoneOrOnce;
occurence.check_count(1).unwrap();
}
#[test]
fn test_none_or_once_check_count_zero() {
let occurence: Occurences<u32> = Occurences::NoneOrOnce;
occurence.check_count(0).unwrap();
}
#[test]
#[should_panic(expected = "OccurencesError { expected: NoneOrOnce, found: 2 }")]
fn test_none_or_once_check_count_fail_two() {
let occurence: Occurences<u32> = Occurences::NoneOrOnce;
occurence.check_count(2).unwrap();
}
#[test]
fn test_once_or_up_to_none() {
let occurence: Occurences<u32> = Occurences::OnceOrUpTo(None);
occurence.check_count(1).unwrap();
}
#[test]
fn test_once_or_up_to_none_large() {
let occurence: Occurences<u32> = Occurences::OnceOrUpTo(None);
occurence.check_count(u32::MAX).unwrap();
}
#[test]
#[should_panic(expected = "OccurencesError { expected: OnceOrUpTo(None), found: 0 }")]
fn test_once_or_up_to_none_fail_zero() {
let occurence: Occurences<u32> = Occurences::OnceOrUpTo(None);
occurence.check_count(0).unwrap();
}
#[test]
fn test_once_or_up_to_42() {
let occurence: Occurences<u32> = Occurences::OnceOrUpTo(Some(42));
occurence.check_count(42).unwrap();
}
#[test]
#[should_panic(expected = "OccurencesError { expected: OnceOrUpTo(Some(42)), found: 43 }")]
fn test_once_or_up_to_42_large() {
let occurence: Occurences<u32> = Occurences::OnceOrUpTo(Some(42));
occurence.check_count(43).unwrap();
}
#[test]
#[should_panic(expected = "OccurencesError { expected: OnceOrUpTo(Some(42)), found: 0 }")]
fn test_once_or_up_to_42_fail_zero() {
let occurence: Occurences<u32> = Occurences::OnceOrUpTo(Some(42));
occurence.check_count(0).unwrap();
}
#[test]
fn test_none_or_up_to_none_zero() {
let occurence: Occurences<u32> = Occurences::NoneOrUpTo(None);
occurence.check_count(0).unwrap();
}
#[test]
fn test_none_or_up_to_none_large() {
let occurence: Occurences<u32> = Occurences::NoneOrUpTo(None);
occurence.check_count(u32::MAX).unwrap();
}
#[test]
fn test_none_or_up_to_42_zero() {
let occurence: Occurences<u32> = Occurences::NoneOrUpTo(Some(42));
occurence.check_count(0).unwrap();
}
#[test]
fn test_none_or_up_to_42() {
let occurence: Occurences<u32> = Occurences::NoneOrUpTo(Some(42));
occurence.check_count(42).unwrap();
}
#[test]
#[should_panic(expected = "OccurencesError { expected: NoneOrUpTo(Some(42)), found: 43 }")]
fn test_none_or_up_to_42_large() {
let occurence: Occurences<u32> = Occurences::NoneOrUpTo(Some(42));
occurence.check_count(43).unwrap();
}
}
|
{
self.into()
}
|
posix.py
|
""" Base class for all posixish platforms
"""
from pypy.translator.platform import Platform, log, _run_subprocess
from pypy.tool import autopath
import py, os
class BasePosix(Platform):
exe_ext = ''
def __init__(self, cc=None):
if cc is None:
cc = 'gcc'
self.cc = cc
def
|
(self, libraries):
return ['-l%s' % (lib,) for lib in libraries]
def _libdirs(self, library_dirs):
return ['-L%s' % (ldir,) for ldir in library_dirs]
def _includedirs(self, include_dirs):
return ['-I%s' % (idir,) for idir in include_dirs]
def _linkfiles(self, link_files):
return list(link_files)
def _compile_c_file(self, cc, cfile, compile_args):
oname = cfile.new(ext='o')
args = ['-c'] + compile_args + [str(cfile), '-o', str(oname)]
self._execute_c_compiler(cc, args, oname)
return oname
def _link(self, cc, ofiles, link_args, standalone, exe_name):
args = [str(ofile) for ofile in ofiles] + link_args
args += ['-o', str(exe_name)]
if not standalone:
args = self._args_for_shared(args)
self._execute_c_compiler(cc, args, exe_name)
return exe_name
def _preprocess_dirs(self, include_dirs):
# hook for maemo
return include_dirs
def gen_makefile(self, cfiles, eci, exe_name=None, path=None):
cfiles = [py.path.local(f) for f in cfiles]
cfiles += [py.path.local(f) for f in eci.separate_module_files]
if path is None:
path = cfiles[0].dirpath()
pypypath = py.path.local(autopath.pypydir)
if exe_name is None:
exe_name = cfiles[0].new(ext=self.exe_ext)
m = GnuMakefile(path)
m.exe_name = exe_name
m.eci = eci
def pypyrel(fpath):
rel = py.path.local(fpath).relto(pypypath)
if rel:
return os.path.join('$(PYPYDIR)', rel)
else:
return fpath
rel_cfiles = [m.pathrel(cfile) for cfile in cfiles]
rel_ofiles = [rel_cfile[:-2]+'.o' for rel_cfile in rel_cfiles]
m.cfiles = rel_cfiles
rel_includedirs = [pypyrel(incldir) for incldir in
self._preprocess_dirs(eci.include_dirs)]
m.comment('automatically generated makefile')
definitions = [
('PYPYDIR', autopath.pypydir),
('TARGET', exe_name.basename),
('DEFAULT_TARGET', '$(TARGET)'),
('SOURCES', rel_cfiles),
('OBJECTS', rel_ofiles),
('LIBS', self._libs(eci.libraries)),
('LIBDIRS', self._libdirs(eci.library_dirs)),
('INCLUDEDIRS', self._includedirs(rel_includedirs)),
('CFLAGS', self.cflags + list(eci.compile_extra)),
('LDFLAGS', self.link_flags + list(eci.link_extra)),
('CC', self.cc)
]
for args in definitions:
m.definition(*args)
rules = [
('all', '$(DEFAULT_TARGET)', []),
('$(TARGET)', '$(OBJECTS)', '$(CC) $(LDFLAGS) -o $@ $(OBJECTS) $(LIBDIRS) $(LIBS)'),
('%.o', '%.c', '$(CC) $(CFLAGS) -o $@ -c $< $(INCLUDEDIRS)'),
]
for rule in rules:
m.rule(*rule)
return m
def execute_makefile(self, path_to_makefile):
if isinstance(path_to_makefile, GnuMakefile):
path = path_to_makefile.makefile_dir
else:
path = path_to_makefile
log.execute('make in %s' % (path,))
returncode, stdout, stderr = _run_subprocess('make', ['-C', str(path)])
self._handle_error(returncode, stdout, stderr, path.join('make'))
class Definition(object):
    # A 'NAME = value' variable definition inside the generated Makefile.
    # NOTE: write() uses Python 2 'print >> f' statements.
    def __init__(self, name, value):
        self.name = name
        self.value = value
    def write(self, f):
        # Emit either a one-line definition (string value) or a
        # backslash-continued multi-line definition (list value).
        def write_list(prefix, lst):
            for i, fn in enumerate(lst):
                print >> f, prefix, fn,
                if i < len(lst)-1:
                    print >> f, '\\'
                else:
                    print >> f
                # Continuation lines are aligned under the first one.
                prefix = ' ' * len(prefix)
        name, value = self.name, self.value
        if isinstance(value, str):
            f.write('%s = %s\n' % (name, value))
        else:
            write_list('%s =' % (name,), value)
            if value:
                f.write('\n')
class Rule(object):
    """One make rule: a target, its dependencies and its recipe lines."""

    def __init__(self, target, deps, body):
        self.target = target
        self.deps = deps
        self.body = body

    def write(self, f):
        """Serialize the rule to *f*, followed by a blank separator line."""
        deps = self.deps
        if not isinstance(deps, str):
            deps = ' '.join(deps)
        f.write('%s: %s\n' % (self.target, deps))
        recipe = self.body
        if isinstance(recipe, str):
            f.write('\t%s\n' % (recipe,))
        elif recipe:
            # Each command line of the recipe gets its own tab-prefixed line.
            f.write('\t%s\n' % ('\n\t'.join(recipe),))
        f.write('\n')
class Comment(object):
    """A '#' comment line in the generated Makefile."""

    def __init__(self, body):
        self.body = body

    def write(self, f):
        f.write('# %s\n' % (self.body,))
class GnuMakefile(object):
    # In-memory model of a Makefile: an ordered list of lines (Definition /
    # Rule / Comment objects) plus an index of definitions by name so a
    # definition can be replaced in place.
    def __init__(self, path=None):
        self.defs = {}    # definition name -> index into self.lines
        self.lines = []   # Definition / Rule / Comment objects, in order
        self.makefile_dir = py.path.local(path)
    def pathrel(self, fpath):
        # Render fpath relative to the makefile directory when that is
        # cheap to express; otherwise fall back to the absolute path.
        if fpath.dirpath() == self.makefile_dir:
            return fpath.basename
        elif fpath.dirpath().dirpath() == self.makefile_dir.dirpath():
            return '../' + fpath.relto(self.makefile_dir.dirpath())
        else:
            return str(fpath)
    def definition(self, name, value):
        # Add a variable definition, replacing any earlier definition of the
        # same name in place (position in the file is preserved).
        defs = self.defs
        defn = Definition(name, value)
        if name in defs:
            self.lines[defs[name]] = defn
        else:
            defs[name] = len(self.lines)
            self.lines.append(defn)
    def rule(self, target, deps, body):
        self.lines.append(Rule(target, deps, body))
    def comment(self, body):
        self.lines.append(Comment(body))
    def write(self, out=None):
        # Serialize to 'Makefile' inside makefile_dir, or to the given
        # stream; only closes the file if we opened it ourselves.
        if out is None:
            f = self.makefile_dir.join('Makefile').open('w')
        else:
            f = out
        for line in self.lines:
            line.write(f)
        f.flush()
        if out is None:
            f.close()
|
_libs
|
Landing.tsx
|
import React from 'react';
import { FiArrowRight } from 'react-icons/fi';
import { Link } from 'react-router-dom'
import '../styles/pages/landing.css';
import logoImg from '../images/logo.svg';
function Landing() {
return(
<div id="page-landing">
<div className="content-wrapper">
<img src={logoImg} alt="Happy"/>
<main>
|
</main>
<div className="location">
<strong>Salvador</strong>
<span>Bahia</span>
</div>
<Link to="/app" className="enter-app">
<FiArrowRight size={26} color="rgba(0, 0, 0, 0.6)"/>
</Link>
</div>
</div>
);
}
export default Landing;
|
<h1>Leve felicidade para o mundo</h1>
<p>Visite orfanatos e mude o dia de muitas crianças!</p>
|
action.go
|
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package action
import (
"encoding/json"
"fmt"
"path"
"strings"
"code.gitea.io/gitea/models"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/notification/base"
"code.gitea.io/gitea/modules/repository"
)
type actionNotifier struct {
base.NullNotifier
}
var (
_ base.Notifier = &actionNotifier{}
)
// NewNotifier create a new actionNotifier notifier
func NewNotifier() base.Notifier
|
// NotifyNewIssue records a "created issue" activity-feed action for watchers.
func (a *actionNotifier) NotifyNewIssue(issue *models.Issue) {
	// Poster and repo are loaded lazily; on failure we only log and bail,
	// since a feed entry without them would be malformed.
	if err := issue.LoadPoster(); err != nil {
		log.Error("issue.LoadPoster: %v", err)
		return
	}
	if err := issue.LoadRepo(); err != nil {
		log.Error("issue.LoadRepo: %v", err)
		return
	}
	repo := issue.Repo

	if err := models.NotifyWatchers(&models.Action{
		ActUserID: issue.Poster.ID,
		ActUser:   issue.Poster,
		OpType:    models.ActionCreateIssue,
		// Content format "<index>|<title>" is parsed by the feed renderer.
		Content:   fmt.Sprintf("%d|%s", issue.Index, issue.Title),
		RepoID:    repo.ID,
		Repo:      repo,
		IsPrivate: repo.IsPrivate,
	}); err != nil {
		log.Error("NotifyWatchers: %v", err)
	}
}
// NotifyIssueChangeStatus notifies close or reopen issue to notifiers
func (a *actionNotifier) NotifyIssueChangeStatus(doer *models.User, issue *models.Issue, actionComment *models.Comment, closeOrReopen bool) {
	// Compose comment action, could be plain comment, close or reopen issue/pull request.
	// This object will be used to notify watchers in the end of function.
	act := &models.Action{
		ActUserID: doer.ID,
		ActUser:   doer,
		// Only the issue index is carried; the title part is deliberately empty.
		Content:   fmt.Sprintf("%d|%s", issue.Index, ""),
		RepoID:    issue.Repo.ID,
		Repo:      issue.Repo,
		Comment:   actionComment,
		CommentID: actionComment.ID,
		IsPrivate: issue.Repo.IsPrivate,
	}
	// Check comment type.
	// closeOrReopen == true means the issue/PR was closed; false, reopened.
	if closeOrReopen {
		act.OpType = models.ActionCloseIssue
		if issue.IsPull {
			act.OpType = models.ActionClosePullRequest
		}
	} else {
		act.OpType = models.ActionReopenIssue
		if issue.IsPull {
			act.OpType = models.ActionReopenPullRequest
		}
	}

	// Notify watchers for whatever action comes in, ignore if no action type.
	if err := models.NotifyWatchers(act); err != nil {
		log.Error("NotifyWatchers: %v", err)
	}
}
// NotifyCreateIssueComment notifies comment on an issue to notifiers
func (a *actionNotifier) NotifyCreateIssueComment(doer *models.User, repo *models.Repository,
	issue *models.Issue, comment *models.Comment) {
	act := &models.Action{
		ActUserID: doer.ID,
		ActUser:   doer,
		RepoID:    issue.Repo.ID,
		Repo:      issue.Repo,
		Comment:   comment,
		CommentID: comment.ID,
		IsPrivate: issue.Repo.IsPrivate,
	}

	// Truncate long comments to at most 200 bytes, preferring to cut at the
	// last space so no word is split. strings.LastIndex returns -1 when the
	// first 200 bytes contain no space at all (e.g. one very long word or
	// URL); the previous code sliced with that -1 and panicked, so fall back
	// to a hard cut at 200 in that case.
	content := comment.Content
	if len(content) > 200 {
		cut := strings.LastIndex(content[:200], " ")
		if cut < 0 {
			cut = 200
		}
		content = content[:cut] + "…"
	}
	act.Content = fmt.Sprintf("%d|%s", issue.Index, content)

	// Comments are attached to both issues and pull requests.
	if issue.IsPull {
		act.OpType = models.ActionCommentPull
	} else {
		act.OpType = models.ActionCommentIssue
	}

	// Notify watchers for whatever action comes in, ignore if no action type.
	if err := models.NotifyWatchers(act); err != nil {
		log.Error("NotifyWatchers: %v", err)
	}
}
// NotifyNewPullRequest records a "created pull request" feed action.
func (a *actionNotifier) NotifyNewPullRequest(pull *models.PullRequest) {
	// Issue, repo and poster are loaded lazily; bail (logging only) on failure.
	if err := pull.LoadIssue(); err != nil {
		log.Error("pull.LoadIssue: %v", err)
		return
	}
	if err := pull.Issue.LoadRepo(); err != nil {
		log.Error("pull.Issue.LoadRepo: %v", err)
		return
	}
	if err := pull.Issue.LoadPoster(); err != nil {
		log.Error("pull.Issue.LoadPoster: %v", err)
		return
	}

	if err := models.NotifyWatchers(&models.Action{
		ActUserID: pull.Issue.Poster.ID,
		ActUser:   pull.Issue.Poster,
		OpType:    models.ActionCreatePullRequest,
		// "<index>|<title>" — same content format as issue creation.
		Content:   fmt.Sprintf("%d|%s", pull.Issue.Index, pull.Issue.Title),
		RepoID:    pull.Issue.Repo.ID,
		Repo:      pull.Issue.Repo,
		IsPrivate: pull.Issue.Repo.IsPrivate,
	}); err != nil {
		log.Error("NotifyWatchers: %v", err)
	}
}
// NotifyRenameRepository records a repository rename in the activity feed.
// Content carries the OLD name; the new name is available via Repo.
func (a *actionNotifier) NotifyRenameRepository(doer *models.User, repo *models.Repository, oldRepoName string) {
	log.Trace("action.ChangeRepositoryName: %s/%s", doer.Name, repo.Name)

	if err := models.NotifyWatchers(&models.Action{
		ActUserID: doer.ID,
		ActUser:   doer,
		OpType:    models.ActionRenameRepo,
		RepoID:    repo.ID,
		Repo:      repo,
		IsPrivate: repo.IsPrivate,
		Content:   oldRepoName,
	}); err != nil {
		log.Error("NotifyWatchers: %v", err)
	}
}
// NotifyTransferRepository records a repository ownership transfer.
// Content carries the OLD "owner/name" path of the repository.
func (a *actionNotifier) NotifyTransferRepository(doer *models.User, repo *models.Repository, oldOwnerName string) {
	if err := models.NotifyWatchers(&models.Action{
		ActUserID: doer.ID,
		ActUser:   doer,
		OpType:    models.ActionTransferRepo,
		RepoID:    repo.ID,
		Repo:      repo,
		IsPrivate: repo.IsPrivate,
		Content:   path.Join(oldOwnerName, repo.Name),
	}); err != nil {
		log.Error("NotifyWatchers: %v", err)
	}
}
// NotifyCreateRepository records repository creation in the activity feed.
// u is the owning user/org; only doer and repo are used for the feed entry.
func (a *actionNotifier) NotifyCreateRepository(doer *models.User, u *models.User, repo *models.Repository) {
	if err := models.NotifyWatchers(&models.Action{
		ActUserID: doer.ID,
		ActUser:   doer,
		OpType:    models.ActionCreateRepo,
		RepoID:    repo.ID,
		Repo:      repo,
		IsPrivate: repo.IsPrivate,
	}); err != nil {
		log.Error("notify watchers '%d/%d': %v", doer.ID, repo.ID, err)
	}
}
// NotifyForkRepository records a fork in the activity feed. The fork is
// reported with ActionCreateRepo (the fork IS a newly created repo);
// oldRepo is accepted for interface compatibility but not referenced here.
func (a *actionNotifier) NotifyForkRepository(doer *models.User, oldRepo, repo *models.Repository) {
	if err := models.NotifyWatchers(&models.Action{
		ActUserID: doer.ID,
		ActUser:   doer,
		OpType:    models.ActionCreateRepo,
		RepoID:    repo.ID,
		Repo:      repo,
		IsPrivate: repo.IsPrivate,
	}); err != nil {
		log.Error("notify watchers '%d/%d': %v", doer.ID, repo.ID, err)
	}
}
// NotifyPullRequestReview fans a PR review out into feed actions: one per
// inline code comment, plus one for the review summary itself.
func (a *actionNotifier) NotifyPullRequestReview(pr *models.PullRequest, review *models.Review, comment *models.Comment) {
	if err := review.LoadReviewer(); err != nil {
		log.Error("LoadReviewer '%d/%d': %v", review.ID, review.ReviewerID, err)
		return
	}
	if err := review.LoadCodeComments(); err != nil {
		log.Error("LoadCodeComments '%d/%d': %v", review.Reviewer.ID, review.ID, err)
		return
	}

	var actions = make([]*models.Action, 0, 10)

	// One feed action per inline code comment; CodeComments is keyed by
	// file then line, each holding a list of comments.
	for _, lines := range review.CodeComments {
		for _, comments := range lines {
			for _, comm := range comments {
				actions = append(actions, &models.Action{
					ActUserID: review.Reviewer.ID,
					ActUser:   review.Reviewer,
					// Only the first line of the comment goes into the feed.
					Content:   fmt.Sprintf("%d|%s", review.Issue.Index, strings.Split(comm.Content, "\n")[0]),
					OpType:    models.ActionCommentPull,
					RepoID:    review.Issue.RepoID,
					Repo:      review.Issue.Repo,
					IsPrivate: review.Issue.Repo.IsPrivate,
					Comment:   comm,
					CommentID: comm.ID,
				})
			}
		}
	}

	// The summary action is skipped only for an empty plain-comment review
	// (nothing to show); approvals/rejections are always recorded.
	if review.Type != models.ReviewTypeComment || strings.TrimSpace(comment.Content) != "" {
		action := &models.Action{
			ActUserID: review.Reviewer.ID,
			ActUser:   review.Reviewer,
			Content:   fmt.Sprintf("%d|%s", review.Issue.Index, strings.Split(comment.Content, "\n")[0]),
			RepoID:    review.Issue.RepoID,
			Repo:      review.Issue.Repo,
			IsPrivate: review.Issue.Repo.IsPrivate,
			Comment:   comment,
			CommentID: comment.ID,
		}

		switch review.Type {
		case models.ReviewTypeApprove:
			action.OpType = models.ActionApprovePullRequest
		case models.ReviewTypeReject:
			action.OpType = models.ActionRejectPullRequest
		default:
			action.OpType = models.ActionCommentPull
		}

		actions = append(actions, action)
	}

	if err := models.NotifyWatchersActions(actions); err != nil {
		log.Error("notify watchers '%d/%d': %v", review.Reviewer.ID, review.Issue.RepoID, err)
	}
}
// NotifyMergePullRequest records a PR merge in the activity feed.
func (*actionNotifier) NotifyMergePullRequest(pr *models.PullRequest, doer *models.User) {
	if err := models.NotifyWatchers(&models.Action{
		ActUserID: doer.ID,
		ActUser:   doer,
		OpType:    models.ActionMergePullRequest,
		Content:   fmt.Sprintf("%d|%s", pr.Issue.Index, pr.Issue.Title),
		RepoID:    pr.Issue.Repo.ID,
		Repo:      pr.Issue.Repo,
		IsPrivate: pr.Issue.Repo.IsPrivate,
	}); err != nil {
		log.Error("NotifyWatchers [%d]: %v", pr.ID, err)
	}
}
// NotifySyncPushCommits records commits pushed by a mirror sync. The action
// is attributed to the repository owner (mirror syncs have no human pusher
// in the feed); the commit list is serialized as JSON into Content.
func (a *actionNotifier) NotifySyncPushCommits(pusher *models.User, repo *models.Repository, refName, oldCommitID, newCommitID string, commits *repository.PushCommits) {
	data, err := json.Marshal(commits)
	if err != nil {
		log.Error("json.Marshal: %v", err)
		return
	}

	if err := models.NotifyWatchers(&models.Action{
		ActUserID: repo.OwnerID,
		ActUser:   repo.MustOwner(),
		OpType:    models.ActionMirrorSyncPush,
		RepoID:    repo.ID,
		Repo:      repo,
		IsPrivate: repo.IsPrivate,
		RefName:   refName,
		Content:   string(data),
	}); err != nil {
		log.Error("notifyWatchers: %v", err)
	}
}
// NotifySyncCreateRef records a branch/tag created by a mirror sync,
// attributed to the repository owner. refType is unused here; the full ref
// name is stored in RefName.
func (a *actionNotifier) NotifySyncCreateRef(doer *models.User, repo *models.Repository, refType, refFullName string) {
	if err := models.NotifyWatchers(&models.Action{
		ActUserID: repo.OwnerID,
		ActUser:   repo.MustOwner(),
		OpType:    models.ActionMirrorSyncCreate,
		RepoID:    repo.ID,
		Repo:      repo,
		IsPrivate: repo.IsPrivate,
		RefName:   refFullName,
	}); err != nil {
		log.Error("notifyWatchers: %v", err)
	}
}
// NotifySyncDeleteRef records a branch/tag deleted by a mirror sync,
// attributed to the repository owner.
func (a *actionNotifier) NotifySyncDeleteRef(doer *models.User, repo *models.Repository, refType, refFullName string) {
	if err := models.NotifyWatchers(&models.Action{
		ActUserID: repo.OwnerID,
		ActUser:   repo.MustOwner(),
		// Bug fix: this previously reused ActionMirrorSyncCreate (copy-paste
		// from NotifySyncCreateRef), so deleted refs showed up in activity
		// feeds as "created". Use the delete op type.
		OpType:    models.ActionMirrorSyncDelete,
		RepoID:    repo.ID,
		Repo:      repo,
		IsPrivate: repo.IsPrivate,
		RefName:   refFullName,
	}); err != nil {
		log.Error("notifyWatchers: %v", err)
	}
}
// NotifyNewRelease records a published release: Content is the release
// title, RefName the tag it was published from.
func (a *actionNotifier) NotifyNewRelease(rel *models.Release) {
	if err := rel.LoadAttributes(); err != nil {
		log.Error("NotifyNewRelease: %v", err)
		return
	}
	if err := models.NotifyWatchers(&models.Action{
		ActUserID: rel.PublisherID,
		ActUser:   rel.Publisher,
		OpType:    models.ActionPublishRelease,
		RepoID:    rel.RepoID,
		Repo:      rel.Repo,
		IsPrivate: rel.Repo.IsPrivate,
		Content:   rel.Title,
		RefName:   rel.TagName,
	}); err != nil {
		log.Error("notifyWatchers: %v", err)
	}
}
|
{
return &actionNotifier{}
}
|
VaspTestKPOINT.py
|
#!/home/zhangzhengde/bin/bin/python3
#coding=utf-8
import os
import argparse
import VaspCZ.zzdlib as zzd
|
def modify_vasp_sh(jobname, nodes, ppn):
    """Rewrite ./Vasp.sh in place, updating the PBS job name and the
    nodes/ppn resource request; all other lines pass through unchanged."""
    def rewrite(line):
        if ' #PBS -N' in line:
            return f' #PBS -N {jobname}\n'
        if ' #PBS -l nodes' in line:
            return f' #PBS -l nodes={nodes}:ppn={ppn}\n'
        return line

    with open('./Vasp.sh', 'r') as fh:
        lines = fh.readlines()
    with open('./Vasp.sh', 'w') as fh:
        fh.writelines(rewrite(line) for line in lines)
def run(jobname, nodes, ppn, K):
    # Prepare and submit one VASP k-point-convergence job in subdirectory K.
    # K is a mesh string such as '333'; its first three characters become
    # the subdivision counts on line 4 of KPOINTS.
    input_files = 'INCAR,POSCAR,POTCAR,KPOINTS'.split(',')
    for i in input_files:
        if i not in os.listdir():
            raise NameError(f'ENCUT Test: input file "{i}" missing in current dir.')
    if os.path.isdir(K):  # directory already exists: do nothing
        print(f'k_mesh:{K} already exists, do nothing.')
        pass
    else:
        os.system('mkdir '+K)  # create the job directory
        for file in input_files:
            if os.path.isfile(file):
                os.system(f'cp {file} {K}')  # copy the input files over
        os.chdir(K)  # enter the created directory
        vasp_sh_path = zzd.File.Vaspsh_path()
        os.system(f'cp {vasp_sh_path}/Vasp.sh .')
        # INCAR needs no modification
        # POTCAR needs no modification
        # POSCAR needs no modification
        # Modify KPOINTS: line 4 (index 3) holds the k-mesh subdivisions
        with open('./KPOINTS', 'r') as f:
            data = f.readlines()
        data[3] = f'{K[0]} {K[1]} {K[2]}\n'
        with open('./KPOINTS', 'w') as f:
            f.writelines(data)
        # Modify Vasp.sh: set the job name and resources, then submit
        modify_vasp_sh(f'{jobname}_{K}', nodes, ppn)
        # debug helpers, normally disabled:
        ## os.system('cat KPOINTS')
        ## os.system('cat Vasp.sh')
        # os.system('qsub Vasp.sh') # direct submit, replaced by check_and_qsub
        zzd.Vasp.check_and_qsub()
        os.chdir('..')
if __name__ == '__main__':
    # CLI entry point: parse arguments, confirm interactively, then launch
    # one k-point test job per requested mesh (all values kept as strings).
    parser = argparse.ArgumentParser()
    parser.add_argument('-jb', '--jobname_prefix', default='k_test', type=str)
    parser.add_argument('-nd', '--nodes', default='1', type=str)
    parser.add_argument('-np', '--ppn', default='8', type=str)
    parser.add_argument('-k', '--k_mesh', default='111,333,555,777,999', type=str)
    args = parser.parse_args()
    jobname = args.jobname_prefix
    nodes = args.nodes
    k_mesh = args.k_mesh.split(',')
    ppn = args.ppn
    print(f'running k_point test \n parameter: \njobname_prefix:{jobname} nodes:{nodes} ppn:{ppn} \nk_mesh:{k_mesh}')
    # Empty input (plain Enter) counts as confirmation.
    inp = input('confirm run ([y]es/no): ')
    if inp in ['', 'y', 'yes', 'Y', 'Yes', 'YES']:
        for K in k_mesh:
            run(jobname, nodes, ppn, K)
    else:
        print('Did not run.')
| |
main.go
|
// Warm Up
//
// Build a command-line game store.
//
// 1. Declare the following structs:
//
// + item: id (int), name (string), price (int)
//
// + game: embed the item, genre (string)
//
//
// 2. Create a game slice using the following data:
//
// id name price genre
//
// 1 god of war 50 action adventure
// 2 x-com 2 30 strategy
// 3 minecraft 20 sandbox
//
//
// 3. Print all the games.
package main
|
// main prints the game-store inventory described in the exercise header.
func main() {
	// item is the common store entry: identifier, display name and price.
	type item struct {
		id    int
		name  string
		price int
	}
	// game embeds item and adds a genre.
	type game struct {
		item
		genre string
	}

	// Inventory from the exercise table. Bug fix: x-com 2 costs 30 per the
	// spec comment at the top of the file; the code previously said 40.
	games := []game{
		{
			item:  item{id: 1, name: "god of war", price: 50},
			genre: "action adventure",
		},
		{item: item{id: 2, name: "x-com 2", price: 30}, genre: "strategy"},
		{item: item{id: 3, name: "minecraft", price: 20}, genre: "sandbox"},
	}

	fmt.Printf("Aditya's game store has %d games.\n\n", len(games))

	for _, g := range games {
		fmt.Printf("#%d: %-15q %-20s $%d\n",
			g.id, g.name, "("+g.genre+")", g.price)
	}
}
|
import "fmt"
|
test_cox_poisson.py
|
import unittest
import numpy as np
from hmc.applications.cox_poisson import forward_transform, inverse_transform, generate_data, gaussian_posterior_factory, hyperparameter_posterior_factory
from hmc.applications.cox_poisson.prior import log_prior, grad_log_prior, hess_log_prior, grad_hess_log_prior
class TestCoxPoisson(unittest.TestCase):
def test_prior(self):
def transformed_log_prior(qt):
return log_prior(*inverse_transform(qt)[0])
|
transformed_grad_log_prior = lambda qt: grad_log_prior(*qt)
transformed_hess_log_prior = lambda qt: hess_log_prior(*qt)
transformed_grad_hess_log_prior = lambda qt: grad_hess_log_prior(*qt)
q = np.random.uniform(size=(2, ))
qt, _ = forward_transform(q)
delta = 1e-5
u = np.random.normal(size=qt.shape)
fd = (transformed_log_prior(qt + 0.5*delta*u) - transformed_log_prior(qt - 0.5*delta*u)) / delta
dd = transformed_grad_log_prior(qt)@u
self.assertTrue(np.allclose(fd, dd))
fd = (transformed_grad_log_prior(qt + 0.5*delta*u) - transformed_grad_log_prior(qt - 0.5*delta*u)) / delta
dd = transformed_hess_log_prior(qt)@u
self.assertTrue(np.allclose(fd, dd))
fd = (transformed_hess_log_prior(qt + 0.5*delta*u) - transformed_hess_log_prior(qt - 0.5*delta*u)) / delta
dd = transformed_grad_hess_log_prior(qt)@u
self.assertTrue(np.allclose(fd, dd))
    def test_gaussian_posterior(self):
        # Finite-difference check of the gradient of the log-Gaussian Cox
        # process posterior over latent intensities, at random hyperparameters.
        sigmasq, beta = np.random.uniform(size=(2, ))
        mu = np.log(126.0) - sigmasq / 2.0
        dist, x, y = generate_data(10, mu, beta, sigmasq)
        euclidean_auxiliaries, metric = gaussian_posterior_factory(dist, mu, sigmasq, beta, y)
        # auxiliaries return (log posterior, gradient)
        log_posterior = lambda x: euclidean_auxiliaries(x)[0]
        grad_log_posterior = lambda x: euclidean_auxiliaries(x)[1]
        delta = 1e-6  # central-difference step size
        u = np.random.normal(size=x.shape)  # random probe direction
        fd = (log_posterior(x + 0.5*delta*u) - log_posterior(x - 0.5*delta*u)) / delta
        dd = grad_log_posterior(x)@u
        self.assertTrue(np.allclose(fd, dd))
    def test_hyperparameter_posterior(self):
        # Finite-difference checks for the hyperparameter posterior: both the
        # gradient of the log-posterior and the derivative of the metric, in
        # the unconstrained (forward-transformed) parameterization.
        sigmasq, beta = np.random.uniform(size=(2, ))
        mu = np.log(126.0) - sigmasq / 2.0
        dist, x, y = generate_data(16, mu, beta, sigmasq)
        log_posterior, metric, _, euclidean_auxiliaries, riemannian_auxiliaries = hyperparameter_posterior_factory(dist, mu, x, y)
        grad_log_posterior = lambda qt: euclidean_auxiliaries(qt)[1]
        grad_metric = lambda qt: riemannian_auxiliaries(qt)[3]
        q = np.array([sigmasq, beta])
        qt, _ = forward_transform(q)
        delta = 1e-4  # central-difference step size
        u = np.random.normal(size=(2, ))  # random probe direction
        fd = (log_posterior(qt + 0.5*delta*u) - log_posterior(qt - 0.5*delta*u)) / delta
        dd = grad_log_posterior(qt)@u
        self.assertTrue(np.allclose(fd, dd))
        fd = (metric(qt + 0.5*delta*u) - metric(qt - 0.5*delta*u)) / delta
        dd = grad_metric(qt)@u
        self.assertTrue(np.allclose(fd, dd))
| |
index.ts
|
import { combineEpics } from 'redux-observable';
|
const rootEpic = combineEpics(
...Object.values(homeEpics),
);
export default rootEpic;
|
import * as homeEpics from './homeEpics';
// prettier-ignore
|
slider.rs
|
use crate::layout::{stack_vertically, ContainerOrientation, Widget};
use crate::widgets::text_box::TextBox;
use crate::{
hotkey, Canvas, Color, EventCtx, EventLoopMode, GeomBatch, GfxCtx, InputResult, Key, Line,
ModalMenu, MultiKey, ScreenDims, ScreenPt, ScreenRectangle, Text, Warper,
};
use geom::{Distance, Duration, Polygon, Pt2D};
// A horizontal click-and-drag slider; position is stored as a percent in
// [0, 1]. Geometry (top_left, dims) is assigned later by the layout pass.
pub struct Slider {
    current_percent: f64,
    mouse_on_slider: bool, // cursor is currently over the draggable handle
    dragging: bool,
    top_left: ScreenPt,
    dims: Dims,
}
impl Slider {
    /// Create an idle slider at 0%. Position and dimensions are placeholders
    /// until the layout pass calls Widget::set_pos.
    pub fn new() -> Slider {
        Slider {
            current_percent: 0.0,
            mouse_on_slider: false,
            dragging: false,
            // Placeholders
            top_left: ScreenPt::new(0.0, 0.0),
            dims: Dims::fit_total_width(10.0),
        }
    }
    /// Current slider position in [0, 1].
    pub fn get_percent(&self) -> f64 {
        self.current_percent
    }
    /// Map the percent onto an index in 0..num_items (truncating toward 0).
    pub fn get_value(&self, num_items: usize) -> usize {
        (self.current_percent * (num_items as f64 - 1.0)) as usize
    }
    /// Jump the slider to `percent` (must be within [0, 1]), cancelling any
    /// drag in progress and refreshing hover state, since the handle may
    /// have moved under or away from the cursor.
    pub fn set_percent(&mut self, ctx: &EventCtx, percent: f64) {
        assert!(percent >= 0.0 && percent <= 1.0);
        self.current_percent = percent;
        // Just reset dragging, to prevent chaos
        self.dragging = false;
        let pt = ctx.canvas.get_cursor_in_screen_space();
        self.mouse_on_slider = self.slider_geom().contains_pt(Pt2D::new(pt.x, pt.y));
    }
    /// Position the slider exactly on item `idx` of `num_items`.
    /// Panics (via set_percent's assert) if idx is out of range.
    pub fn set_value(&mut self, ctx: &EventCtx, idx: usize, num_items: usize) {
        self.set_percent(ctx, (idx as f64) / (num_items as f64 - 1.0));
    }
// Returns true if the percentage changed.
pub fn
|
(&mut self, ctx: &mut EventCtx) -> bool {
if self.dragging {
if ctx.input.get_moved_mouse().is_some() {
let percent = (ctx.canvas.get_cursor_in_screen_space().x
- self.dims.horiz_padding
- self.top_left.x)
/ self.dims.bar_width;
self.current_percent = percent.min(1.0).max(0.0);
return true;
}
if ctx.input.left_mouse_button_released() {
self.dragging = false;
}
} else {
if ctx.redo_mouseover() {
let pt = ctx.canvas.get_cursor_in_screen_space();
self.mouse_on_slider = self.slider_geom().contains_pt(Pt2D::new(pt.x, pt.y));
}
if ctx.input.left_mouse_button_pressed() {
if self.mouse_on_slider {
self.dragging = true;
} else {
// Did we click somewhere else on the bar?
let pt = ctx.canvas.get_cursor_in_screen_space();
if Polygon::rectangle_topleft(
Pt2D::new(
self.dims.horiz_padding + self.top_left.x,
self.dims.vert_padding + self.top_left.y,
),
Distance::meters(self.dims.bar_width),
Distance::meters(self.dims.bar_height),
)
.contains_pt(Pt2D::new(pt.x, pt.y))
{
let percent = (pt.x - self.dims.horiz_padding - self.top_left.x)
/ self.dims.bar_width;
self.current_percent = percent.min(1.0).max(0.0);
self.mouse_on_slider = true;
self.dragging = true;
return true;
}
}
}
}
false
}
    /// Render the slider in screen space: background, white bar, green
    /// progress fill, then the handle (yellow when hovered).
    pub fn draw(&self, g: &mut GfxCtx) {
        // TODO Cache the batch
        let mut batch = GeomBatch::new();

        // A nice background for the entire thing
        batch.push(
            Color::grey(0.3),
            Polygon::rectangle_topleft(
                Pt2D::new(self.top_left.x, self.top_left.y),
                Distance::meters(self.dims.total_width),
                Distance::meters(self.dims.bar_height + 2.0 * self.dims.vert_padding),
            ),
        );
        // Register the footprint so mouseover on widgets underneath is suppressed.
        g.canvas.mark_covered_area(ScreenRectangle {
            x1: self.top_left.x,
            y1: self.top_left.y,
            x2: self.top_left.x + self.dims.total_width,
            y2: self.top_left.y + self.dims.bar_height + 2.0 * self.dims.vert_padding,
        });

        // The bar
        batch.push(
            Color::WHITE,
            Polygon::rectangle_topleft(
                Pt2D::new(
                    self.top_left.x + self.dims.horiz_padding,
                    self.top_left.y + self.dims.vert_padding,
                ),
                Distance::meters(self.dims.bar_width),
                Distance::meters(self.dims.bar_height),
            ),
        );

        // Show the progress
        if self.current_percent != 0.0 {
            batch.push(
                Color::GREEN,
                Polygon::rectangle_topleft(
                    Pt2D::new(
                        self.top_left.x + self.dims.horiz_padding,
                        self.top_left.y + self.dims.vert_padding,
                    ),
                    Distance::meters(self.current_percent * self.dims.bar_width),
                    Distance::meters(self.dims.bar_height),
                ),
            );
        }

        // The actual slider
        batch.push(
            if self.mouse_on_slider {
                Color::YELLOW
            } else {
                Color::grey(0.7)
            },
            self.slider_geom(),
        );

        // Draw in screen coordinates, not map space.
        g.fork_screenspace();
        batch.draw(g);
        g.unfork();
    }
    /// Polygon of the draggable handle: horizontally centered on
    /// current_percent along the bar, vertically centered on the bar.
    fn slider_geom(&self) -> Polygon {
        Polygon::rectangle_topleft(
            Pt2D::new(
                self.top_left.x
                    + self.dims.horiz_padding
                    + self.current_percent * self.dims.bar_width
                    - (self.dims.slider_width / 2.0),
                self.top_left.y + self.dims.vert_padding
                    - (self.dims.slider_height - self.dims.bar_height) / 2.0,
            ),
            Distance::meters(self.dims.slider_width),
            Distance::meters(self.dims.slider_height),
        )
    }
}
impl Widget for Slider {
    fn get_dims(&self) -> ScreenDims {
        // Footprint: full width, bar plus vertical padding on both sides.
        ScreenDims::new(
            self.dims.total_width,
            self.dims.bar_height + 2.0 * self.dims.vert_padding,
        )
    }

    fn set_pos(&mut self, top_left: ScreenPt, total_width: f64) {
        // Re-derive every pixel dimension when layout assigns a new width.
        self.top_left = top_left;
        self.dims = Dims::fit_total_width(total_width);
    }
}
/// All slider measurements, derived from the total width at fixed ratios.
#[derive(Debug)]
struct Dims {
    // Pixels
    bar_width: f64,
    bar_height: f64,
    slider_width: f64,
    slider_height: f64,
    horiz_padding: f64,
    vert_padding: f64,
    total_width: f64,
}

impl Dims {
    /// Derive every measurement from the requested total width, keeping the
    /// fixed proportions: padding is width/7, the bar is 3:1, the handle is
    /// a sixth of the bar wide and 20% taller than it.
    fn fit_total_width(total_width: f64) -> Dims {
        let horiz_padding = total_width / 7.0;
        let bar_width = total_width - 2.0 * horiz_padding;
        let bar_height = bar_width / 3.0;
        Dims {
            bar_width,
            bar_height,
            slider_width: bar_width / 6.0,
            slider_height: bar_height * 1.2,
            horiz_padding,
            vert_padding: bar_height / 5.0,
            total_width,
        }
    }
}
// A slider paired with a modal menu for stepping through a list of
// (item, description) pairs via prev/next/first/last shortcuts.
pub struct ItemSlider<T> {
    items: Vec<(T, Text)>,
    slider: Slider,
    menu: ModalMenu,
    noun: String,
    // Pre-formatted action labels ("previous thing", ...) reused when
    // querying the menu each frame.
    prev: String,
    next: String,
    first: String,
    last: String,
}

impl<T> ItemSlider<T> {
    pub fn new(
        items: Vec<(T, Text)>,
        menu_title: &str,
        noun: &str,
        other_choices: Vec<(Option<MultiKey>, &str)>,
        ctx: &EventCtx,
    ) -> ItemSlider<T> {
        // Lifetime funniness...
        let mut choices = other_choices.clone();
        let prev = format!("previous {}", noun);
        let next = format!("next {}", noun);
        let first = format!("first {}", noun);
        let last = format!("last {}", noun);
        choices.extend(vec![
            (hotkey(Key::LeftArrow), prev.as_str()),
            (hotkey(Key::RightArrow), next.as_str()),
            (hotkey(Key::Comma), first.as_str()),
            (hotkey(Key::Dot), last.as_str()),
        ]);
        ItemSlider {
            items,
            slider: Slider::new(),
            menu: ModalMenu::new(menu_title, choices, ctx).disable_standalone_layout(),
            noun: noun.to_string(),
            prev,
            next,
            first,
            last,
        }
    }

    // Returns true if the value changed.
    pub fn event(&mut self, ctx: &mut EventCtx) -> bool {
        {
            // Refresh the menu header with "<noun> i/n" plus the current
            // item's description, then let the menu process input.
            let idx = self.slider.get_value(self.items.len());
            let mut txt = Text::from(Line(format!(
                "{} {}/{}",
                self.noun,
                abstutil::prettyprint_usize(idx + 1),
                abstutil::prettyprint_usize(self.items.len())
            )));
            txt.extend(&self.items[idx].1);
            self.menu.set_info(ctx, txt);
            self.menu.event(ctx);
        }
        // Lay out slider above menu in the top-right corner.
        stack_vertically(
            ContainerOrientation::TopRight,
            ctx.canvas,
            vec![&mut self.slider, &mut self.menu],
        );

        // Keyboard/menu navigation; guards keep us from stepping past the ends.
        let current = self.slider.get_value(self.items.len());
        if current != self.items.len() - 1 && self.menu.action(&self.next) {
            self.slider.set_value(ctx, current + 1, self.items.len());
        } else if current != self.items.len() - 1 && self.menu.action(&self.last) {
            self.slider.set_percent(ctx, 1.0);
        } else if current != 0 && self.menu.action(&self.prev) {
            self.slider.set_value(ctx, current - 1, self.items.len());
        } else if current != 0 && self.menu.action(&self.first) {
            self.slider.set_percent(ctx, 0.0);
        }

        // Mouse interaction with the slider itself may also move it.
        self.slider.event(ctx);

        self.slider.get_value(self.items.len()) != current
    }

    pub fn draw(&self, g: &mut GfxCtx) {
        self.menu.draw(g);
        self.slider.draw(g);
    }

    /// Current (index, item) pair.
    pub fn get(&self) -> (usize, &T) {
        let idx = self.slider.get_value(self.items.len());
        (idx, &self.items[idx].0)
    }

    /// Forward an action query to the embedded menu (for other_choices).
    pub fn action(&mut self, name: &str) -> bool {
        self.menu.action(name)
    }

    // TODO Consume self
    /// Take ownership of all items, leaving the slider empty.
    pub fn consume_all_items(&mut self) -> Vec<(T, Text)> {
        std::mem::replace(&mut self.items, Vec::new())
    }
}
// An ItemSlider over (point, item) pairs that smoothly warps the camera to
// the selected item's point whenever the selection changes.
pub struct WarpingItemSlider<T> {
    slider: ItemSlider<(Pt2D, T)>,
    // Some while a camera warp is in flight; None once it finishes.
    warper: Option<Warper>,
}

impl<T> WarpingItemSlider<T> {
    // Note other_choices is hardcoded to quitting.
    // NOTE(review): assumes items is non-empty — items[0] below panics otherwise.
    pub fn new(
        items: Vec<(Pt2D, T, Text)>,
        menu_title: &str,
        noun: &str,
        ctx: &EventCtx,
    ) -> WarpingItemSlider<T> {
        WarpingItemSlider {
            // Immediately start warping to the first item.
            warper: Some(Warper::new(ctx, items[0].0, None)),
            slider: ItemSlider::new(
                items
                    .into_iter()
                    .map(|(pt, obj, label)| ((pt, obj), label))
                    .collect(),
                menu_title,
                noun,
                vec![(hotkey(Key::Escape), "quit")],
                ctx,
            ),
        }
    }

    // Done when None. If the bool is true, done warping.
    pub fn event(&mut self, ctx: &mut EventCtx) -> Option<(EventLoopMode, bool)> {
        // Don't block while we're warping
        let (ev_mode, done_warping) = if let Some(ref warper) = self.warper {
            if let Some(mode) = warper.event(ctx) {
                (mode, false)
            } else {
                self.warper = None;
                (EventLoopMode::InputOnly, true)
            }
        } else {
            (EventLoopMode::InputOnly, false)
        };

        let changed = self.slider.event(ctx);

        if self.slider.action("quit") {
            return None;
        } else if !changed {
            return Some((ev_mode, done_warping));
        }

        // Selection changed: start warping to the newly selected point.
        let (_, (pt, _)) = self.slider.get();
        self.warper = Some(Warper::new(ctx, *pt, None));
        // We just created a new warper, so...
        Some((EventLoopMode::Animation, done_warping))
    }

    pub fn draw(&self, g: &mut GfxCtx) {
        self.slider.draw(g);
    }

    /// Current (index, item), hiding the internal warp point.
    pub fn get(&self) -> (usize, &T) {
        let (idx, (_, data)) = self.slider.get();
        (idx, data)
    }
}

impl<T: PartialEq> WarpingItemSlider<T> {
    /// Jump straight to `item` (which must exist in the list) without
    /// animating a camera warp.
    pub fn override_initial_value(&mut self, item: T, ctx: &EventCtx) {
        let idx = self
            .slider
            .items
            .iter()
            .position(|((_, x), _)| x == &item)
            .unwrap();
        self.slider
            .slider
            .set_value(ctx, idx, self.slider.items.len());
        // Cancel the warp that new() started.
        self.warper = None;
    }
}
// TODO Hardcoded to Durations right now...
// TODO Hardcoded to Durations right now...
// A slider and a text box kept in sync: dragging the slider rewrites the
// text, and typing a valid in-range Duration repositions the slider.
pub struct SliderWithTextBox {
    slider: Slider,
    tb: TextBox,
    // Inclusive range that the slider's [0, 1] percent maps onto.
    low: Duration,
    high: Duration,
}

impl SliderWithTextBox {
    pub fn new(prompt: &str, low: Duration, high: Duration, canvas: &Canvas) -> SliderWithTextBox {
        SliderWithTextBox {
            slider: Slider::new(),
            tb: TextBox::new(prompt, None, canvas),
            low,
            high,
        }
    }

    pub fn event(&mut self, ctx: &mut EventCtx) -> InputResult<Duration> {
        ctx.canvas.handle_event(ctx.input);
        stack_vertically(
            ContainerOrientation::Centered,
            ctx.canvas,
            vec![&mut self.slider, &mut self.tb],
        );

        if self.slider.event(ctx) {
            // Slider moved: reflect the interpolated value in the text box.
            let value = self.low + self.slider.get_percent() * (self.high - self.low);
            self.tb.set_text(value.to_string());
            InputResult::StillActive
        } else {
            let line_before = self.tb.get_line().to_string();
            match self.tb.event(ctx.input) {
                InputResult::Done(line, _) => {
                    // Only accept parsable, in-range durations; anything else
                    // cancels the whole widget.
                    if let Ok(t) = Duration::parse(&line) {
                        if t >= self.low && t <= self.high {
                            return InputResult::Done(line, t);
                        }
                    }
                    println!("Bad input {}", line);
                    InputResult::Canceled
                }
                InputResult::StillActive => {
                    // Text edited: move the slider if it now holds a valid,
                    // in-range duration (invalid text leaves it untouched).
                    if line_before != self.tb.get_line() {
                        if let Ok(t) = Duration::parse(self.tb.get_line()) {
                            if t >= self.low && t <= self.high {
                                self.slider
                                    .set_percent(ctx, (t - self.low) / (self.high - self.low));
                            }
                        }
                    }
                    InputResult::StillActive
                }
                InputResult::Canceled => InputResult::Canceled,
            }
        }
    }

    pub fn draw(&self, g: &mut GfxCtx) {
        self.slider.draw(g);
        self.tb.draw(g);
    }
}
|
event
|
files.go
|
package util
import (
"fmt"
"io"
"io/ioutil"
"os"
"os/user"
"path/filepath"
"regexp"
"sort"
"strings"
"github.com/mattn/go-zglob"
)
const (
// FileMode is default file mode
FileMode = 0644
// DirFileMode is default directory file mode
DirFileMode = 0755
)
// ReadFile reads given file and return it as a byte slice:
// - file: the file to read
// Return:
// - content as a slice of bytes
// - an error if something went wrong
func ReadFile(file string) ([]byte, error) {
	// "~/"-prefixed paths are resolved against the user's home first.
	path := ExpandUserHome(file)
	bytes, err := ioutil.ReadFile(path)
	if err != nil {
		// Wrap with the original (unexpanded) name for a friendlier message.
		return nil, fmt.Errorf("reading file '%s': %v", file, err)
	}
	return bytes, nil
}
// FileExists tells if given file exists:
// - file: the name of the file to test
// Return: a boolean that tells if file exists
// Note: returns false for directories — this checks regular-file existence.
func FileExists(file string) bool {
	file = ExpandUserHome(file)
	if stat, err := os.Stat(file); err == nil && !stat.IsDir() {
		return true
	}
	return false
}
// DirExists tells if directory exists:
// - dir: directory to test
// Return: a boolean that tells if directory exists
// Note: returns false if the path exists but is a regular file.
func DirExists(dir string) bool {
	dir = ExpandUserHome(dir)
	if stat, err := os.Stat(dir); err == nil && stat.IsDir() {
		return true
	}
	return false
}
// FileIsExecutable tells if given file is executable by user:
// - file: file to test
// Return: a boolean that tells if file is executable by user
func FileIsExecutable(file string) bool {
if stat, err := os.Stat(file); err == nil && stat.Mode()&0111 != 0 {
return true
}
return false
}
// CopyFile copies source file to destination, preserving mode:
// - source: the source file
// - dest: the destination file
// Return: error if something went wrong
func CopyFile(source, dest string) error {
source = ExpandUserHome(source)
dest = ExpandUserHome(dest)
from, err := os.Open(source)
if err != nil {
return fmt.Errorf("opening source file '%s': %v", source, err)
}
info, err := from.Stat()
if err != nil {
return fmt.Errorf("getting mode of source file '%s': %v", source, err)
}
defer from.Close()
to, err := os.Create(dest)
if err != nil {
return fmt.Errorf("creating desctination file '%s': %v", dest, err)
}
defer to.Close()
|
if err != nil {
return fmt.Errorf("copying file: %v", err)
}
err = to.Sync()
if err != nil {
return fmt.Errorf("syncing destination file: %v", err)
}
if !Windows() {
err = to.Chmod(info.Mode())
if err != nil {
return fmt.Errorf("changing mode of destination file '%s': %v", dest, err)
}
}
return nil
}
// CopyFilesToDir copies files in root directory to destination directory:
// - dir: root directory
// - files: globs of source files
// - toDir: destination directory
// - flatten: tells if files should be flatten in destination directory
// Return: an error if something went wrong
func CopyFilesToDir(dir string, files []string, toDir string, flatten bool) error {
	if stat, err := os.Stat(toDir); err != nil || !stat.IsDir() {
		return fmt.Errorf("destination directory doesn't exist")
	}
	for _, file := range files {
		// Relative entries are resolved against the root directory.
		source := file
		if !filepath.IsAbs(file) {
			source = filepath.Join(dir, file)
		}
		var dest string
		// Absolute sources are always flattened: their own directory
		// structure cannot be reproduced under toDir.
		if flatten || filepath.IsAbs(file) {
			base := filepath.Base(file)
			dest = filepath.Join(toDir, base)
		} else {
			// Preserve the relative layout, creating directories as needed.
			dest = filepath.Join(toDir, file)
			destDir := filepath.Dir(dest)
			if !DirExists(destDir) {
				err := os.MkdirAll(destDir, DirFileMode)
				if err != nil {
					return fmt.Errorf("creating directory for destination file: %v", err)
				}
			}
		}
		err := CopyFile(source, dest)
		if err != nil {
			return err
		}
	}
	return nil
}
// MoveFilesToDir moves files in source directory to destination:
// - dir: root directory of source files
// - files: globs of files to move
// - toDir: destination directory
// - flatten: tells if files should be flatten in destination directory
// Return: an error if something went wrong
// MoveFilesToDir moves files in source directory to destination:
// - dir: root directory of source files
// - files: globs of files to move
// - toDir: destination directory
// - flatten: tells if files should be flatten in destination directory
// Return: an error if something went wrong
func MoveFilesToDir(dir string, files []string, toDir string, flatten bool) error {
	dir = ExpandUserHome(dir)
	toDir = ExpandUserHome(toDir)
	// destination must already exist and be a directory
	if stat, err := os.Stat(toDir); err != nil || !stat.IsDir() {
		return fmt.Errorf("destination directory doesn't exist")
	}
	for _, f := range files {
		f = ExpandUserHome(f)
		src := filepath.Join(dir, f)
		var dst string
		if flatten {
			// flattened sources land directly in toDir
			dst = filepath.Join(toDir, filepath.Base(f))
		} else {
			// otherwise preserve the relative layout under toDir
			dst = filepath.Join(toDir, f)
			parent := filepath.Dir(dst)
			if !DirExists(parent) {
				if mkErr := os.MkdirAll(parent, DirFileMode); mkErr != nil {
					return fmt.Errorf("creating directory for destination file: %v", mkErr)
				}
			}
		}
		if err := os.Rename(src, dst); err != nil {
			return err
		}
	}
	return nil
}
// ExpandUserHome expand path starting with "~/":
// - path: the path to expand
// Return: expanded path
// ExpandUserHome expand path starting with "~/":
// - path: the path to expand
// Return: expanded path
func ExpandUserHome(path string) string {
	// anything not starting with "~/" is returned untouched
	if !strings.HasPrefix(path, "~/") {
		return path
	}
	u, _ := user.Current()
	return filepath.Join(u.HomeDir, path[2:])
}
// PathToUnix turns a path to Unix format (with "/"):
// - path: path to turn to unix format
// Return: converted path
// PathToUnix turns a path to Unix format (with "/"):
// - path: path to turn to unix format
// Return: converted path
func PathToUnix(path string) string {
	// backslash separators become forward slashes
	unix := strings.Replace(path, "\\", "/", -1)
	// a leading drive letter such as "c:" becomes "/c"
	driveLetter := regexp.MustCompile("^[A-Za-z]:.*$")
	if driveLetter.MatchString(unix) {
		unix = "/" + unix[0:1] + unix[2:]
	}
	return unix
}
// PathToWindows turns a path to Windows format (with "\"):
// - path: path to turn to windows format
// Return: converted path
// PathToWindows turns a path to Windows format (with "\"):
// - path: path to turn to windows format
// Return: converted path
func PathToWindows(path string) string {
	// forward slashes become backslash separators
	win := strings.Replace(path, "/", "\\", -1)
	// a leading "\c\" becomes the drive prefix "c:\"
	drivePrefix := regexp.MustCompile(`^\\[A-Za-z]\\.*$`)
	if drivePrefix.MatchString(win) {
		win = win[1:2] + ":" + win[2:]
	}
	return win
}
// FindFiles finds files in the context:
// - dir: the search root directory (current dir if empty)
// - includes: the list of globs to include
// - excludes: the list of globs to exclude
// - folder: tells if we should include folders
// Return the list of files as a slice of strings
func FindFiles(dir string, includes, excludes []string, folder bool) ([]string, error) {
var err error
included := joinPath(dir, includes)
excluded := joinPath(dir, excludes)
included, err = filterFolders(included, folder)
if err != nil {
return nil, err
}
files := filterExcluded(included, excluded)
files, err = makeRelative(dir, files)
if err != nil {
return nil, err
}
sort.Strings(files)
return files, nil
}
// FindInPath search given executable in PATH:
// - executable: executable to search.
// Return: list of directories containing executable
// FindInPath search given executable in PATH:
// - executable: executable to search.
// Return: list of directories containing executable
func FindInPath(executable string) []string {
	var paths []string
	// probe every PATH entry for an executable file of that name
	for _, dir := range strings.Split(os.Getenv("PATH"), string(os.PathListSeparator)) {
		candidate := filepath.Join(dir, executable)
		if FileIsExecutable(candidate) {
			paths = append(paths, candidate)
		}
	}
	return paths
}
func joinPath(dir string, paths []string) []string {
var joined []string
for _, path := range paths {
if !filepath.IsAbs(path) {
path = filepath.Join(dir, path)
}
joined = append(joined, path)
}
return joined
}
// filterFolders expands each include glob and keeps regular files,
// plus directories when folder is true.
func filterFolders(included []string, folder bool) ([]string, error) {
	var candidates []string
	for _, include := range included {
		// NOTE(review): glob errors are silently ignored — a malformed
		// pattern yields no candidates instead of an error; confirm intended
		list, _ := zglob.Glob(include)
		for _, file := range list {
			stat, err := os.Stat(file)
			if err != nil {
				return nil, fmt.Errorf("stating file: %v", err)
			}
			// regular files always qualify; directories only when requested
			if stat.Mode().IsRegular() ||
				(stat.Mode().IsDir() && folder) {
				candidates = append(candidates, file)
			}
		}
	}
	return candidates, nil
}
// filterExcluded returns the candidates that match none of the excluded
// globs. When excluded is nil, candidates are returned unchanged.
// Fixed: the original blanked matching entries inside the caller's slice
// (mutating its input) and conflated pre-existing "" entries with excluded
// ones; this version builds the result without touching candidates.
func filterExcluded(candidates []string, excluded []string) []string {
	if excluded == nil {
		return candidates
	}
	var files []string
	for _, file := range candidates {
		keep := true
		for _, exclude := range excluded {
			// an exclusion match, or an invalid pattern, drops the file
			match, err := zglob.Match(exclude, file)
			if match || err != nil {
				keep = false
				break
			}
		}
		if keep {
			files = append(files, file)
		}
	}
	return files
}
func makeRelative(dir string, files []string) ([]string, error) {
var err error
for index, file := range files {
if dir != "" {
files[index], err = filepath.Rel(dir, file)
if err != nil {
return nil, err
}
}
}
return files, nil
}
|
_, err = io.Copy(to, from)
|
ChemkinPickler.py
|
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Michael A.G. Aivazis
# California Institute of Technology
# (C) 1998-2003 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from weaver.mills.LineMill import LineMill
class ChemkinPickler(LineMill):
    """Render a chemical mechanism as a chemkin-format input file.

    Each pickle* method appends lines to self._rep — presumably the line
    buffer provided by the LineMill base class (TODO confirm).  Sections
    are emitted in the conventional chemkin order: elements, species,
    thermo, reactions.
    """

    names = ["chemkin"]

    def _renderDocument(self, mechanism, options=None):
        """Emit the four chemkin sections for the given mechanism."""
        self.pickleElementSection(mechanism)
        self.pickleSpeciesSection(mechanism)
        self.pickleThermoSection(mechanism)
        self.pickleReactionSection(mechanism)
        return

    def pickleElementSection(self, mechanism):
        """Emit the Elements section, wrapping symbols at ~75 columns."""
        self._rep += ["", "! Element section", "", "Elements"]
        line = " " * 4
        for element in mechanism.element():
            symbol = element.symbol
            # start a new line before the current one overflows
            if len(line) + len(symbol) > 75:
                self._rep.append(line)
                line = " " * 4
            line += " " + symbol
        self._rep.append(line)
        self._rep.append("End")
        return

    def pickleSpeciesSection(self, mechanism):
        """Emit the Species section, wrapping symbols at ~75 columns."""
        self._rep += ["", "! Species section", "", "Species"]
        line = " " * 4
        for species in mechanism.species():
            symbol = species.symbol
            # start a new line before the current one overflows
            if len(line) + len(symbol) > 75:
                self._rep.append(line)
                line = " " * 4
            line += " " + symbol
        self._rep.append(line)
        self._rep.append("End")
        return

    def pickleThermoSection(self, mechanism):
        """Emit the Thermo section: four 80-column cards per species.

        species.thermo[0] holds the high-temperature fit and
        species.thermo[1] the low-temperature fit — TODO confirm against
        the mechanism model.

        Bug fix: card 3 was never terminated with its card number nor
        appended to the output, so every species record dropped its
        third line.
        """
        self._rep += ["", "! Thermo section", ""]
        line = "Thermo"
        if mechanism.thermoAll():
            line += " All"
        self._rep.append(line)
        if mechanism.thermoRange():
            line = "%15.8g " * 3 % mechanism.thermoRange()
            self._rep.append(line)
        for species in mechanism.species():
            if not species.thermo:
                continue
            self._rep.append("!")
            # card 1: symbol, elemental composition, phase, T ranges
            line_1 = "%-18s" % species.symbol + " " * 6
            composition = [
                "%-2s%3d" % (element, factor)
                for element, factor in species.composition
            ]
            # at most four element/count pairs fit on card 1; pad the rest
            line_1 += "".join(composition[: min(len(composition), 4)])
            line_1 += (" " * 5) * (max(0, 4 - len(composition)))
            line_1 += species.phase.upper()
            line_1 += "%10.3f" % species.thermo[1].lowT
            line_1 += "%10.3f" % species.thermo[0].highT
            # the two fit ranges must share their common temperature
            if species.thermo[1].highT != species.thermo[0].lowT:
                import journal
                journal.firewall("fuego").hit("bad mechanism")
                continue
            if species.thermo[1].lowT:
                line_1 += "%10.3f" % species.thermo[1].lowT
            else:
                line_1 += " " * 10
            # a fifth element/count pair goes in the spare card-1 slot
            if len(composition) >= 5:
                line_1 += "%-2s%2d" % composition[4]
            else:
                line_1 += " " * 4
            line_1 += "1"
            self._rep.append(line_1)
            # the two NASA polynomial parameter sets
            highParameters = species.thermo[0].parameters
            lowParameters = species.thermo[1].parameters
            # card 2: first five high-range coefficients
            line_2 = ""
            line_2 += "%15.8e" % highParameters[0]
            line_2 += "%15.8e" % highParameters[1]
            line_2 += "%15.8e" % highParameters[2]
            line_2 += "%15.8e" % highParameters[3]
            line_2 += "%15.8e" % highParameters[4]
            line_2 += " " * 4 + "2"
            self._rep.append(line_2)
            # card 3: last two high-range and first three low-range coefficients
            line_3 = ""
            line_3 += "%15.8e" % highParameters[5]
            line_3 += "%15.8e" % highParameters[6]
            line_3 += "%15.8e" % lowParameters[0]
            line_3 += "%15.8e" % lowParameters[1]
            line_3 += "%15.8e" % lowParameters[2]
            line_3 += " " * 4 + "3"  # restored: card number + append were missing
            self._rep.append(line_3)
            # card 4: last four low-range coefficients
            line_4 = ""
            line_4 += "%15.8e" % lowParameters[3]
            line_4 += "%15.8e" % lowParameters[4]
            line_4 += "%15.8e" % lowParameters[5]
            line_4 += "%15.8e" % lowParameters[6]
            line_4 += " " * 15
            line_4 += " " * 4 + "4"
            self._rep.append(line_4)
        self._rep.append("")
        self._rep.append("End")
        return

    def pickleReactionSection(self, mechanism):
        """Emit the Reactions section, numbering reactions from 1."""
        self._rep.append("")
        self._rep.append("! Reaction section")
        self._rep.append("")
        self._rep.append("Reactions")
        i = 0
        for reaction in mechanism.reaction():
            i += 1
            self.pickleReaction(reaction, i)
        self._rep.append("")
        self._rep.append("End")
        return

    def pickleReaction(self, reaction, i):
        """Emit one reaction: equation, Arrhenius triple, and any
        auxiliary keyword lines (LOW/TROE/SRI/REV/LT/RLT/HV/DUPLICATE).

        Returns the lines appended (also added to self._rep).
        """
        lines = []
        form = _printReagents(reaction, reaction.reactants)
        # "<=>" marks a reversible reaction, "=>" an irreversible one
        if reaction.reversible:
            form += " <=> "
        else:
            form += " => "
        form += _printReagents(reaction, reaction.products)
        line = "%-40s" % form
        line += "%10.3g" % reaction.arrhenius[0]
        line += "%10.3g" % reaction.arrhenius[1]
        line += "%10.3g" % reaction.arrhenius[2]
        line += " " * 5 + "! %5d" % i
        lines.append(line)
        if reaction.efficiencies:
            efficiencies = "    "
            for species, coefficient in reaction.efficiencies:
                efficiencies += "%s / %4.2f / " % (
                    species,
                    coefficient + 1,
                )  # remember adjustment
            lines.append(efficiencies)
        # optional auxiliary parameter lines
        if reaction.low:
            low = "    LOW /%s/" % _printParameters(reaction.low)
            lines.append(low)
        if reaction.troe:
            troe = "    TROE /%s/" % _printParameters(reaction.troe)
            lines.append(troe)
        if reaction.sri:
            sri = "    SRI /%s/" % _printParameters(reaction.sri)
            lines.append(sri)
        if reaction.rev:
            rev = "    REV /%s/" % _printParameters(reaction.rev)
            lines.append(rev)
        if reaction.lt:
            lt = "    LT /%s/" % _printParameters(reaction.lt)
            lines.append(lt)
        if reaction.rlt:
            rlt = "    RLT /%s/" % _printParameters(reaction.rlt)
            lines.append(rlt)
        if reaction.radiation:
            radiation = "    HV / %g /" % reaction.radiation
            lines.append(radiation)
        if reaction.duplicate:
            duplicate = "    DUPLICATE"
            lines.append(duplicate)
        self._rep += lines
        return lines

    def __init__(self, options=None):
        # "!" is the chemkin comment leader; options is accepted for
        # interface compatibility but unused
        LineMill.__init__(self, "!", _FIRSTLINE)
        return
# helpers

# emacs mode line emitted as the first line of every generated file
_FIRSTLINE = "! -*- chemkin -*-"
def _printReagents(reaction, composition):
terms = []
for species, factor in composition:
str = ""
if factor != 1:
str += "%d " % factor
str += species
terms.append(str)
line = " + ".join(terms)
if reaction.thirdBody:
species, factor = reaction.thirdBody
if species == "<mixture>":
species = "M"
if reaction.falloff:
line += " (+"
else:
line += " + "
if factor != 1:
line += "%d" % factor
line += species
if reaction.falloff:
line += ")"
return line
def _printParameters(ptuple):
format = "%10.3e " * len(ptuple)
return format % ptuple
# version
__id__ = "$Id$"
# End of file
|
line_3 += " " * 4 + "3"
self._rep.append(line_3)
|
functional.py
|
#!/usr/bin/python
import os
import re
from datetime import datetime, timedelta
from trac.tests.functional import *
from trac.util.datefmt import utc, localtz, format_date
class TestTickets(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Create a ticket, comment on it, and attach a file"""
        # TODO: this should be split into multiple tests
        summary = random_sentence(5)
        ticketid = self._tester.create_ticket(summary)
        # a second, unnamed ticket — presumably just to have more than one
        # ticket in the database; its id is deliberately unused
        self._tester.create_ticket()
        self._tester.add_comment(ticketid)
        self._tester.attach_file_to_ticket(ticketid)
class TestTicketPreview(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Preview ticket creation"""
self._tester.go_to_front()
tc.follow('New Ticket')
summary = random_sentence(5)
desc = random_sentence(5)
tc.formvalue('propertyform', 'field-summary', summary)
tc.formvalue('propertyform', 'field-description', desc)
tc.submit('preview')
tc.url(self._tester.url + '/newticket$')
tc.find('ticket not yet created')
tc.find(summary)
tc.find(desc)
class TestTicketNoSummary(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Creating a ticket without summary should fail"""
self._tester.go_to_front()
tc.follow('New Ticket')
desc = random_sentence(5)
tc.formvalue('propertyform', 'field-description', desc)
tc.submit('submit')
tc.find(desc)
tc.find('Tickets must contain a summary.')
tc.find('Create New Ticket')
tc.find('ticket not yet created')
class TestTicketAltFormats(FunctionalTestCaseSetup):
def runTest(self):
"""Download ticket in alternative formats"""
summary = random_sentence(5)
ticketid = self._tester.create_ticket(summary)
self._tester.go_to_ticket(ticketid)
for format in ['Comma-delimited Text', 'Tab-delimited Text',
'RSS Feed']:
tc.follow(format)
content = b.get_html()
if content.find(summary) < 0:
raise AssertionError('Summary missing from %s format' % format)
tc.back()
class TestTicketCSVFormat(FunctionalTestCaseSetup):
    def runTest(self):
        """Download ticket in CSV format"""
        summary = random_sentence(5)
        ticketid = self._tester.create_ticket(summary)
        self._tester.go_to_ticket(ticketid)
        tc.follow('Comma-delimited Text')
        # only the header prefix is checked, not the full CSV body
        csv = b.get_html()
        if not csv.startswith('id,summary,'):
            raise AssertionError('Bad CSV format')
class TestTicketTabFormat(FunctionalTestCaseSetup):
    def runTest(self):
        """Download ticket in Tab-delimited format"""
        summary = random_sentence(5)
        ticketid = self._tester.create_ticket(summary)
        self._tester.go_to_ticket(ticketid)
        tc.follow('Tab-delimited Text')
        # only the header prefix is checked, not the full body
        tab = b.get_html()
        if not tab.startswith('id\tsummary\t'):
            # fixed "delimitted" typo in docstring and message
            raise AssertionError('Bad tab-delimited format')
class TestTicketRSSFormat(FunctionalTestCaseSetup):
def runTest(self):
"""Download ticket in RSS format"""
summary = random_sentence(5)
ticketid = self._tester.create_ticket(summary)
self._tester.go_to_ticket(ticketid)
# Make a number of changes to exercise all of the RSS feed code
self._tester.go_to_ticket(ticketid)
tc.formvalue('propertyform', 'comment', random_sentence(3))
tc.formvalue('propertyform', 'field-type', 'task')
tc.formvalue('propertyform', 'description', summary + '\n\n' +
random_sentence(8))
tc.formvalue('propertyform', 'field-keywords', 'key')
tc.submit('submit')
time.sleep(1) # Have to wait a second
tc.formvalue('propertyform', 'field-keywords', '')
tc.submit('submit')
tc.find('RSS Feed')
tc.follow('RSS Feed')
rss = b.get_html()
if not rss.startswith('<?xml version="1.0"?>'):
raise AssertionError('RSS Feed not valid feed')
class TestTicketSearch(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test ticket search"""
summary = random_sentence(4)
ticketid = self._tester.create_ticket(summary)
self._tester.go_to_front()
tc.follow('Search')
tc.formvalue('fullsearch', 'ticket', True)
tc.formvalue('fullsearch', 'q', summary)
tc.submit('Search')
tc.find('class="searchable">.*' + summary)
tc.notfind('No matches found')
class TestNonTicketSearch(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test non-ticket search"""
# Create a summary containing only unique words
summary = ' '.join([random_word() + '_TestNonTicketSearch'
for i in range(5)])
ticketid = self._tester.create_ticket(summary)
self._tester.go_to_front()
tc.follow('Search')
tc.formvalue('fullsearch', 'ticket', False)
tc.formvalue('fullsearch', 'q', summary)
tc.submit('Search')
tc.notfind('class="searchable">' + summary)
tc.find('No matches found')
class TestTicketHistory(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test ticket history"""
summary = random_sentence(5)
ticketid = self._tester.create_ticket(summary)
comment = random_sentence(5)
self._tester.add_comment(ticketid, comment=comment)
self._tester.go_to_ticket(ticketid)
url = b.get_url()
tc.go(url + '?version=0')
tc.find('at <[^>]*>*Initial Version')
tc.find(summary)
tc.notfind(comment)
tc.go(url + '?version=1')
tc.find('at <[^>]*>*Version 1')
tc.find(summary)
tc.find(comment)
class TestTicketHistoryDiff(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test ticket history (diff)"""
name = 'TestTicketHistoryDiff'
ticketid = self._tester.create_ticket(name)
self._tester.go_to_ticket(ticketid)
tc.formvalue('propertyform', 'description', random_sentence(6))
tc.submit('submit')
tc.find('Description<[^>]*>\\s*modified \\(<[^>]*>diff', 's')
tc.follow('diff')
tc.find('Changes\\s*between\\s*<[^>]*>Initial Version<[^>]*>\\s*and' \
'\\s*<[^>]*>Version 1<[^>]*>\\s*of\\s*<[^>]*>Ticket #' , 's')
class TestTicketQueryLinks(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test ticket query links"""
count = 3
ticket_ids = [self._tester.create_ticket(
summary='TestTicketQueryLinks%s' % i)
for i in range(count)]
self._tester.go_to_query()
# We don't have the luxury of javascript, so this is a multi-step
# process
tc.formvalue('query', 'add_filter_0', 'summary')
tc.submit('add_0')
tc.formvalue('query', '0_owner', 'nothing')
tc.submit('rm_filter_0_owner_0')
tc.formvalue('query', '0_summary', 'TestTicketQueryLinks')
tc.submit('update')
query_url = b.get_url()
for i in range(count):
tc.find('TestTicketQueryLinks%s' % i)
tc.follow('TestTicketQueryLinks0')
tc.find('class="missing">← Previous Ticket')
tc.find('title="Ticket #%s">Next Ticket' % ticket_ids[1])
tc.follow('Back to Query')
tc.url(re.escape(query_url))
tc.follow('TestTicketQueryLinks1')
tc.find('title="Ticket #%s">Previous Ticket' % ticket_ids[0])
tc.find('title="Ticket #%s">Next Ticket' % ticket_ids[2])
tc.follow('Next Ticket')
tc.find('title="Ticket #%s">Previous Ticket' % ticket_ids[1])
tc.find('class="missing">Next Ticket →')
class TestTicketQueryOrClause(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test ticket query with an or clauses"""
count = 3
ticket_ids = [self._tester.create_ticket(
summary='TestTicketQueryOrClause%s' % i,
info={'keywords': str(i)})
for i in range(count)]
self._tester.go_to_query()
tc.formvalue('query', '0_owner', '')
tc.submit('rm_filter_0_owner_0')
tc.formvalue('query', 'add_filter_0', 'summary')
tc.submit('add_0')
tc.formvalue('query', '0_summary', 'TestTicketQueryOrClause1')
tc.formvalue('query', 'add_clause_1', 'keywords')
tc.submit('add_1')
tc.formvalue('query', '1_keywords', '2')
tc.submit('update')
tc.notfind('TestTicketQueryOrClause0')
for i in [1, 2]:
tc.find('TestTicketQueryOrClause%s' % i)
class TestTimelineTicketDetails(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Test ticket details on timeline"""
env = self._testenv.get_trac_environment()
env.config.set('timeline', 'ticket_show_details', 'yes')
env.config.save()
summary = random_sentence(5)
ticketid = self._tester.create_ticket(summary)
self._tester.go_to_ticket(ticketid)
self._tester.add_comment(ticketid)
self._tester.go_to_timeline()
tc.formvalue('prefs', 'ticket_details', True)
tc.submit()
htmltags = '(<[^>]*>)*'
tc.find('Ticket ' + htmltags + '#' + str(ticketid) + htmltags + ' \\(' +
summary + '\\) updated\\s+by\\s+' + htmltags + 'admin', 's')
class TestAdminComponent(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create component"""
self._tester.create_component()
class TestAdminComponentDuplicates(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin create duplicate component"""
        # fixed copy/paste bug: this test exercises components, yet the
        # fixture was named "DuplicateMilestone"
        name = "DuplicateComponent"
        self._tester.create_component(name)
        component_url = self._tester.url + "/admin/ticket/components"
        tc.go(component_url)
        # creating the same component again must fail gracefully
        tc.formvalue('addcomponent', 'name', name)
        tc.submit()
        tc.notfind(internal_error)
        tc.find('Component .* already exists')
class TestAdminComponentRemoval(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin remove component"""
name = "RemovalComponent"
self._tester.create_component(name)
component_url = self._tester.url + "/admin/ticket/components"
tc.go(component_url)
tc.formvalue('component_table', 'sel', name)
tc.submit('remove')
tc.notfind(name)
class TestAdminComponentNonRemoval(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin remove no selected component"""
component_url = self._tester.url + "/admin/ticket/components"
tc.go(component_url)
tc.formvalue('component_table', 'remove', 'Remove selected items')
tc.submit('remove')
tc.find('No component selected')
class TestAdminComponentDefault(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin set default component"""
name = "DefaultComponent"
self._tester.create_component(name)
component_url = self._tester.url + "/admin/ticket/components"
tc.go(component_url)
tc.formvalue('component_table', 'default', name)
tc.submit('apply')
tc.find('type="radio" name="default" value="%s" checked="checked"' % \
name)
tc.go(self._tester.url + '/newticket')
tc.find('<option selected="selected" value="%s">%s</option>'
% (name, name))
class TestAdminComponentDetail(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin component detail"""
name = "DetailComponent"
self._tester.create_component(name)
component_url = self._tester.url + "/admin/ticket/components"
tc.go(component_url)
tc.follow(name)
desc = 'Some component description'
tc.formvalue('modcomp', 'description', desc)
tc.submit('cancel')
tc.url(component_url + '$')
tc.follow(name)
tc.notfind(desc)
class TestAdminMilestone(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create milestone"""
self._tester.create_milestone()
class TestAdminMilestoneSpace(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create milestone with a space"""
self._tester.create_milestone('Milestone 1')
class TestAdminMilestoneDuplicates(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create duplicate milestone"""
name = "DuplicateMilestone"
self._tester.create_milestone(name)
milestone_url = self._tester.url + "/admin/ticket/milestones"
tc.go(milestone_url)
tc.url(milestone_url)
tc.formvalue('addmilestone', 'name', name)
tc.submit()
tc.notfind(internal_error)
tc.find('Milestone %s already exists' % name)
tc.notfind('%s')
class TestAdminMilestoneDetail(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin modify milestone details"""
name = "DetailMilestone"
# Create a milestone
self._tester.create_milestone(name)
# Modify the details of the milestone
milestone_url = self._tester.url + "/admin/ticket/milestones"
tc.go(milestone_url)
tc.url(milestone_url)
tc.follow(name)
tc.url(milestone_url + '/' + name)
tc.formvalue('modifymilestone', 'description', 'Some description.')
tc.submit('save')
tc.url(milestone_url)
# Make sure the milestone isn't closed
self._tester.go_to_roadmap()
tc.find(name)
# Cancel more modifications
tc.go(milestone_url)
tc.url(milestone_url)
tc.follow(name)
tc.formvalue('modifymilestone', 'description',
'~~Some other description.~~')
tc.submit('cancel')
tc.url(milestone_url)
# Verify the correct modifications show up
self._tester.go_to_roadmap()
tc.find('Some description.')
tc.follow(name)
tc.find('Some description.')
class TestAdminMilestoneDue(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin milestone duedate"""
name = "DueMilestone"
duedate = datetime.now(tz=utc)
duedate_string = format_date(duedate, tzinfo=utc)
self._tester.create_milestone(name, due=duedate_string)
tc.find(duedate_string)
class TestAdminMilestoneDetailDue(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin modify milestone duedate on detail page"""
name = "DetailDueMilestone"
# Create a milestone
self._tester.create_milestone(name)
# Modify the details of the milestone
milestone_url = self._tester.url + "/admin/ticket/milestones"
tc.go(milestone_url)
tc.url(milestone_url)
tc.follow(name)
tc.url(milestone_url + '/' + name)
duedate = datetime.now(tz=utc)
duedate_string = format_date(duedate, tzinfo=utc)
tc.formvalue('modifymilestone', 'due', duedate_string)
tc.submit('save')
tc.url(milestone_url + '$')
tc.find(name + '(<[^>]*>|\\s)*'+ duedate_string, 's')
class TestAdminMilestoneCompleted(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin milestone completed"""
name = "CompletedMilestone"
self._tester.create_milestone(name)
milestone_url = self._tester.url + "/admin/ticket/milestones"
tc.go(milestone_url)
tc.url(milestone_url)
tc.follow(name)
tc.url(milestone_url + '/' + name)
tc.formvalue('modifymilestone', 'completed', True)
tc.submit('save')
tc.url(milestone_url + "$")
class TestAdminMilestoneCompletedFuture(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin milestone completed in the future"""
name = "CompletedFutureMilestone"
self._tester.create_milestone(name)
milestone_url = self._tester.url + "/admin/ticket/milestones"
tc.go(milestone_url)
tc.url(milestone_url)
tc.follow(name)
tc.url(milestone_url + '/' + name)
tc.formvalue('modifymilestone', 'completed', True)
cdate = datetime.now(tz=utc) + timedelta(days=1)
cdate_string = format_date(cdate, tzinfo=localtz)
tc.formvalue('modifymilestone', 'completeddate', cdate_string)
tc.submit('save')
tc.find('Completion date may not be in the future')
# And make sure it wasn't marked as completed.
self._tester.go_to_roadmap()
tc.find(name)
class TestAdminMilestoneRemove(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin remove milestone"""
name = "MilestoneRemove"
self._tester.create_milestone(name)
milestone_url = self._tester.url + "/admin/ticket/milestones"
tc.go(milestone_url)
tc.formvalue('milestone_table', 'sel', name)
tc.submit('remove')
tc.url(milestone_url + '$')
tc.notfind(name)
class TestAdminMilestoneRemoveMulti(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin remove multiple milestones"""
name = "MultiRemoveMilestone"
count = 3
for i in range(count):
self._tester.create_milestone("%s%s" % (name, i))
milestone_url = self._tester.url + '/admin/ticket/milestones'
tc.go(milestone_url)
tc.url(milestone_url + '$')
for i in range(count):
tc.find("%s%s" % (name, i))
for i in range(count):
tc.formvalue('milestone_table', 'sel', "%s%s" % (name, i))
tc.submit('remove')
tc.url(milestone_url + '$')
for i in range(count):
tc.notfind("%s%s" % (name, i))
class TestAdminMilestoneNonRemoval(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin remove no selected milestone"""
milestone_url = self._tester.url + "/admin/ticket/milestones"
tc.go(milestone_url)
tc.formvalue('milestone_table', 'remove', 'Remove selected items')
tc.submit('remove')
tc.find('No milestone selected')
class TestAdminMilestoneDefault(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin set default milestone"""
name = "DefaultMilestone"
self._tester.create_milestone(name)
milestone_url = self._tester.url + "/admin/ticket/milestones"
tc.go(milestone_url)
tc.formvalue('milestone_table', 'default', name)
tc.submit('apply')
tc.find('type="radio" name="default" value="%s" checked="checked"' % \
name)
# verify it is the default on the newticket page.
tc.go(self._tester.url + '/newticket')
tc.find('<option selected="selected" value="%s">%s</option>'
% (name, name))
class TestAdminPriority(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create priority"""
self._tester.create_priority()
class TestAdminPriorityDuplicates(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create duplicate priority"""
name = "DuplicatePriority"
self._tester.create_priority(name)
self._tester.create_priority(name)
tc.find('Priority %s already exists' % name)
class TestAdminPriorityModify(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin modify priority"""
name = "ModifyPriority"
self._tester.create_priority(name)
priority_url = self._tester.url + '/admin/ticket/priority'
tc.go(priority_url)
tc.url(priority_url + '$')
tc.find(name)
tc.follow(name)
tc.formvalue('modenum', 'name', name * 2)
tc.submit('save')
tc.url(priority_url + '$')
tc.find(name * 2)
class TestAdminPriorityRemove(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin remove priority"""
name = "RemovePriority"
self._tester.create_priority(name)
priority_url = self._tester.url + '/admin/ticket/priority'
tc.go(priority_url)
tc.url(priority_url + '$')
tc.find(name)
tc.formvalue('enumtable', 'sel', name)
tc.submit('remove')
tc.url(priority_url + '$')
tc.notfind(name)
class TestAdminPriorityRemoveMulti(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin remove multiple priorities"""
name = "MultiRemovePriority"
count = 3
for i in range(count):
self._tester.create_priority("%s%s" % (name, i))
priority_url = self._tester.url + '/admin/ticket/priority'
tc.go(priority_url)
tc.url(priority_url + '$')
for i in range(count):
tc.find("%s%s" % (name, i))
for i in range(count):
tc.formvalue('enumtable', 'sel', "%s%s" % (name, i))
tc.submit('remove')
tc.url(priority_url + '$')
for i in range(count):
tc.notfind("%s%s" % (name, i))
class TestAdminPriorityNonRemoval(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin remove no selected priority"""
priority_url = self._tester.url + "/admin/ticket/priority"
tc.go(priority_url)
tc.formvalue('enumtable', 'remove', 'Remove selected items')
tc.submit('remove')
tc.find('No priority selected')
class TestAdminPriorityDefault(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin default priority"""
name = "DefaultPriority"
self._tester.create_priority(name)
priority_url = self._tester.url + '/admin/ticket/priority'
tc.go(priority_url)
tc.url(priority_url + '$')
tc.find(name)
tc.formvalue('enumtable', 'default', name)
tc.submit('apply')
tc.url(priority_url + '$')
tc.find('radio.*"%s"\\schecked="checked"' % name)
class TestAdminPriorityDetail(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin modify priority details"""
name = "DetailPriority"
# Create a priority
self._tester.create_priority(name + '1')
# Modify the details of the priority
priority_url = self._tester.url + "/admin/ticket/priority"
tc.go(priority_url)
tc.url(priority_url + '$')
tc.follow(name + '1')
tc.url(priority_url + '/' + name + '1')
tc.formvalue('modenum', 'name', name + '2')
tc.submit('save')
tc.url(priority_url + '$')
# Cancel more modifications
tc.go(priority_url)
tc.follow(name)
tc.formvalue('modenum', 'name', name + '3')
tc.submit('cancel')
tc.url(priority_url + '$')
# Verify that only the correct modifications show up
tc.notfind(name + '1')
tc.find(name + '2')
tc.notfind(name + '3')
class TestAdminPriorityRenumber(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin renumber priorities"""
        valuesRE = re.compile('<select name="value_([0-9]+)">', re.M)
        # NOTE(review): b.get_html() is called BEFORE navigating to the
        # priority admin page, so max_priority is scraped from whatever
        # page the previous test left open — verify this is intended
        html = b.get_html()
        max_priority = max([int(x) for x in valuesRE.findall(html)])
        name = "RenumberPriority"
        self._tester.create_priority(name + '1')
        self._tester.create_priority(name + '2')
        priority_url = self._tester.url + '/admin/ticket/priority'
        tc.go(priority_url)
        tc.url(priority_url + '$')
        tc.find(name + '1')
        tc.find(name + '2')
        # swap the order numbers of the two new priorities
        tc.formvalue('enumtable', 'value_%s' % (max_priority + 1), str(max_priority + 2))
        tc.formvalue('enumtable', 'value_%s' % (max_priority + 2), str(max_priority + 1))
        tc.submit('apply')
        tc.url(priority_url + '$')
        # Verify that their order has changed.
        tc.find(name + '2.*' + name + '1', 's')
class TestAdminPriorityRenumberDup(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin badly renumber priorities"""
# Make the first priority the 2nd priority, and leave the 2nd priority
# as the 2nd priority.
priority_url = self._tester.url + '/admin/ticket/priority'
tc.go(priority_url)
tc.url(priority_url + '$')
tc.formvalue('enumtable', 'value_1', '2')
tc.submit('apply')
tc.url(priority_url + '$')
tc.find('Order numbers must be unique')
class TestAdminResolution(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create resolution"""
self._tester.create_resolution()
class TestAdminResolutionDuplicates(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create duplicate resolution"""
name = "DuplicateResolution"
self._tester.create_resolution(name)
self._tester.create_resolution(name)
tc.find('Resolution value "%s" already exists' % name)
class TestAdminSeverity(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create severity"""
self._tester.create_severity()
class TestAdminSeverityDuplicates(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create duplicate severity"""
name = "DuplicateSeverity"
self._tester.create_severity(name)
self._tester.create_severity(name)
tc.find('Severity value "%s" already exists' % name)
class TestAdminType(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create type"""
self._tester.create_type()
class TestAdminTypeDuplicates(FunctionalTwillTestCaseSetup):
def runTest(self):
"""Admin create duplicate type"""
name = "DuplicateType"
self._tester.create_type(name)
self._tester.create_type(name)
tc.find('Type value "%s" already exists' % name)
class TestAdminVersion(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin create version"""
        self._tester.create_version()


class TestAdminVersionDuplicates(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin create duplicate version"""
        name = "DuplicateVersion"
        self._tester.create_version(name)
        # The tester helper asserts success, so for the duplicate we drive
        # the admin form directly and check for the rejection message.
        version_admin = self._tester.url + "/admin/ticket/versions"
        tc.go(version_admin)
        tc.url(version_admin)
        tc.formvalue('addversion', 'name', name)
        tc.submit()
        tc.notfind(internal_error)
        tc.find("Version %s already exists." % name)


class TestAdminVersionDetail(FunctionalTwillTestCaseSetup):
    # This is somewhat pointless... the only place to find the version
    # description is on the version details page.
    def runTest(self):
        """Admin version details"""
        name = "DetailVersion"
        self._tester.create_version(name)
        version_admin = self._tester.url + "/admin/ticket/versions"
        tc.go(version_admin)
        tc.url(version_admin)
        tc.follow(name)
        desc = 'Some version description.'
        tc.formvalue('modifyversion', 'description', desc)
        tc.submit('save')
        # Saving redirects back to the list; re-open the detail page and
        # verify the description was persisted.
        tc.url(version_admin)
        tc.follow(name)
        tc.find(desc)


class TestAdminVersionDetailTime(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin version detail set time"""
        name = "DetailTimeVersion"
        self._tester.create_version(name)
        version_admin = self._tester.url + "/admin/ticket/versions"
        tc.go(version_admin)
        tc.url(version_admin)
        tc.follow(name)
        # Clearing the time field must be accepted and redirect to the list.
        tc.formvalue('modifyversion', 'time', '')
        tc.submit('save')
        tc.url(version_admin + '$')
        tc.find(name + '(<[^>]*>|\\s)*<[^>]* name="default" value="%s"' % name, 's')


class TestAdminVersionDetailCancel(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin version details"""
        name = "DetailVersion"
        self._tester.create_version(name)
        version_admin = self._tester.url + "/admin/ticket/versions"
        tc.go(version_admin)
        tc.url(version_admin)
        tc.follow(name)
        desc = 'Some other version description.'
        tc.formvalue('modifyversion', 'description', desc)
        # Cancel instead of save: the new description must NOT be persisted.
        tc.submit('cancel')
        tc.url(version_admin)
        tc.follow(name)
        tc.notfind(desc)
class TestAdminVersionRemove(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin remove version"""
        name = "VersionRemove"
        self._tester.create_version(name)
        version_url = self._tester.url + "/admin/ticket/versions"
        tc.go(version_url)
        # Select the version's checkbox and remove it; it must disappear
        # from the listing.
        tc.formvalue('version_table', 'sel', name)
        tc.submit('remove')
        tc.url(version_url + '$')
        tc.notfind(name)


class TestAdminVersionRemoveMulti(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin remove multiple versions"""
        name = "MultiRemoveVersion"
        count = 3
        for i in range(count):
            self._tester.create_version("%s%s" % (name, i))
        version_url = self._tester.url + '/admin/ticket/versions'
        tc.go(version_url)
        tc.url(version_url + '$')
        for i in range(count):
            tc.find("%s%s" % (name, i))
        # Select all three checkboxes, then remove them in one submit.
        for i in range(count):
            tc.formvalue('version_table', 'sel', "%s%s" % (name, i))
        tc.submit('remove')
        tc.url(version_url + '$')
        for i in range(count):
            tc.notfind("%s%s" % (name, i))


class TestAdminVersionNonRemoval(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin remove no selected version"""
        version_url = self._tester.url + "/admin/ticket/versions"
        tc.go(version_url)
        # Submit the remove button with no checkbox selected.
        tc.formvalue('version_table', 'remove', 'Remove selected items')
        tc.submit('remove')
        tc.find('No version selected')


class TestAdminVersionDefault(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin set default version"""
        name = "DefaultVersion"
        self._tester.create_version(name)
        version_url = self._tester.url + "/admin/ticket/versions"
        tc.go(version_url)
        tc.formvalue('version_table', 'default', name)
        tc.submit('apply')
        tc.find('type="radio" name="default" value="%s" checked="checked"' % \
                name)
        # verify it is the default on the newticket page.
        tc.go(self._tester.url + '/newticket')
        tc.find('<option selected="selected" value="%s">%s</option>'
                % (name, name))
class TestNewReport(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Create a new report"""
        # Arguments: report title, SQL body, report description.  The SQL
        # is SQLite-flavoured (julianday) and is stored verbatim.
        self._tester.create_report(
            'Closed tickets, modified in the past 7 days by owner.',
            'SELECT DISTINCT p.value AS __color__,'
            ' id AS ticket,'
            ' summary, component, milestone, t.type AS type,'
            ' reporter, time AS created,'
            ' changetime AS modified, description AS _description,'
            ' priority,'
            ' round(julianday(\'now\') - '
            ' julianday(changetime, \'unixepoch\')) as days,'
            ' resolution,'
            ' owner as __group__'
            ' FROM ticket t'
            ' LEFT JOIN enum p ON p.name = t.priority AND '
            ' p.type = \'priority\''
            ' WHERE ((julianday(\'now\') -'
            ' julianday(changetime, \'unixepoch\')) < 7)'
            ' AND status = \'closed\''
            ' ORDER BY __group__, changetime, p.value',
            'List of all tickets that are closed, and have been modified in'
            ' the past 7 days, grouped by owner.\n\n(So they have probably'
            ' been closed this week.)')
class RegressionTestRev5665(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Admin create version without release time (r5665)"""
        self._tester.create_version(releasetime='')


class RegressionTestRev5994(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of the column label fix in r5994"""
        env = self._testenv.get_trac_environment()
        # Add a custom ticket field so the query page has a custom column.
        env.config.set('ticket-custom', 'custfield', 'text')
        env.config.set('ticket-custom', 'custfield.label', 'Custom Field')
        env.config.save()
        try:
            self._testenv.restart()
            self._tester.go_to_query()
            # The custom field's configured label (not its raw name) must
            # appear next to its checkbox on the query page.
            tc.find('<label>( |\\n)*<input[^<]*value="custfield"'
                    '[^<]*/>( |\\n)*Custom Field( |\\n)*</label>', 's')
        finally:
            pass
            #env.config.set('ticket', 'restrict_owner', 'no')
            #env.config.save()
            #self._testenv.restart()
class RegressionTestTicket4447(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/4447"""
        ticketid = self._tester.create_ticket(summary="Hello World")
        env = self._testenv.get_trac_environment()
        # Add a custom field AFTER the ticket was created; commenting on
        # the ticket must not report spurious field changes.
        env.config.set('ticket-custom', 'newfield', 'text')
        env.config.set('ticket-custom', 'newfield.label',
                       'Another Custom Field')
        env.config.save()
        try:
            self._testenv.restart()
            self._tester.go_to_ticket(ticketid)
            self._tester.add_comment(ticketid)
            tc.notfind('deleted')
            tc.notfind('set to')
        finally:
            pass


class RegressionTestTicket4630a(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/4630 a"""
        env = self._testenv.get_trac_environment()
        env.config.set('ticket', 'restrict_owner', 'yes')
        env.config.save()
        try:
            self._testenv.restart()
            # Make sure 'user' has logged in.
            self._tester.go_to_front()
            self._tester.logout()
            self._tester.login('user')
            self._tester.logout()
            self._tester.login('admin')
            ticket_id = self._tester.create_ticket()
            self._tester.go_to_ticket(ticket_id)
            # With restrict_owner on, the reassign action must offer a
            # drop-down of known users rather than a free-text field.
            tc.formvalue('propertyform', 'action', 'reassign')
            tc.find('reassign_reassign_owner')
            tc.formvalue('propertyform', 'action_reassign_reassign_owner', 'user')
            tc.submit('submit')
        finally:
            # Undo the config change for now since this (failing)
            # regression test causes problems for later tests.
            env.config.set('ticket', 'restrict_owner', 'no')
            env.config.save()
            self._testenv.restart()
class RegressionTestTicket4630b(FunctionalTestCaseSetup):
    # NOTE: derives from FunctionalTestCaseSetup (no twill); it inspects
    # the permission system directly instead of the web UI.
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/4630 b"""
        # NOTE: this must be run after RegressionTestTicket4630 (user must
        # have logged in)
        from trac.perm import PermissionSystem
        env = self._testenv.get_trac_environment()
        perm = PermissionSystem(env)
        users = perm.get_users_with_permission('TRAC_ADMIN')
        self.assertEqual(users, ['admin'])
        users = perm.get_users_with_permission('TICKET_MODIFY')
        self.assertEqual(users, ['admin', 'user'])


class RegressionTestTicket5022(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5022
        """
        summary = 'RegressionTestTicket5022'
        ticket_id = self._tester.create_ticket(summary=summary)
        # Passing an existing ticket id to /newticket must not pre-fill the
        # form with that ticket's data.
        tc.go(self._tester.url + '/newticket?id=%s' % ticket_id)
        tc.notfind(summary)
class RegressionTestTicket5394a(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5394 a
        Order user list alphabetically in (re)assign action
        """
        # set restrict_owner config
        env = self._testenv.get_trac_environment()
        env.config.set('ticket', 'restrict_owner', 'yes')
        env.config.save()
        self._testenv.restart()
        self._tester.go_to_front()
        self._tester.logout()
        test_users = ['alice', 'bob', 'jane', 'john', 'charlie', 'alan',
                      'zorro']
        # Apprently it takes a sec for the new user to be recognized by the
        # environment.  So we add all the users, then log in as the users
        # in a second loop.  This should be faster than adding a sleep(1)
        # between the .adduser and .login steps.
        for user in test_users:
            self._testenv.adduser(user)
        for user in test_users:
            self._tester.login(user)
            self._tester.logout()
        self._tester.login('admin')
        ticketid = self._tester.create_ticket("regression test 5394a")
        self._tester.go_to_ticket(ticketid)
        # The reassign-owner <select> must list all known users in
        # alphabetical order.
        options = 'id="action_reassign_reassign_owner">' + \
                  ''.join(['<option[^>]*>%s</option>' % user for user in
                           sorted(test_users + ['admin', 'user'])])
        tc.find(options, 's')
        # We don't have a good way to fully delete a user from the Trac db.
        # Once we do, we may want to cleanup our list of users here.
class RegressionTestTicket5394b(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5394 b
        Order user list alphabetically on new ticket page
        """
        # Must run after RegressionTestTicket5394a
        self._tester.go_to_front()
        tc.follow('New Ticket')
        tc.find('Create New Ticket')
        test_users = ['alice', 'bob', 'jane', 'john', 'charlie', 'alan',
                      'zorro']
        # Weak check: the usernames only need to appear in sorted order
        # somewhere in the page.  A stricter regex anchored to the
        # id="field-owner" <select> used to be built here, but it was dead
        # code (immediately overwritten by this assignment), so it has been
        # removed.
        options = '.*'.join(sorted(test_users + ['admin', 'user']))
        tc.find(options, 's')
# TODO: this should probably be changed to be a testsuite derived from
# TestSetup
class RegressionTestTicket5497prep(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5497 prep
        When the component is changed, the owner should update to the
        default owner of the component.
        If component is changed and the owner is changed (reassigned action
        for open tickets in the basic workflow), the owner should be the
        specified owner, not the owner of the component.
        """
        # The default owner for the component we're using for this testcase
        # is 'user', and we'll manually assign to 'admin'.
        self._tester.create_component('regression5497', 'user')


class RegressionTestTicket5497a(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5497 a
        Open ticket, component changed, owner not changed"""
        ticketid = self._tester.create_ticket("regression test 5497a")
        self._tester.go_to_ticket(ticketid)
        # Change only the component; ownership must follow the component's
        # default owner ('user').
        tc.formvalue('propertyform', 'field-component', 'regression5497')
        tc.submit('submit')
        tc.find(regex_owned_by('user'))


class RegressionTestTicket5497b(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5497 b
        Open ticket, component changed, owner changed"""
        ticketid = self._tester.create_ticket("regression test 5497b")
        self._tester.go_to_ticket(ticketid)
        # Change component AND reassign explicitly; the explicit owner
        # ('admin') must win over the component default ('user').
        tc.formvalue('propertyform', 'field-component', 'regression5497')
        tc.formvalue('propertyform', 'action', 'reassign')
        tc.formvalue('propertyform', 'action_reassign_reassign_owner', 'admin')
        tc.submit('submit')
        tc.notfind(regex_owned_by('user'))
        tc.find(regex_owned_by('admin'))


class RegressionTestTicket5497c(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5497 c
        New ticket, component changed, owner not changed"""
        ticketid = self._tester.create_ticket("regression test 5497c",
                                              {'component':'regression5497'})
        self._tester.go_to_ticket(ticketid)
        tc.find(regex_owned_by('user'))


class RegressionTestTicket5497d(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5497 d
        New ticket, component changed, owner changed"""
        ticketid = self._tester.create_ticket("regression test 5497d",
                                              {'component':'regression5497', 'owner':'admin'})
        self._tester.go_to_ticket(ticketid)
        tc.find(regex_owned_by('admin'))
class RegressionTestTicket5602(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5602"""
        # Create a set of tickets, and assign them all to a milestone
        milestone = self._tester.create_milestone()
        ids = [self._tester.create_ticket() for x in range(5)]
        [self._tester.ticket_set_milestone(x, milestone) for x in ids]
        # Need a ticket in each state: new, assigned, accepted, closed,
        # reopened
        # leave ids[0] as new
        # make ids[1] be assigned
        self._tester.go_to_ticket(ids[1])
        tc.formvalue('propertyform', 'action', 'reassign')
        tc.formvalue('propertyform', 'action_reassign_reassign_owner', 'admin')
        tc.submit('submit')
        # make ids[2] be accepted
        self._tester.go_to_ticket(ids[2])
        tc.formvalue('propertyform', 'action', 'accept')
        tc.submit('submit')
        # make ids[3] be closed
        self._tester.go_to_ticket(ids[3])
        tc.formvalue('propertyform', 'action', 'resolve')
        tc.formvalue('propertyform', 'action_resolve_resolve_resolution', 'fixed')
        tc.submit('submit')
        # make ids[4] be reopened
        self._tester.go_to_ticket(ids[4])
        tc.formvalue('propertyform', 'action', 'resolve')
        tc.formvalue('propertyform', 'action_resolve_resolve_resolution', 'fixed')
        tc.submit('submit')
        # FIXME: we have to wait a second to avoid "IntegrityError: columns
        # ticket, time, field are not unique"
        time.sleep(1)
        tc.formvalue('propertyform', 'action', 'reopen')
        tc.submit('submit')
        tc.show()
        tc.notfind("Python Traceback")
        # Go to the milestone and follow the links to the closed and active
        # tickets.
        tc.go(self._tester.url + "/roadmap")
        tc.follow(milestone)
        tc.follow("closed:")
        tc.find("Resolution:[ \t\n]+fixed")
        tc.back()
        # The "active" query must show every non-closed state and no
        # closed tickets.
        tc.follow("active:")
        tc.find("Status:[ \t\n]+new")
        tc.find("Status:[ \t\n]+assigned")
        tc.find("Status:[ \t\n]+accepted")
        tc.notfind("Status:[ \t\n]+closed")
        tc.find("Status:[ \t\n]+reopened")
class RegressionTestTicket5687(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5687"""
        # Creating a ticket as a non-admin user must work; restore the
        # admin session afterwards.
        self._tester.logout()
        self._tester.login('user')
        ticketid = self._tester.create_ticket()
        self._tester.logout()
        self._tester.login('admin')


class RegressionTestTicket5930(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/5930
        TypeError: from_string() takes exactly 3 non-keyword arguments (4
        given)
        Caused by a saved query
        """
        self._tester.create_report('Saved Query', 'query:version=1.0', '')
        tc.notfind(internal_error)
        # TODO: Add a testcase for the following:
        # Can you also throw in addition of a 1.0 ticket and a 2.0 ticket
        # as part of the demo env, then see that only the correct one shows
        # up in the report?
class RegressionTestTicket6048(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/6048"""
        # Setup the DeleteTicket plugin.  The original leaked both file
        # handles; close them explicitly (try/finally keeps Python 2.4
        # compatibility, matching the file's Python 2 idioms).
        src = open(os.path.join(self._testenv.command_cwd, 'sample-plugins',
                                'workflow', 'DeleteTicket.py'))
        try:
            plugin = src.read()
        finally:
            src.close()
        dst = open(os.path.join(self._testenv.tracdir, 'plugins',
                                'DeleteTicket.py'), 'w')
        try:
            dst.write(plugin)
        finally:
            dst.close()
        env = self._testenv.get_trac_environment()
        prevconfig = env.config.get('ticket', 'workflow')
        env.config.set('ticket', 'workflow',
                       prevconfig + ',DeleteTicketActionController')
        env.config.save()
        env = self._testenv.get_trac_environment() # reload environment
        # Create a ticket and delete it
        ticket_id = self._tester.create_ticket(
            summary='RegressionTestTicket6048')
        # (Create a second ticket so that the ticket id does not get reused
        # and confuse the tester object.)
        self._tester.create_ticket(summary='RegressionTestTicket6048b')
        self._tester.go_to_ticket(ticket_id)
        tc.find('delete ticket')
        tc.formvalue('propertyform', 'action', 'delete')
        tc.submit('submit')
        # The deleted ticket must now be reported as nonexistent.
        self._tester.go_to_ticket(ticket_id)
        tc.find('Error: Invalid ticket number')
        tc.find('Ticket %s does not exist.' % ticket_id)
        # Remove the DeleteTicket plugin
        env.config.set('ticket', 'workflow', prevconfig)
        env.config.save()
        env = self._testenv.get_trac_environment() # reload environment
        for ext in ('py', 'pyc', 'pyo'):
            filename = os.path.join(self._testenv.tracdir, 'plugins',
                                    'DeleteTicket.%s' % ext)
            if os.path.exists(filename):
                os.unlink(filename)
class RegressionTestTicket6747(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/6747"""
        env = self._testenv.get_trac_environment()
        # Configure the resolve action to also hard-set an owner.
        env.config.set('ticket-workflow', 'resolve.operations',
                       'set_resolution,set_owner')
        env.config.set('ticket-workflow', 'resolve.set_owner',
                       'a_specified_owner')
        env.config.save()
        try:
            self._testenv.restart()
            ticket_id = self._tester.create_ticket("RegressionTestTicket6747")
            self._tester.go_to_ticket(ticket_id)
            # The owner name must render cleanly, not run into the
            # following text ("...owneras" was the regression).
            tc.find("a_specified_owner")
            tc.notfind("a_specified_owneras")
        finally:
            # Undo the config change to avoid causing problems for later
            # tests.
            env.config.set('ticket-workflow', 'resolve.operations',
                           'set_resolution')
            env.config.remove('ticket-workflow', 'resolve.set_owner')
            env.config.save()
            self._testenv.restart()
class RegressionTestTicket6879a(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/6879 a
        Make sure that previewing a close does not make the available actions
        be those for the close status.
        """
        # create a ticket, then preview resolving the ticket twice
        ticket_id = self._tester.create_ticket("RegressionTestTicket6879 a")
        self._tester.go_to_ticket(ticket_id)
        tc.formvalue('propertyform', 'action', 'resolve')
        tc.formvalue('propertyform', 'action_resolve_resolve_resolution', 'fixed')
        tc.submit('preview')
        # 'resolve' must still be offered on the preview page.
        tc.formvalue('propertyform', 'action', 'resolve')
        tc.submit('preview')


class RegressionTestTicket6879b(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/6879 b
        Make sure that previewing a close does not make the available actions
        be those for the close status.
        """
        # create a ticket, preview resolving it, then actually submit
        ticket_id = self._tester.create_ticket("RegressionTestTicket6879 b")
        self._tester.go_to_ticket(ticket_id)
        tc.formvalue('propertyform', 'action', 'resolve')
        tc.formvalue('propertyform', 'action_resolve_resolve_resolution', 'fixed')
        tc.submit('preview')
        tc.formvalue('propertyform', 'action', 'resolve')
        tc.submit('submit')
class RegressionTestTicket6912a(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/6912 a"""
        # Creating a component with an empty owner must not blow up in the
        # form handling (ItemNotFoundError is converted to a test failure).
        try:
            self._tester.create_component(name='RegressionTestTicket6912a',
                                          user='')
        except twill.utils.ClientForm.ItemNotFoundError, e:
            raise twill.errors.TwillAssertionError(e)


class RegressionTestTicket6912b(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/6912 b"""
        self._tester.create_component(name='RegressionTestTicket6912b',
                                      user='admin')
        tc.follow('RegressionTestTicket6912b')
        # Clearing the owner on the modify form must be allowed.
        try:
            tc.formvalue('modcomp', 'owner', '')
        except twill.utils.ClientForm.ItemNotFoundError, e:
            raise twill.errors.TwillAssertionError(e)
        tc.formvalue('modcomp', 'save', 'Save')
        tc.submit()
        # The component list must now show an empty owner cell.
        tc.find('RegressionTestTicket6912b</a>[ \n\t]*</td>[ \n\t]*'
                '<td class="owner"></td>', 's')
class RegressionTestTicket8247(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/8247
        Author field of ticket comment corresponding to the milestone removal
        was always 'anonymous'."""
        name = "MilestoneRemove"
        self._tester.create_milestone(name)
        id = self._tester.create_ticket(info={'milestone': name})
        ticket_url = self._tester.url + "/ticket/%d" % id
        tc.go(ticket_url)
        tc.find(name)
        # Remove the milestone through the admin UI, then verify the
        # auto-generated ticket change is attributed to 'admin'.
        tc.go(self._tester.url + "/admin/ticket/milestones")
        tc.formvalue('milestone_table', 'sel', name)
        tc.submit('remove')
        tc.go(ticket_url)
        tc.find('<strong>Milestone</strong>[ \n\t]*<em>%s</em> deleted' % name)
        tc.find('Changed <a.*</a> ago by admin')
        tc.notfind('anonymous')


class RegressionTestTicket8861(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/8861
        When creating a milestone with an already existing name, you get
        a warning. After changing the name you will find that the original
        milestone with that name is renamed instead of a new one being
        created."""
        # NOTE(review): docstring previously referenced ticket 8816;
        # corrected to 8861 to match the class and milestone names.
        name = "8861Milestone"
        self._tester.create_milestone(name)
        tc.go(self._tester.url + "/milestone?action=new")
        tc.formvalue('edit', 'name', name)
        tc.submit('Add milestone')
        tc.find('Milestone "%s" already exists' % name)
        # Change the name after the duplicate warning; a NEW milestone
        # must be created and the original must survive unrenamed.
        tc.formvalue('edit', 'name', name + '__')
        tc.submit('Add milestone')
        tc.go(self._tester.url + "/roadmap")
        tc.find('Milestone: <em>%s</em>' % name)
        tc.find('Milestone: <em>%s</em>' % (name + '__'))
class RegressionTestTicket9084(FunctionalTwillTestCaseSetup):
    def runTest(self):
        """Test for regression of http://trac.edgewall.org/ticket/9084"""
        ticketid = self._tester.create_ticket()
        self._tester.add_comment(ticketid)
        self._tester.go_to_ticket(ticketid)
        # Reply to comment 1, then submit a fresh comment; this sequence
        # used to trip an AssertionError.
        tc.formvalue('reply-to-comment-1', 'replyto', '1')
        tc.submit('Reply')
        tc.formvalue('propertyform', 'comment', random_sentence(3))
        tc.submit('Submit changes')
        tc.notfind('AssertionError')
def functionalSuite(suite=None):
    """Build the ticket functional-test suite.

    If `suite` is None a fresh base suite is created from
    trac.tests.functional.testcases; the tests are appended in the order
    they must run (several regression tests depend on earlier ones).
    """
    if not suite:
        import trac.tests.functional.testcases
        suite = trac.tests.functional.testcases.functionalSuite()
    suite.addTest(TestTickets())
    suite.addTest(TestTicketPreview())
    suite.addTest(TestTicketNoSummary())
    suite.addTest(TestTicketAltFormats())
    suite.addTest(TestTicketCSVFormat())
    suite.addTest(TestTicketTabFormat())
    suite.addTest(TestTicketRSSFormat())
    suite.addTest(TestTicketSearch())
    suite.addTest(TestNonTicketSearch())
    suite.addTest(TestTicketHistory())
    suite.addTest(TestTicketHistoryDiff())
    suite.addTest(TestTicketQueryLinks())
    suite.addTest(TestTicketQueryOrClause())
    suite.addTest(TestTimelineTicketDetails())
    suite.addTest(TestAdminComponent())
    suite.addTest(TestAdminComponentDuplicates())
    suite.addTest(TestAdminComponentRemoval())
    suite.addTest(TestAdminComponentNonRemoval())
    suite.addTest(TestAdminComponentDefault())
    suite.addTest(TestAdminComponentDetail())
    suite.addTest(TestAdminMilestone())
    suite.addTest(TestAdminMilestoneSpace())
    suite.addTest(TestAdminMilestoneDuplicates())
    suite.addTest(TestAdminMilestoneDetail())
    suite.addTest(TestAdminMilestoneDue())
    suite.addTest(TestAdminMilestoneDetailDue())
    suite.addTest(TestAdminMilestoneCompleted())
    suite.addTest(TestAdminMilestoneCompletedFuture())
    suite.addTest(TestAdminMilestoneRemove())
    suite.addTest(TestAdminMilestoneRemoveMulti())
    suite.addTest(TestAdminMilestoneNonRemoval())
    suite.addTest(TestAdminMilestoneDefault())
    suite.addTest(TestAdminPriority())
    suite.addTest(TestAdminPriorityModify())
    # NOTE(review): the three Priority removal tests below had drifted to
    # the bottom of the module (after the __main__ guard), where the name
    # 'suite' is undefined and the calls raised NameError at import time.
    # Restored here, matching the Remove/RemoveMulti/NonRemoval/Default
    # ordering used for the Milestone and Version groups.
    suite.addTest(TestAdminPriorityRemove())
    suite.addTest(TestAdminPriorityRemoveMulti())
    suite.addTest(TestAdminPriorityNonRemoval())
    suite.addTest(TestAdminPriorityDefault())
    suite.addTest(TestAdminPriorityDetail())
    suite.addTest(TestAdminPriorityRenumber())
    suite.addTest(TestAdminPriorityRenumberDup())
    suite.addTest(TestAdminResolution())
    suite.addTest(TestAdminResolutionDuplicates())
    suite.addTest(TestAdminSeverity())
    suite.addTest(TestAdminSeverityDuplicates())
    suite.addTest(TestAdminType())
    suite.addTest(TestAdminTypeDuplicates())
    suite.addTest(TestAdminVersion())
    suite.addTest(TestAdminVersionDuplicates())
    suite.addTest(TestAdminVersionDetail())
    suite.addTest(TestAdminVersionDetailTime())
    suite.addTest(TestAdminVersionDetailCancel())
    suite.addTest(TestAdminVersionRemove())
    suite.addTest(TestAdminVersionRemoveMulti())
    suite.addTest(TestAdminVersionNonRemoval())
    suite.addTest(TestAdminVersionDefault())
    suite.addTest(TestNewReport())
    suite.addTest(RegressionTestRev5665())
    suite.addTest(RegressionTestRev5994())
    suite.addTest(RegressionTestTicket4447())
    suite.addTest(RegressionTestTicket4630a())
    suite.addTest(RegressionTestTicket4630b())
    suite.addTest(RegressionTestTicket5022())
    suite.addTest(RegressionTestTicket5394a())
    suite.addTest(RegressionTestTicket5394b())
    suite.addTest(RegressionTestTicket5497prep())
    suite.addTest(RegressionTestTicket5497a())
    suite.addTest(RegressionTestTicket5497b())
    suite.addTest(RegressionTestTicket5497c())
    suite.addTest(RegressionTestTicket5497d())
    suite.addTest(RegressionTestTicket5602())
    suite.addTest(RegressionTestTicket5687())
    suite.addTest(RegressionTestTicket5930())
    suite.addTest(RegressionTestTicket6048())
    suite.addTest(RegressionTestTicket6747())
    suite.addTest(RegressionTestTicket6879a())
    suite.addTest(RegressionTestTicket6879b())
    suite.addTest(RegressionTestTicket6912a())
    suite.addTest(RegressionTestTicket6912b())
    suite.addTest(RegressionTestTicket8247())
    suite.addTest(RegressionTestTicket8861())
    suite.addTest(RegressionTestTicket9084())
    return suite


if __name__ == '__main__':
    unittest.main(defaultTest='functionalSuite')
|
lib.rs
|
//! EVM gasometer.

#![deny(warnings)]
#![forbid(unsafe_code, unused_variables)]
#![cfg_attr(not(feature = "std"), no_std)]

#[cfg(feature = "tracing")]
pub mod tracing;

// With the `tracing` feature enabled, `event!` emits the given tracing
// event; without it the macro expands to nothing, so instrumentation has
// zero cost in non-tracing builds.
#[cfg(feature = "tracing")]
macro_rules! event {
	($x:expr) => {
		$x.emit();
	}
}

#[cfg(not(feature = "tracing"))]
macro_rules! event {
	($x:expr) => { }
}

mod consts;
mod costs;
mod memory;
mod utils;

use core::cmp::max;
use primitive_types::{H160, H256, U256};
use evm_core::{Opcode, ExitError, Stack};
use evm_runtime::{Handler, Config};

// Evaluate `$e`; on error, poison `$inner` with a clone of the error and
// return it from the enclosing function.
macro_rules! try_or_fail {
	( $inner:expr, $e:expr ) => (
		match $e {
			Ok(value) => value,
			Err(e) => {
				$inner = Err(e.clone());
				return Err(e)
			},
		}
	)
}
/// Point-in-time copy of a gasometer's accounting state.
#[derive(Debug, Copy, Clone)]
pub struct Snapshot {
	/// Total gas limit of the gasometer.
	pub gas_limit: u64,
	/// Gas currently charged for memory expansion.
	pub memory_gas: u64,
	/// Gas used so far, excluding memory gas.
	pub used_gas: u64,
	/// Accumulated refund counter (signed).
	pub refunded_gas: i64,
}
/// EVM gasometer.
#[derive(Clone)]
pub struct Gasometer<'config> {
	gas_limit: u64,
	config: &'config Config,
	// `Ok(state)` while metering succeeds; once a failure is recorded the
	// error is stored here and all later operations short-circuit with it.
	inner: Result<Inner<'config>, ExitError>
}
impl<'config> Gasometer<'config> {
	/// Create a new gasometer with given gas limit and config.
	pub fn new(gas_limit: u64, config: &'config Config) -> Self {
		Self {
			gas_limit,
			config,
			inner: Ok(Inner {
				memory_gas: 0,
				used_gas: 0,
				refunded_gas: 0,
				config,
			}),
		}
	}

	#[inline]
	/// Returns the numerical gas cost value.
	pub fn gas_cost(
		&self,
		cost: GasCost,
		gas: u64,
	) -> Result<u64, ExitError> {
		match self.inner.as_ref() {
			Ok(inner) => inner.gas_cost(cost, gas),
			Err(e) => Err(e.clone())
		}
	}

	#[inline]
	// Mutable access to the inner state; if a failure was previously
	// recorded, propagates that error instead.
	fn inner_mut(
		&mut self
	) -> Result<&mut Inner<'config>, ExitError> {
		self.inner.as_mut().map_err(|e| e.clone())
	}

	#[inline]
	/// Reference of the config.
	pub fn config(&self) -> &'config Config {
		self.config
	}

	#[inline]
	/// Remaining gas. Zero if the gasometer has already failed.
	pub fn gas(&self) -> u64 {
		match self.inner.as_ref() {
			Ok(inner) => self.gas_limit - inner.used_gas - inner.memory_gas,
			Err(_) => 0,
		}
	}

	#[inline]
	/// Total used gas. Equals the full limit once the gasometer has failed.
	pub fn total_used_gas(&self) -> u64 {
		match self.inner.as_ref() {
			Ok(inner) => inner.used_gas + inner.memory_gas,
			Err(_) => self.gas_limit,
		}
	}

	#[inline]
	/// Refunded gas.
	pub fn refunded_gas(&self) -> i64 {
		match self.inner.as_ref() {
			Ok(inner) => inner.refunded_gas,
			Err(_) => 0,
		}
	}

	/// Explicitly fail the gasometer with out of gas. Return `OutOfGas` error.
	pub fn fail(&mut self) -> ExitError {
		self.inner = Err(ExitError::OutOfGas);
		ExitError::OutOfGas
	}

	#[inline]
	/// Record an explicit cost.
	pub fn record_cost(
		&mut self,
		cost: u64,
	) -> Result<(), ExitError> {
		event!(RecordCost {
			cost,
			snapshot: self.snapshot()?,
		});

		let all_gas_cost = self.total_used_gas() + cost;
		if self.gas_limit < all_gas_cost {
			// Charging past the limit poisons the gasometer permanently.
			self.inner = Err(ExitError::OutOfGas);
			return Err(ExitError::OutOfGas)
		}

		self.inner_mut()?.used_gas += cost;
		Ok(())
	}

	#[inline]
	/// Record an explicit refund.
	pub fn record_refund(
		&mut self,
		refund: i64,
	) -> Result<(), ExitError> {
		event!(RecordRefund {
			refund,
			snapshot: self.snapshot()?,
		});

		self.inner_mut()?.refunded_gas += refund;
		Ok(())
	}

	#[inline]
	/// Record `CREATE` code deposit.
	pub fn record_deposit(
		&mut self,
		len: usize,
	) -> Result<(), ExitError> {
		// Deposit is charged per byte of deployed code.
		let cost = len as u64 * consts::G_CODEDEPOSIT;
		self.record_cost(cost)
	}

	/// Record opcode gas cost.
	pub fn record_dynamic_cost(
		&mut self,
		cost: GasCost,
		memory: Option<MemoryCost>,
	) -> Result<(), ExitError> {
		let gas = self.gas();
		// New total memory gas if this op expands memory, else the current
		// value (memory gas is an absolute charge, not an increment).
		let memory_gas = match memory {
			Some(memory) => try_or_fail!(self.inner, self.inner_mut()?.memory_gas(memory)),
			None => self.inner_mut()?.memory_gas,
		};
		let gas_cost = try_or_fail!(self.inner, self.inner_mut()?.gas_cost(cost, gas));
		let gas_refund = self.inner_mut()?.gas_refund(cost);
		let used_gas = self.inner_mut()?.used_gas;

		event!(RecordDynamicCost {
			gas_cost,
			memory_gas,
			gas_refund,
			snapshot: self.snapshot()?,
		});

		let all_gas_cost = memory_gas + used_gas + gas_cost;
		if self.gas_limit < all_gas_cost {
			self.inner = Err(ExitError::OutOfGas);
			return Err(ExitError::OutOfGas)
		}

		// Some opcodes impose extra constraints on the gas left after the
		// charge (e.g. call stipend rules); check before committing.
		let after_gas = self.gas_limit - all_gas_cost;
		try_or_fail!(self.inner, self.inner_mut()?.extra_check(cost, after_gas));

		self.inner_mut()?.used_gas += gas_cost;
		self.inner_mut()?.memory_gas = memory_gas;
		self.inner_mut()?.refunded_gas += gas_refund;
		Ok(())
	}

	#[inline]
	/// Record opcode stipend.
	pub fn record_stipend(
		&mut self,
		stipend: u64,
	) -> Result<(), ExitError> {
		event!(RecordStipend {
			stipend,
			snapshot: self.snapshot()?,
		});

		// A stipend gives gas back, so it is subtracted from used gas.
		self.inner_mut()?.used_gas -= stipend;
		Ok(())
	}

	/// Record transaction cost.
	pub fn record_transaction(
		&mut self,
		cost: TransactionCost,
	) -> Result<(), ExitError> {
		// Intrinsic cost: flat base charge plus per-byte charges that
		// distinguish zero and non-zero calldata bytes.
		let gas_cost = match cost {
			TransactionCost::Call { zero_data_len, non_zero_data_len } => {
				self.config.gas_transaction_call +
					zero_data_len as u64 * self.config.gas_transaction_zero_data +
					non_zero_data_len as u64 * self.config.gas_transaction_non_zero_data
			},
			TransactionCost::Create { zero_data_len, non_zero_data_len } => {
				self.config.gas_transaction_create +
					zero_data_len as u64 * self.config.gas_transaction_zero_data +
					non_zero_data_len as u64 * self.config.gas_transaction_non_zero_data
			},
		};

		event!(RecordTransaction {
			cost: gas_cost,
			snapshot: self.snapshot()?,
		});

		if self.gas() < gas_cost {
			self.inner = Err(ExitError::OutOfGas);
			return Err(ExitError::OutOfGas);
		}

		self.inner_mut()?.used_gas += gas_cost;
		Ok(())
	}

	// Capture the current accounting state for tracing events.
	pub fn snapshot(&self) -> Result<Snapshot, ExitError> {
		let inner = self.inner.as_ref().map_err(|e| e.clone())?;
		Ok(Snapshot {
			gas_limit: self.gas_limit,
			memory_gas: inner.memory_gas,
			used_gas: inner.used_gas,
			refunded_gas: inner.refunded_gas,
		})
	}
}
/// Calculate the call transaction cost.
pub fn call_transaction_cost(
	data: &[u8]
) -> TransactionCost {
	// Tally zero bytes with an explicit loop; everything else is
	// non-zero payload.
	let mut zero_data_len = 0usize;
	for byte in data {
		if *byte == 0 {
			zero_data_len += 1;
		}
	}
	let non_zero_data_len = data.len() - zero_data_len;

	TransactionCost::Call { zero_data_len, non_zero_data_len }
}
/// Calculate the create transaction cost.
pub fn create_transaction_cost(
	data: &[u8]
) -> TransactionCost {
	// Count the non-zero bytes first and derive the zero count from the
	// total length.
	let non_zero_data_len = data.iter().filter(|b| **b != 0).count();
	let zero_data_len = data.len() - non_zero_data_len;

	TransactionCost::Create { zero_data_len, non_zero_data_len }
}
#[inline]
/// Return the statically-known gas cost of `opcode`, or `None` when the
/// cost depends on runtime state (stack values, storage, config) and must
/// be computed by `dynamic_opcode_cost` instead.
///
/// The table is indexed directly by the opcode byte; unlisted opcodes map
/// to `None`.
pub fn static_opcode_cost(
	opcode: Opcode,
) -> Option<u64> {
	static TABLE: [Option<u64>; 256] = {
		let mut table = [None; 256];
		// G_ZERO: free opcodes.
		table[Opcode::STOP.as_usize()] = Some(consts::G_ZERO);
		// G_BASE: cheapest chargeable opcodes (environment reads, POP, ...).
		table[Opcode::CALLDATASIZE.as_usize()] = Some(consts::G_BASE);
		table[Opcode::CODESIZE.as_usize()] = Some(consts::G_BASE);
		table[Opcode::POP.as_usize()] = Some(consts::G_BASE);
		table[Opcode::PC.as_usize()] = Some(consts::G_BASE);
		table[Opcode::MSIZE.as_usize()] = Some(consts::G_BASE);
		table[Opcode::ADDRESS.as_usize()] = Some(consts::G_BASE);
		table[Opcode::ORIGIN.as_usize()] = Some(consts::G_BASE);
		table[Opcode::CALLER.as_usize()] = Some(consts::G_BASE);
		table[Opcode::CALLVALUE.as_usize()] = Some(consts::G_BASE);
		table[Opcode::COINBASE.as_usize()] = Some(consts::G_BASE);
		table[Opcode::TIMESTAMP.as_usize()] = Some(consts::G_BASE);
		table[Opcode::NUMBER.as_usize()] = Some(consts::G_BASE);
		table[Opcode::DIFFICULTY.as_usize()] = Some(consts::G_BASE);
		table[Opcode::GASLIMIT.as_usize()] = Some(consts::G_BASE);
		table[Opcode::GASPRICE.as_usize()] = Some(consts::G_BASE);
		table[Opcode::GAS.as_usize()] = Some(consts::G_BASE);
		// G_VERYLOW: arithmetic/logic, PUSH*, DUP*, SWAP*.
		table[Opcode::ADD.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SUB.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::NOT.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::LT.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::GT.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SLT.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SGT.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::EQ.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::ISZERO.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::AND.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::OR.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::XOR.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::BYTE.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::CALLDATALOAD.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH1.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH2.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH3.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH4.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH5.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH6.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH7.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH8.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH9.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH10.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH11.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH12.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH13.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH14.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH15.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH16.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH17.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH18.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH19.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH20.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH21.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH22.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH23.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH24.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH25.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH26.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH27.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH28.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH29.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH30.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH31.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::PUSH32.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP1.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP2.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP3.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP4.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP5.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP6.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP7.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP8.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP9.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP10.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP11.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP12.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP13.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP14.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP15.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::DUP16.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP1.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP2.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP3.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP4.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP5.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP6.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP7.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP8.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP9.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP10.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP11.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP12.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP13.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP14.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP15.as_usize()] = Some(consts::G_VERYLOW);
		table[Opcode::SWAP16.as_usize()] = Some(consts::G_VERYLOW);
		// G_LOW: multiplication/division family.
		table[Opcode::MUL.as_usize()] = Some(consts::G_LOW);
		table[Opcode::DIV.as_usize()] = Some(consts::G_LOW);
		table[Opcode::SDIV.as_usize()] = Some(consts::G_LOW);
		table[Opcode::MOD.as_usize()] = Some(consts::G_LOW);
		table[Opcode::SMOD.as_usize()] = Some(consts::G_LOW);
		table[Opcode::SIGNEXTEND.as_usize()] = Some(consts::G_LOW);
		// G_MID / G_HIGH / G_JUMPDEST: control flow and modular arithmetic.
		table[Opcode::ADDMOD.as_usize()] = Some(consts::G_MID);
		table[Opcode::MULMOD.as_usize()] = Some(consts::G_MID);
		table[Opcode::JUMP.as_usize()] = Some(consts::G_MID);
		table[Opcode::JUMPI.as_usize()] = Some(consts::G_HIGH);
		table[Opcode::JUMPDEST.as_usize()] = Some(consts::G_JUMPDEST);
		table
	};
	TABLE[opcode.as_usize()]
}
/// Calculate the opcode cost.
///
/// Classifies `opcode` into a `GasCost` (resolved to a number later by
/// `Inner::gas_cost`) and, where the opcode touches memory, the region
/// whose expansion must be billed. Peeking the stack can fail with
/// `ExitError` when the stack is too shallow.
///
/// Opcodes gated behind hard-fork flags (`config.has_*`) fall through to
/// `GasCost::Invalid` when the feature is disabled; likewise state-mutating
/// opcodes (SSTORE, LOG*, CREATE*, SUICIDE, value-bearing CALL) are invalid
/// in a static context.
pub fn dynamic_opcode_cost<H: Handler>(
	address: H160,
	opcode: Opcode,
	stack: &Stack,
	is_static: bool,
	config: &Config,
	handler: &H
) -> Result<(GasCost, Option<MemoryCost>), ExitError> {
	let gas_cost = match opcode {
		Opcode::RETURN => GasCost::Zero,
		Opcode::MLOAD | Opcode::MSTORE | Opcode::MSTORE8 => GasCost::VeryLow,
		Opcode::REVERT if config.has_revert => GasCost::Zero,
		Opcode::REVERT => GasCost::Invalid,
		Opcode::CHAINID if config.has_chain_id => GasCost::Base,
		Opcode::CHAINID => GasCost::Invalid,
		Opcode::SHL | Opcode::SHR | Opcode::SAR if config.has_bitwise_shifting =>
			GasCost::VeryLow,
		Opcode::SHL | Opcode::SHR | Opcode::SAR => GasCost::Invalid,
		Opcode::SELFBALANCE if config.has_self_balance => GasCost::Low,
		Opcode::SELFBALANCE => GasCost::Invalid,
		Opcode::EXTCODESIZE => GasCost::ExtCodeSize,
		Opcode::BALANCE => GasCost::Balance,
		Opcode::BLOCKHASH => GasCost::BlockHash,
		Opcode::EXTCODEHASH if config.has_ext_code_hash => GasCost::ExtCodeHash,
		Opcode::EXTCODEHASH => GasCost::Invalid,
		// CALL-family: gas/target/value live at fixed stack slots.
		Opcode::CALLCODE => GasCost::CallCode {
			value: U256::from_big_endian(&stack.peek(2)?[..]),
			gas: U256::from_big_endian(&stack.peek(0)?[..]),
			target_exists: handler.exists(stack.peek(1)?.into()),
		},
		Opcode::STATICCALL => GasCost::StaticCall {
			gas: U256::from_big_endian(&stack.peek(0)?[..]),
			target_exists: handler.exists(stack.peek(1)?.into()),
		},
		Opcode::SHA3 => GasCost::Sha3 {
			len: U256::from_big_endian(&stack.peek(1)?[..]),
		},
		Opcode::EXTCODECOPY => GasCost::ExtCodeCopy {
			len: U256::from_big_endian(&stack.peek(3)?[..]),
		},
		Opcode::CALLDATACOPY | Opcode::CODECOPY => GasCost::VeryLowCopy {
			len: U256::from_big_endian(&stack.peek(2)?[..]),
		},
		Opcode::EXP => GasCost::Exp {
			power: U256::from_big_endian(&stack.peek(1)?[..]),
		},
		Opcode::SLOAD => GasCost::SLoad,
		Opcode::DELEGATECALL if config.has_delegate_call => GasCost::DelegateCall {
			gas: U256::from_big_endian(&stack.peek(0)?[..]),
			target_exists: handler.exists(stack.peek(1)?.into()),
		},
		Opcode::DELEGATECALL => GasCost::Invalid,
		Opcode::RETURNDATASIZE if config.has_return_data => GasCost::Base,
		Opcode::RETURNDATACOPY if config.has_return_data => GasCost::VeryLowCopy {
			len: U256::from_big_endian(&stack.peek(2)?[..]),
		},
		Opcode::RETURNDATASIZE | Opcode::RETURNDATACOPY => GasCost::Invalid,
		// SSTORE pricing needs original/current/new values for the
		// EIP-1283/2200-style net-metering computed downstream.
		Opcode::SSTORE if !is_static => {
			let index = stack.peek(0)?;
			let value = stack.peek(1)?;
			GasCost::SStore {
				original: handler.original_storage(address, index),
				current: handler.storage(address, index),
				new: value,
			}
		},
		Opcode::LOG0 if !is_static => GasCost::Log {
			n: 0,
			len: U256::from_big_endian(&stack.peek(1)?[..]),
		},
		Opcode::LOG1 if !is_static => GasCost::Log {
			n: 1,
			len: U256::from_big_endian(&stack.peek(1)?[..]),
		},
		Opcode::LOG2 if !is_static => GasCost::Log {
			n: 2,
			len: U256::from_big_endian(&stack.peek(1)?[..]),
		},
		Opcode::LOG3 if !is_static => GasCost::Log {
			n: 3,
			len: U256::from_big_endian(&stack.peek(1)?[..]),
		},
		Opcode::LOG4 if !is_static => GasCost::Log {
			n: 4,
			len: U256::from_big_endian(&stack.peek(1)?[..]),
		},
		Opcode::CREATE if !is_static => GasCost::Create,
		Opcode::CREATE2 if !is_static && config.has_create2 => GasCost::Create2 {
			len: U256::from_big_endian(&stack.peek(2)?[..]),
		},
		Opcode::SUICIDE if !is_static => GasCost::Suicide {
			value: handler.balance(address),
			target_exists: handler.exists(stack.peek(0)?.into()),
			already_removed: handler.deleted(address),
		},
		// CALL is allowed in a static context only with zero value.
		Opcode::CALL
			if !is_static ||
				(is_static && U256::from_big_endian(&stack.peek(2)?[..]) == U256::zero()) =>
			GasCost::Call {
				value: U256::from_big_endian(&stack.peek(2)?[..]),
				gas: U256::from_big_endian(&stack.peek(0)?[..]),
				target_exists: handler.exists(stack.peek(1)?.into()),
			},
		// Anything unmatched (including state-mutating opcodes in a static
		// frame) fails the gasometer.
		_ => GasCost::Invalid,
	};
	// Memory region touched by the opcode; `None` for opcodes that do not
	// expand memory. Call-family opcodes join input and output regions.
	let memory_cost = match opcode {
		Opcode::SHA3 | Opcode::RETURN | Opcode::REVERT |
		Opcode::LOG0 | Opcode::LOG1 | Opcode::LOG2 |
		Opcode::LOG3 | Opcode::LOG4 => Some(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(0)?[..]),
			len: U256::from_big_endian(&stack.peek(1)?[..]),
		}),
		Opcode::CODECOPY | Opcode::CALLDATACOPY |
		Opcode::RETURNDATACOPY => Some(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(0)?[..]),
			len: U256::from_big_endian(&stack.peek(2)?[..]),
		}),
		Opcode::EXTCODECOPY => Some(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(1)?[..]),
			len: U256::from_big_endian(&stack.peek(3)?[..]),
		}),
		Opcode::MLOAD | Opcode::MSTORE => Some(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(0)?[..]),
			len: U256::from(32),
		}),
		Opcode::MSTORE8 => Some(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(0)?[..]),
			len: U256::from(1),
		}),
		Opcode::CREATE | Opcode::CREATE2 => Some(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(1)?[..]),
			len: U256::from_big_endian(&stack.peek(2)?[..]),
		}),
		Opcode::CALL | Opcode::CALLCODE => Some(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(3)?[..]),
			len: U256::from_big_endian(&stack.peek(4)?[..]),
		}.join(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(5)?[..]),
			len: U256::from_big_endian(&stack.peek(6)?[..]),
		})),
		Opcode::DELEGATECALL |
		Opcode::STATICCALL => Some(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(2)?[..]),
			len: U256::from_big_endian(&stack.peek(3)?[..]),
		}.join(MemoryCost {
			offset: U256::from_big_endian(&stack.peek(4)?[..]),
			len: U256::from_big_endian(&stack.peek(5)?[..]),
		})),
		_ => None,
	};
	Ok((gas_cost, memory_cost))
}
/// Holds the gas consumption for a Gasometer instance.
#[derive(Clone)]
struct Inner<'config> {
	// Gas charged so far for memory expansion (monotonically increasing).
	memory_gas: u64,
	// Gas consumed by executed opcodes and intrinsic transaction cost.
	used_gas: u64,
	// Accumulated refund (SSTORE clears, SUICIDE); may go negative under
	// net-metering rules, hence the signed type.
	refunded_gas: i64,
	// Fee schedule for the active hard fork.
	config: &'config Config,
}
impl<'config> Inner<'config> {
	/// Gas required so that memory covers the region described by `memory`,
	/// i.e. the maximum of the current memory gas and the cost of the new
	/// word-aligned size. A zero-length region never expands memory.
	fn memory_gas(
		&self,
		memory: MemoryCost,
	) -> Result<u64, ExitError> {
		let from = memory.offset;
		let len = memory.len;
		if len == U256::zero() {
			return Ok(self.memory_gas)
		}
		let end = from.checked_add(len).ok_or(ExitError::OutOfGas)?;
		// Regions beyond the addressable range can never be paid for.
		if end > U256::from(usize::max_value()) {
			return Err(ExitError::OutOfGas)
		}
		let end = end.as_usize();
		// Round the end offset up to whole 32-byte words.
		let rem = end % 32;
		let new = if rem == 0 {
			end / 32
		} else {
			end / 32 + 1
		};
		Ok(max(self.memory_gas, memory::memory_gas(new)?))
	}
	/// Extra validity check after the main charge: for the call family,
	/// verify the requested call gas against the remaining gas.
	fn extra_check(
		&self,
		cost: GasCost,
		after_gas: u64,
	) -> Result<(), ExitError> {
		match cost {
			GasCost::Call { gas, .. } => costs::call_extra_check(gas, after_gas, self.config),
			GasCost::CallCode { gas, .. } => costs::call_extra_check(gas, after_gas, self.config),
			GasCost::DelegateCall { gas, .. } => costs::call_extra_check(gas, after_gas, self.config),
			GasCost::StaticCall { gas, .. } => costs::call_extra_check(gas, after_gas, self.config),
			_ => Ok(()),
		}
	}
	/// Returns the gas cost numerical value.
	fn gas_cost(
		&self,
		cost: GasCost,
		gas: u64,
	) -> Result<u64, ExitError> {
		Ok(match cost {
			GasCost::Call { value, target_exists, .. } =>
				costs::call_cost(value, true, true, !target_exists, self.config),
			GasCost::CallCode { value, target_exists, .. } =>
				costs::call_cost(value, true, false, !target_exists, self.config),
			GasCost::DelegateCall { target_exists, .. } =>
				costs::call_cost(U256::zero(), false, false, !target_exists, self.config),
			GasCost::StaticCall { target_exists, .. } =>
				costs::call_cost(U256::zero(), false, true, !target_exists, self.config),
			GasCost::Suicide { value, target_exists, .. } =>
				costs::suicide_cost(value, target_exists, self.config),
			// In estimate mode, charge the worst case for SSTORE instead of
			// the value-dependent net-metered cost.
			GasCost::SStore { .. } if self.config.estimate => self.config.gas_sstore_set,
			GasCost::SStore { original, current, new } =>
				costs::sstore_cost(original, current, new, gas, self.config)?,
			GasCost::Sha3 { len } => costs::sha3_cost(len)?,
			GasCost::Log { n, len } => costs::log_cost(n, len)?,
			GasCost::ExtCodeCopy { len } => costs::extcodecopy_cost(len, self.config)?,
			GasCost::VeryLowCopy { len } => costs::verylowcopy_cost(len)?,
			GasCost::Exp { power } => costs::exp_cost(power, self.config)?,
			GasCost::Create => consts::G_CREATE,
			GasCost::Create2 { len } => costs::create2_cost(len)?,
			GasCost::SLoad => self.config.gas_sload,
			GasCost::Zero => consts::G_ZERO,
			GasCost::Base => consts::G_BASE,
			GasCost::VeryLow => consts::G_VERYLOW,
			GasCost::Low => consts::G_LOW,
			// Invalid opcodes consume all remaining gas.
			GasCost::Invalid => return Err(ExitError::OutOfGas),
			GasCost::ExtCodeSize => self.config.gas_ext_code,
			GasCost::Balance => self.config.gas_balance,
			GasCost::BlockHash => consts::G_BLOCKHASH,
			GasCost::ExtCodeHash => self.config.gas_ext_code_hash,
		})
	}
	/// Refund (possibly negative) generated by this operation. Estimate
	/// mode disables refunds so the estimate stays conservative.
	fn gas_refund(
		&self,
		cost: GasCost
	) -> i64 {
		match cost {
			_ if self.config.estimate => 0,
			GasCost::SStore { original, current, new } =>
				costs::sstore_refund(original, current, new, self.config),
			GasCost::Suicide { already_removed, .. } =>
				costs::suicide_refund(already_removed),
			_ => 0,
		}
	}
}
/// Gas cost.
///
/// A symbolic classification of an opcode's charge; resolved to a concrete
/// number by `Inner::gas_cost` using the active `Config`.
#[derive(Debug, Clone, Copy)]
pub enum GasCost {
	/// Zero gas cost.
	Zero,
	/// Base gas cost.
	Base,
	/// Very low gas cost.
	VeryLow,
	/// Low gas cost.
	Low,
	/// Fail the gasometer.
	Invalid,
	/// Gas cost for `EXTCODESIZE`.
	ExtCodeSize,
	/// Gas cost for `BALANCE`.
	Balance,
	/// Gas cost for `BLOCKHASH`.
	BlockHash,
	/// Gas cost for `EXTCODEHASH`.
	ExtCodeHash,
	/// Gas cost for `CALL`.
	Call {
		/// Call value.
		value: U256,
		/// Call gas.
		gas: U256,
		/// Whether the target exists.
		target_exists: bool
	},
	/// Gas cost for `CALLCODE`.
	CallCode {
		/// Call value.
		value: U256,
		/// Call gas.
		gas: U256,
		/// Whether the target exists.
		target_exists: bool
	},
	/// Gas cost for `DELEGATECALL`.
	DelegateCall {
		/// Call gas.
		gas: U256,
		/// Whether the target exists.
		target_exists: bool
	},
	/// Gas cost for `STATICCALL`.
	StaticCall {
		/// Call gas.
		gas: U256,
		/// Whether the target exists.
		target_exists: bool
	},
	/// Gas cost for `SUICIDE`.
	Suicide {
		/// Value.
		value: U256,
		/// Whether the target exists.
		target_exists: bool,
		/// Whether the target has already been removed.
		already_removed: bool
	},
	/// Gas cost for `SSTORE`.
	SStore {
		/// Original value.
		original: H256,
		/// Current value.
		current: H256,
		/// New value.
		new: H256
	},
	/// Gas cost for `SHA3`.
	Sha3 {
		/// Length of the data.
		len: U256
	},
	/// Gas cost for `LOG`.
	Log {
		/// Topic length.
		n: u8,
		/// Data length.
		len: U256
	},
	/// Gas cost for `EXTCODECOPY`.
	ExtCodeCopy {
		/// Length.
		len: U256
	},
	/// Gas cost for some copy opcodes that is documented as `VERYLOW`.
	VeryLowCopy {
		/// Length.
		len: U256
	},
	/// Gas cost for `EXP`.
	Exp {
		/// Power of `EXP`.
		power: U256
	},
	/// Gas cost for `CREATE`.
	Create,
	/// Gas cost for `CREATE2`.
	Create2 {
		/// Length.
		len: U256
	},
	/// Gas cost for `SLOAD`.
	SLoad,
}
/// Memory cost.
///
/// Describes a memory region touched by an opcode, used to bill memory
/// expansion.
#[derive(Debug, Clone, Copy)]
pub struct MemoryCost {
	/// Affected memory offset.
	pub offset: U256,
	/// Affected length.
	pub len: U256,
}
/// Transaction cost.
///
/// Intrinsic-gas classification of a transaction's payload; the zero /
/// non-zero byte split matters because the two classes are billed at
/// different per-byte rates.
#[derive(Debug, Clone, Copy)]
pub enum TransactionCost {
	/// Call transaction cost.
	Call {
		/// Length of zeros in transaction data.
		zero_data_len: usize,
		/// Length of non-zeros in transaction data.
		non_zero_data_len: usize
	},
	/// Create transaction cost.
	Create {
		/// Length of zeros in transaction data.
		zero_data_len: usize,
		/// Length of non-zeros in transaction data.
		non_zero_data_len: usize
	},
}
impl MemoryCost {
	/// Join two memory cost together.
	///
	/// A zero-length region is a no-op and yields the other operand;
	/// otherwise the region reaching the furthest end (offset + len,
	/// saturating) wins, since that is the one driving memory expansion.
	pub fn join(self, other: MemoryCost) -> MemoryCost {
		if self.len == U256::zero() {
			other
		} else if other.len == U256::zero() {
			self
		} else {
			let lhs_end = self.offset.saturating_add(self.len);
			let rhs_end = other.offset.saturating_add(other.len);
			if lhs_end >= rhs_end { self } else { other }
		}
	}
}
|
use crate::tracing::Event::*;
|
spec_tests.rs
|
use super::*;
#[test]
// Bit 2 of PPUCTRL selects the VRAM address increment: 0 => +1 (across),
// 1 => +32 (down).
fn vram_addr_increment() {
	let ppu_ctrl = new_control_register(0b00000000);
	assert_eq!(IncrementAmount::One, ppu_ctrl.vram_addr_increment());
	let ppu_ctrl = new_control_register(0b00000100);
	assert_eq!(IncrementAmount::ThirtyTwo, ppu_ctrl.vram_addr_increment());
}
#[test]
// Bit 5 of PPUCTRL selects sprite size: 0 => 8x8, 1 => 8x16.
fn sprite_size() {
	let ppu_ctrl = new_control_register(0b00000000);
	assert_eq!(SpriteSize::X8, ppu_ctrl.sprite_size());
	let ppu_ctrl = new_control_register(0b00100000);
	assert_eq!(SpriteSize::X16, ppu_ctrl.sprite_size());
}
#[test]
// Bit 7 of PPUCTRL enables NMI generation at the start of vertical blank.
fn nmi_on_vblank_start() {
	let ppu_ctrl = new_control_register(0b00000000);
	assert_eq!(false, ppu_ctrl.nmi_on_vblank_start());
	let ppu_ctrl = new_control_register(0b10000000);
	assert_eq!(true, ppu_ctrl.nmi_on_vblank_start());
}
fn new_control_register(val: u8) -> ControlRegister {
ControlRegister { reg: val }
|
}
|
|
geohash_test.go
|
// Copyright (C) 2014 Constantin Schomburg <[email protected]>
// Based on Tomi Hiltunen's work (https://github.com/TomiHiltunen/geohash-golang)
//
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file.
package location
import "testing"
// geohashTest pairs a geohash string with the bounding box it is expected
// to decode to.
type geohashTest struct {
	input  string
	output *BoundingBox
}
func TestDecodeGeohash(t *testing.T) {
var tests = []geohashTest{
{"d", &BoundingBox{0, 45, -90, -45}},
{"dr", &BoundingBox{39.375, 45, -78.75, -67.5}},
{"dr1", &BoundingBox{39.375, 40.78125, -77.34375, -75.9375}},
{"dr12", &BoundingBox{39.375, 39.55078125, -76.9921875, -76.640625}},
}
|
t.Errorf("expected bounding box %v, got %v", test.output, box)
}
}
}
// encodeTest pairs a lat/lng coordinate with its expected 12-character
// geohash encoding.
type encodeTest struct {
	lat     float64
	lng     float64
	geohash string
}
// TestEncodeGeohash checks full-precision encoding against known vectors,
// then verifies that lower precisions produce prefixes of the full hash.
func TestEncodeGeohash(t *testing.T) {
	var tests = []encodeTest{
		{39.55078125, -76.640625, "dr12zzzzzzzz"},
		{39.5507, -76.6406, "dr18bpbp88fe"},
		{39.55, -76.64, "dr18bpb7qw65"},
		{39, -76, "dqcvyedrrwut"},
	}
	for _, test := range tests {
		geohash := EncodeGeohash(test.lat, test.lng, 12)
		if test.geohash != geohash {
			t.Errorf("expectd %s, got %s", test.geohash, geohash)
		}
	}
	// BUG FIX: the original `for prec := range []int{3, ...}` iterated the
	// slice *indices* (0..5), not the precision values, so precisions 0-2
	// were tested and 6-8 never were. Use the two-value range form.
	for _, prec := range []int{3, 4, 5, 6, 7, 8} {
		for _, test := range tests {
			geohash := EncodeGeohash(test.lat, test.lng, prec)
			if len(geohash) != prec {
				t.Errorf("expected len %d, got %d", prec, len(geohash))
			}
			if test.geohash[0:prec] != geohash {
				t.Errorf("expectd %s, got %s", test.geohash, geohash)
			}
		}
	}
}
|
for _, test := range tests {
box := DecodeGeohash(test.input)
if *test.output != *box {
|
websocket.rs
|
use actix::{Actor, StreamHandler};
use actix_web::web::Bytes;
use actix_web_actors::ws;
// WebSocket session actor. Stateless for now; one instance per connection.
struct Ws;
impl Actor for Ws {
	// Run inside an actix-web WebSocket context.
	type Context = ws::WebsocketContext<Self>;
}
impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for Ws {
	/// Dispatch incoming WebSocket frames: answer pings with pongs, route
	/// text/binary payloads through the session's handlers, and silently
	/// drop everything else (close, continuation, protocol errors).
	fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, ctx: &mut Self::Context) {
		match msg {
			Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
			Ok(ws::Message::Text(text)) => ctx.text(self.text(text)),
			Ok(ws::Message::Binary(bin)) => ctx.binary(self.binary(bin)),
			_ => (),
		}
	}
}
impl Ws {
// Handler for incoming text frames. Placeholder: ignores the payload and
// replies with an empty string.
fn text(&self,msg: String) -> String {
	"".to_string()
}
fn binary(&self,msg:Bytes)->Bytes
|
}
|
{
Bytes::from("")
}
|
sign_sample.py
|
# -*- coding: UTF-8 -*-
import hashlib
import hmac
import string
import datetime
AUTHORIZATION = "authorization"
BCE_PREFIX = "x-bce-"
DEFAULT_ENCODING = 'UTF-8'
# AK/SK Storage Class
class BceCredentials(object):
    """Container for a BCE access key pair (access key id / secret key)."""
    def __init__(self, access_key_id, secret_access_key):
        self.access_key_id = access_key_id
        self.secret_access_key = secret_access_key
# Encode every character according to RFC 3986, except:
# 1.Alphabet in upper or lower case
# 2.Numbers
# 3.Dot '.', wave '~', minus '-' and underline '_'
RESERVED_CHAR_SET = set(string.ascii_letters + string.digits + '.~-_')
def get_normalized_char(i):
    """Return the RFC 3986 encoding of byte value ``i``.

    Characters in ``RESERVED_CHAR_SET`` pass through unchanged; every other
    byte becomes its percent-encoded form (``%XX``).
    """
    char = chr(i)
    return char if char in RESERVED_CHAR_SET else '%%%02X' % i
NORMALIZED_CHAR_LIST = [get_normalized_char(i) for i in range(256)]
def normalize_string(in_str, encoding_slash=True):
    """Percent-encode ``in_str`` per RFC 3986.

    When ``encoding_slash`` is False, '/' is left unescaped (used for URI
    paths). ``None`` normalizes to the empty string.
    """
    if in_str is None:
        return ''
    # Encode unicode with UTF-8 before normalizing
    # NOTE(review): Python 2 only — `unicode` does not exist on Python 3.
    in_str = in_str.encode(DEFAULT_ENCODING) if isinstance(in_str, unicode) else str(in_str)
    if encoding_slash:
        encode_f = lambda c: NORMALIZED_CHAR_LIST[ord(c)]
    else:
        # Keep '/' literal so path separators survive.
        encode_f = lambda c: NORMALIZED_CHAR_LIST[ord(c)] if c != '/' else c
    return ''.join([encode_f(ch) for ch in in_str])
def get_canonical_time(timestamp=0):
    """Format a UTC time as ``[year]-[month]-[day]T[hour]:[minute]:[second]Z``.

    ``timestamp`` is seconds since the epoch; 0 (the default) means "now".
    """
    if timestamp == 0:
        moment = datetime.datetime.utcnow()
    else:
        moment = datetime.datetime.utcfromtimestamp(timestamp)
    return moment.strftime('%Y-%m-%dT%H:%M:%SZ')
def get_canonical_uri(path):
# Format of canonical URI: /{object}, will encode every character except slash '/'
re
|
def get_canonical_querystring(params):
    """Build the canonical query string: every parameter except
    "authorization" (case-insensitive), percent-encoded, sorted, joined
    with '&'. ``None`` yields the empty string.
    """
    if params is None:
        return ''
    # BUG FIX: the original used `k.lower != AUTHORIZATION`, comparing the
    # bound method object itself (always unequal), so the authorization
    # parameter was never actually filtered out. Call the method.
    result = ['%s=%s' % (k, normalize_string(v))
              for k, v in params.items()
              if k.lower() != AUTHORIZATION]
    # Sort in alphabet order
    result.sort()
    # Catenate all strings with &
    return '&'.join(result)
def get_canonical_headers(headers, headers_to_sign=None):
    """Build the canonical headers block: selected headers, normalized as
    ``key:value``, sorted, joined with newlines.

    Headers are included when they begin with ``x-bce-`` or when their
    lower-cased name is in ``headers_to_sign`` (defaulting to host,
    content-md5, content-length, content-type).
    """
    headers = headers or {}
    # If you don't specify header_to_sign, will use:
    # 1.host
    # 2.content-md5
    # 3.content-length
    # 4.content-type
    # 5.all the headers begin with x-bce-
    if headers_to_sign is None or len(headers_to_sign) == 0:
        headers_to_sign = {"host", "content-md5", "content-length", "content-type"}
    # Strip key in headers and change them to lower case
    # Convert value in headers to string and strip them
    # NOTE(review): tuple-unpacking lambda and iteritems() are Python 2
    # only syntax/APIs.
    f = lambda (key, value): (key.strip().lower(), str(value).strip())
    result = []
    for k, v in map(f, headers.iteritems()):
        # Headers begin with x-bce- should be in canonical headers in any case
        if k.startswith(BCE_PREFIX) or k in headers_to_sign:
            result.append("%s:%s" % (normalize_string(k), normalize_string(v)))
    # Sort in alphabet order
    result.sort()
    # Catenate all strings with \n
    return '\n'.join(result)
def sign(credentials, http_method, path, headers, params,
         timestamp=0, expiration_in_seconds=1800, headers_to_sign=None):
    """Produce the BCE v1 authorization string for a request.

    Derives a signing key from the secret key and an auth-string, builds
    the canonical request (method, URI, query string, headers), and returns
    ``{auth-string}/{signed-headers}/{signature}``. ``timestamp=0`` signs
    with the current time.
    """
    headers = headers or {}
    params = params or {}
    # 1.Generate sign key
    # 1.1.Build auth-string,format:bce-auth-v1/{accessKeyId}/{timestamp}/{expirationPeriodInSeconds}
    sign_key_info = 'bce-auth-v1/%s/%s/%d' % (
        credentials.access_key_id,
        get_canonical_time(timestamp),
        expiration_in_seconds)
    # 1.2.Generate sign key with auth-string and SK using SHA-256
    # NOTE(review): Python 2 — hmac.new takes str here; Python 3 would
    # require bytes keys/messages.
    sign_key = hmac.new(
        credentials.secret_access_key,
        sign_key_info,
        hashlib.sha256).hexdigest()
    # 2.Generate canonical uri
    canonical_uri = get_canonical_uri(path)
    # 3.Generate canonical query string
    canonical_querystring = get_canonical_querystring(params)
    # 4.Generate canonical headers
    canonical_headers = get_canonical_headers(headers, headers_to_sign)
    # 5.Generate string to sign with results from step 2,3 and 4
    string_to_sign = '\n'.join(
        [http_method, canonical_uri, canonical_querystring, canonical_headers])
    # 6.Generate signature with string to sign and sign key using SHA-256
    sign_result = hmac.new(sign_key, string_to_sign, hashlib.sha256).hexdigest()
    # 7.Catenate result string
    if headers_to_sign:
        # header to sign specified by caller
        result = '%s/%s/%s' % (sign_key_info, ';'.join(headers_to_sign), sign_result)
    else:
        # header to sign not specified by caller
        result = '%s//%s' % (sign_key_info, sign_result)
    return result
if __name__ == "__main__":
    # Demo: sign a fixed multipart-upload PUT request with placeholder keys.
    credentials = BceCredentials("your_AK","your_SK")
    http_method = "PUT"
    path = "/v1/test/myfolder/readme.txt"
    headers = {"host": "bj.bcebos.com",
               "content-length": 8,
               "content-md5": "0a52730597fb4ffa01fc117d9e71e3a9",
               "content-type":"text/plain",
               "x-bce-date": "2015-04-27T08:23:49Z"}
    params = {"partNumber": 9,
              "uploadId": "VXBsb2FkIElpZS5tMnRzIHVwbG9hZA"}
    # Fixed timestamp so the output is reproducible (matches x-bce-date).
    timestamp = 1430123029
    result = sign(credentials, http_method, path, headers, params, timestamp)
    # NOTE(review): Python 2 print statement.
    print result
|
turn normalize_string(path, False)
|
orders.module.ts
|
import { NgModule } from '@angular/core';
import { CommonModule } from '@angular/common';
import { SharedModule } from '@shared/shared.module';
import { OrdersRoutingModule } from './orders-routing.module';
import { OrdersComponent } from './orders.component';
@NgModule({
declarations: [OrdersComponent],
|
imports: [CommonModule, OrdersRoutingModule, SharedModule],
})
export class OrdersModule {}
| |
builder.py
|
# Copyright (c) OpenMMLab. All rights reserved.
import copy
import platform
import random
from functools import partial
import numpy as np
from mmcv.parallel import collate
from mmcv.runner import get_dist_info
from mmcv.utils import Registry, build_from_cfg
from torch.utils.data import DataLoader
from .samplers import DistributedGroupSampler, DistributedSampler, GroupSampler
# Root data directories per dataset family.
DATA_ROOT = {
    'BIT': './data/BIT',
    'UT': './data/ut120',
    'highfive': './data/highfive'
}
# Sub-directory holding extracted frames (only defined for BIT so far).
FRAMES_ROOT = {
    'BIT': 'Bit-frames',
}
# Sub-directory holding annotations (only defined for BIT so far).
ANNO_ROOT = {
    'BIT': 'BIT-anno/tidy_anno'
}
if platform.system() != 'Windows':
    # https://github.com/pytorch/pytorch/issues/973
    # Dataloader workers can exhaust the process file-descriptor limit;
    # raise the soft limit to at least 4096, capped by the hard limit.
    import resource
    rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
    base_soft_limit = rlimit[0]
    hard_limit = rlimit[1]
    soft_limit = min(max(4096, base_soft_limit), hard_limit)
    resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit))
# Registries for human-interaction-detection datasets and pipelines.
HID_DATASETS = Registry('hid_dataset')
HID_PIPELINES = Registry('hid_pipeline')
def build_dataset(cfg, default_args=None):
|
def build_dataloader(dataset,
                     samples_per_gpu,
                     workers_per_gpu,
                     num_gpus=1,
                     dist=True,
                     shuffle=True,
                     seed=None,
                     **kwargs):
    """Build PyTorch DataLoader.

    In distributed training each GPU/process gets its own dataloader; in
    non-distributed training a single dataloader serves all GPUs.

    Args:
        dataset (Dataset): A PyTorch dataset.
        samples_per_gpu (int): Batch size of each GPU.
        workers_per_gpu (int): Subprocesses used for data loading per GPU.
        num_gpus (int): Number of GPUs. Only used in non-distributed mode.
        dist (bool): Distributed training/test or not. Default: True.
        shuffle (bool): Whether to shuffle the data at every epoch.
            Default: True.
        seed (int | None): Base seed for reproducible worker seeding.
        kwargs: extra keyword arguments forwarded to DataLoader.

    Returns:
        DataLoader: A PyTorch dataloader.
    """
    rank, world_size = get_dist_info()
    if dist:
        # Each process loads its own shard, so the per-process batch size
        # is exactly samples_per_gpu.
        if shuffle:
            # DistributedGroupSampler shuffles while keeping samples of the
            # same group on the same GPU.
            sampler = DistributedGroupSampler(
                dataset, samples_per_gpu, world_size, rank, seed=seed)
        else:
            sampler = DistributedSampler(
                dataset, world_size, rank, shuffle=False, seed=seed)
        batch_size, num_workers = samples_per_gpu, workers_per_gpu
    else:
        # Single process drives all GPUs, so batch size and worker count
        # scale with num_gpus.
        sampler = GroupSampler(dataset, samples_per_gpu) if shuffle else None
        batch_size = num_gpus * samples_per_gpu
        num_workers = num_gpus * workers_per_gpu

    if seed is not None:
        init_fn = partial(
            worker_init_fn, num_workers=num_workers, rank=rank, seed=seed)
    else:
        init_fn = None

    return DataLoader(
        dataset,
        batch_size=batch_size,
        sampler=sampler,
        num_workers=num_workers,
        collate_fn=partial(collate, samples_per_gpu=samples_per_gpu),
        pin_memory=False,
        worker_init_fn=init_fn,
        **kwargs)
def worker_init_fn(worker_id, num_workers, rank, seed):
    """Seed numpy and the stdlib RNG for one dataloader worker.

    Each worker receives the distinct, reproducible seed
    ``num_workers * rank + worker_id + seed``.
    """
    per_worker_seed = seed + rank * num_workers + worker_id
    random.seed(per_worker_seed)
    np.random.seed(per_worker_seed)
|
dataset = build_from_cfg(cfg, HID_DATASETS, default_args)
return dataset
|
article.go
|
package mysql
|
import "time"
// Article is the GORM model for a blog post row in `blog_article`.
// NOTE(review): Created_at/Updated_at are non-idiomatic Go names, but
// renaming them would break external callers, so they are kept.
type Article struct {
	Id          int        `gorm:"column:id" json:"id"`
	Title       string     `gorm:"column:title" json:"title"`
	Content     string     `gorm:"column:content" json:"content"`
	Cover       string     `gorm:"column:cover" json:"cover"`
	Description string     `gorm:"column:description" json:"description"`
	Created_at  *time.Time `gorm:"column:created_at" json:"created_at"`
	Updated_at  *time.Time `gorm:"column:updated_at" json:"updated_at"`
}
// TableName tells GORM which database table backs the Article model.
// Idiom fix: Go receivers should be short names, never `self`; the
// receiver name is not part of the method's interface, so callers are
// unaffected.
func (a Article) TableName() string {
	return "blog_article"
}
| |
vector.rs
|
use std::{
borrow::Borrow,
ffi::c_void,
fmt,
iter::FromIterator,
marker::PhantomData,
mem::ManuallyDrop,
slice,
};
pub use iter::{VectorIterator, VectorRefIterator};
pub use vector_extern::{VectorElement, VectorExtern, VectorExternCopyNonBool};
use crate::{
platform_types::size_t,
Result,
traits::{Boxed, OpenCVType, OpenCVTypeArg, OpenCVTypeExternContainer},
};
mod vector_extern;
mod iter;
/// Wrapper for C++ [std::vector](https://en.cppreference.com/w/cpp/container/vector)
///
/// Owns an opaque pointer to the underlying C++ vector; `PhantomData`
/// records the element type for the type checker only.
pub struct Vector<T: VectorElement> where Self: VectorExtern<T> {
	ptr: *mut c_void,
	_d: PhantomData<T>,
}
impl<T: VectorElement> Vector<T> where Self: VectorExtern<T> {
/// Create a new Vector
pub fn new() -> Self {
unsafe { Self::from_raw(Self::extern_new()) }
}
/// Create a Vector with pre-defined capacity
pub fn with_capacity(capacity: size_t) -> Self {
let mut out = Self::new();
out.reserve(capacity);
out
}
/// Create a Vector from iterator
pub fn from_iter<'a>(s: impl IntoIterator<Item=<T as OpenCVType<'a>>::Arg>) -> Self {
<Self as FromIterator<_>>::from_iter(s)
}
/// Return Vector length
pub fn len(&self) -> size_t {
unsafe { self.extern_len() }
}
/// Return true if Vector is empty
pub fn is_empty(&self) -> bool {
unsafe { self.extern_is_empty() }
}
/// Return Vector current capacity
pub fn capacity(&self) -> size_t {
unsafe { self.extern_capacity() }
}
/// Free extra capacity
pub fn shrink_to_fit(&mut self) {
unsafe { self.extern_shrink_to_fit() }
}
/// Reserve capacity for `additional` new elements
pub fn reserve(&mut self, additional: size_t) {
unsafe { self.extern_reserve(additional) }
}
/// Remove all elements
pub fn clear(&mut self) {
unsafe { self.extern_clear() }
}
/// Remove the element at the specified `index`
pub fn remove(&mut self, index: size_t) -> Result<()> {
vector_index_check(index, self.len())?;
unsafe { self.extern_remove(index) }
Ok(())
}
/// Swap 2 elements in the Vector
pub fn swap(&mut self, index1: size_t, index2: size_t) -> Result<()> {
let len = self.len();
vector_index_check(index1, len)?;
vector_index_check(index2, len)?;
if index1 != index2 {
unsafe { self.extern_swap(index1, index2) }
}
Ok(())
}
/// Add new element
pub fn push(&mut self, val: <T as OpenCVType>::Arg) {
let val = val.opencv_into_extern_container_nofail();
unsafe { self.extern_push(val.opencv_as_extern()) }
}
pub(crate) fn push_owned(&mut self, val: T) {
let val = val.opencv_into_extern_container_nofail();
unsafe { self.extern_push_owned(val.opencv_as_extern()) }
}
/// Insert a new element at the specified `index`
///
/// `index == len()` is allowed and appends; larger indices yield `StsOutOfRange`.
pub fn insert(&mut self, index: size_t, val: <T as OpenCVType>::Arg) -> Result<()> {
	// `len() + 1` permits insertion one-past-the-end (append).
	vector_index_check(index, self.len() + 1)?;
	let val = val.opencv_into_extern_container()?;
	unsafe { self.extern_insert(index, val.opencv_as_extern()) }
	Ok(())
}
/// Set element at the specified `index`
///
/// Returns an `StsOutOfRange` error when `index >= len()`.
pub fn set(&mut self, index: size_t, val: <T as OpenCVType>::Arg) -> Result<()> {
	vector_index_check(index, self.len())?;
	let val = val.opencv_into_extern_container()?;
	unsafe { self.extern_set(index, val.opencv_as_extern()) }
	Ok(())
}
/// Same as `set()` but without bounds checking
///
/// # Safety
/// Caller must guarantee `index < self.len()`.
pub unsafe fn set_unchecked(&mut self, index: size_t, val: <T as OpenCVType>::Arg) {
	let val = val.opencv_into_extern_container_nofail();
	self.extern_set(index, val.opencv_as_extern())
}
/// Get element at the specified `index`
///
/// Bounds-checked; the raw extern value is converted back into an owned `T`.
pub fn get(&self, index: size_t) -> Result<T> {
	vector_index_check(index, self.len())?;
	unsafe { self.extern_get(index) }
		.into_result()
		.map(|s| unsafe { T::opencv_from_extern(s) } )
}
/// Same as `get()` but without bounds checking
///
/// # Safety
/// Caller must guarantee `index < self.len()`.
pub unsafe fn get_unchecked(&self, index: size_t) -> T {
	self.extern_get(index)
		.into_result()
		.map(|s| T::opencv_from_extern(s) )
		.unwrap() // fixme, make it return value directly
}
/// Return an iterator over borrowed elements of the Vector.
pub fn iter(&self) -> VectorRefIterator<T> {
	VectorRefIterator::new(self)
}
/// View the Vector's storage as a Rust slice.
///
/// Only available for element types with contiguous, directly-copyable storage
/// (the `VectorExternCopyNonBool` bound — the name indicates `bool` is excluded,
/// matching C++'s bit-packed `vector<bool>`).
pub fn as_slice(&self) -> &[T] where Self: VectorExternCopyNonBool<T> {
	unsafe {
		slice::from_raw_parts(self.extern_data(), self.len())
	}
}
/// Mutable counterpart of `as_slice()`; same `VectorExternCopyNonBool` restriction.
pub fn as_mut_slice(&mut self) -> &mut [T] where Self: VectorExternCopyNonBool<T> {
	unsafe {
		slice::from_raw_parts_mut(self.extern_data_mut(), self.len())
	}
}
/// Copy the elements into a new Rust `Vec<T>` (conversion strategy is chosen
/// per element type by `VectorElement::convert_to_vec`).
pub fn to_vec(&self) -> Vec<T> {
	T::convert_to_vec(self)
}
}
/// Default is an empty vector, same as `Vector::new()`.
impl<T: VectorElement> Default for Vector<T> where Self: VectorExtern<T> {
	#[inline]
	fn default() -> Vector<T> {
		Vector::new()
	}
}
/// Converts into a Rust `Vec` by copying elements out (see `to_vec`);
/// the FFI-side vector is dropped afterwards.
impl<T: VectorElement> From<Vector<T>> for Vec<T> where Vector<T>: VectorExtern<T> {
	#[inline]
	fn from(from: Vector<T>) -> Self {
		from.to_vec()
	}
}
/// Builds a Vector by extending an empty one (the `Extend` impl pre-reserves
/// using the iterator's `size_hint`).
impl<'a, T: VectorElement> FromIterator<<T as OpenCVType<'a>>::Arg> for Vector<T> where Self: VectorExtern<T> {
	fn from_iter<I: IntoIterator<Item=<T as OpenCVType<'a>>::Arg>>(s: I) -> Vector<T> {
		let mut out = Self::new();
		out.extend(s);
		out
	}
}
/// Slice view; only for element types supporting `as_slice` (see its doc).
impl<T: VectorElement> AsRef<[T]> for Vector<T> where Self: VectorExtern<T> + VectorExternCopyNonBool<T> {
	#[inline]
	fn as_ref(&self) -> &[T] {
		self.as_slice()
	}
}
/// Same slice view as `AsRef`, exposed through the `Borrow` trait.
impl<T: VectorElement> Borrow<[T]> for Vector<T> where Self: VectorExtern<T> + VectorExternCopyNonBool<T> {
	#[inline]
	fn borrow(&self) -> &[T] {
		self.as_slice()
	}
}
/// Debug-renders like a slice, e.g. `[1, 2, 3]`.
impl<T: VectorElement + fmt::Debug> fmt::Debug for Vector<T> where Self: VectorExtern<T> {
	#[inline]
	fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
		f.debug_list().entries(self.iter()).finish()
	}
}
impl<T: VectorElement> Drop for Vector<T> where Self: VectorExtern<T> {
fn drop(&mut self)
|
}
/// Appends every item from the iterator, pre-reserving capacity from its
/// `size_hint` (upper bound when available, otherwise the lower bound).
impl<'a, T: VectorElement> Extend<<T as OpenCVType<'a>>::Arg> for Vector<T> where Self: VectorExtern<T> {
	fn extend<I: IntoIterator<Item=<T as OpenCVType<'a>>::Arg>>(&mut self, s: I) {
		let iter = s.into_iter();
		let (lo, hi) = iter.size_hint();
		self.reserve(hi.unwrap_or(lo));
		for elem in iter {
			self.push(elem);
		}
	}
}
/// Raw-pointer plumbing for the FFI boundary.
impl<T: VectorElement> Boxed for Vector<T> where Self: VectorExtern<T> {
	// Takes ownership of `ptr`; the wrapper will free it on `Drop`.
	#[inline]
	unsafe fn from_raw(ptr: *mut c_void) -> Self {
		Self { ptr, _d: PhantomData }
	}
	// Relinquishes ownership: `ManuallyDrop` suppresses our destructor so the
	// caller (typically C++) becomes responsible for freeing the pointer.
	#[inline]
	fn into_raw(self) -> *mut c_void {
		ManuallyDrop::new(self).ptr
	}
	#[inline]
	fn as_raw(&self) -> *const c_void {
		self.ptr
	}
	#[inline]
	fn as_raw_mut(&mut self) -> *mut c_void {
		self.ptr
	}
}
/// A Vector crosses the FFI boundary as an opaque pointer; it is its own
/// argument/container type, so the conversions are identity.
impl<T: VectorElement> OpenCVType<'_> for Vector<T> where Self: VectorExtern<T> {
	type Arg = Self;
	type ExternReceive = *mut c_void;
	type ExternContainer = Self;
	#[inline]
	fn opencv_into_extern_container(self) -> Result<Self::ExternContainer> {
		Ok(self)
	}
	#[inline]
	fn opencv_into_extern_container_nofail(self) -> Self::ExternContainer {
		self
	}
	// Receiving a raw pointer from C++ transfers ownership to the wrapper.
	#[inline]
	unsafe fn opencv_from_extern(s: Self::ExternReceive) -> Self {
		Self::from_raw(s)
	}
}
/// Argument-position counterpart of `OpenCVType`; identity conversions as well.
impl<T: VectorElement> OpenCVTypeArg<'_> for Vector<T> where Self: VectorExtern<T> {
	type ExternContainer = Self;
	#[inline]
	fn opencv_into_extern_container(self) -> Result<Self::ExternContainer> {
		Ok(self)
	}
	#[inline]
	fn opencv_into_extern_container_nofail(self) -> Self::ExternContainer {
		self
	}
}
/// Sending a Vector over FFI just passes its raw pointer (const or mut).
impl<T: VectorElement> OpenCVTypeExternContainer for Vector<T> where Self: VectorExtern<T> {
	type ExternSend = *const c_void;
	type ExternSendMut = *mut c_void;
	#[inline]
	fn opencv_as_extern(&self) -> Self::ExternSend {
		self.as_raw()
	}
	#[inline]
	fn opencv_as_extern_mut(&mut self) -> Self::ExternSendMut {
		self.as_raw_mut()
	}
}
#[inline(always)]
/// Shared bounds check: `Ok(())` when `index < len`, otherwise an
/// `StsOutOfRange` error naming the index and the valid range.
pub(crate) fn vector_index_check(index: size_t, len: size_t) -> crate::Result<()> {
	if index < len {
		return Ok(());
	}
	Err(crate::Error::new(crate::core::StsOutOfRange, format!("Index: {} out of bounds: 0..{}", index, len)))
}
|
{
unsafe { self.extern_delete() }
}
|
positive_semidefinite_kernel.py
|
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""PositiveSemidefiniteKernel base."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import contextlib
import functools
import operator
import six
import tensorflow.compat.v2 as tf
from tensorflow_probability.python.positive_semidefinite_kernels.internal import util
__all__ = [
'PositiveSemidefiniteKernel',
]
@six.add_metaclass(abc.ABCMeta)
class PositiveSemidefiniteKernel(tf.Module):
"""Abstract base class for positive semi-definite kernel functions.
#### Background
For any set `S`, a real- (or complex-valued) function `k` on the Cartesian
product `S x S` is called positive semi-definite if we have
```none
sum_i sum_j (c[i]*) c[j] k(x[i], x[j]) >= 0
```
for any finite collections `{x[1], ..., x[N]}` in S and `{c[1], ..., c[N]}` in
the reals (or the complex plane). '*' denotes the complex conjugate, in the
complex case.
Some examples:
- `S` is R, and `k(s, t) = (s - a) (t - b)`, where a, b are in R. This
corresponds to a linear kernel.
- `S` is R^+ U {0}, and `k(s, t) = min(s, t)`. This corresponds to a kernel
for a Wiener process.
- `S` is the set of strings over an alphabet `A = {c1, ... cC}`, and
`k(s, t)` is defined via some similarity metric over strings.
We model positive semi-definite functions (*kernels*, in common machine
learning parlance) as classes with 3 primary public methods: `apply`,
`matrix`, and `tensor`.
`apply` computes the value of the kernel function at a pair of (batches of)
input locations. It is the more "low-level" operation: `matrix` and `tensor`
are implemented in terms of `apply`.
`matrix` computes the value of the kernel *pairwise* on two (batches of)
lists of input examples. When the two collections are the same the result is
called the Gram (or Gramian) matrix
(https://en.wikipedia.org/wiki/Gramian_matrix).
`tensor` generalizes `matrix`, taking rank `k1` and `k2` collections of
input examples to a rank `k1 + k2` collection of kernel values.
#### Kernel Parameter Shape Semantics
PositiveSemidefiniteKernel implementations support batching of kernel
parameters and broadcasting of these parameters across batches of inputs. This
allows, for example, creating a single kernel object which acts like a
collection of kernels with different parameters. This might be useful for,
e.g., for exploring multiple random initializations in parallel during a
kernel parameter optimization procedure.
The interaction between kernel parameter shapes and input shapes (see below)
is somewhat subtle. The semantics are designed to make the most common use
cases easy, while not ruling out more intricate control. The overarching
principle is that kernel parameter batch shapes must be broadcastable with
input batch shapes (see below). Examples are provided in the method-level
documentation.
#### Input Shape Semantics
PositiveSemidefiniteKernel methods each support a notion of batching inputs;
see the method-level documentation for full details; here we describe the
overall semantics of input shapes. Inputs to PositiveSemidefiniteKernel
methods partition into 3 pieces:
```none
[b1, ..., bB, e1, ..., eE, f1, ..., fF]
'----------' '---------' '---------'
| | '-- Feature dimensions
| '-- Example dimensions
'-- Batch dimensions
```
- Feature dimensions correspond to the space over which the kernel is defined;
in typical applications inputs are vectors and this part of the shape is
rank-1. For example, if our kernel is defined over R^2 x R^2, each input is
a 2-D vector (a rank-1 tensor of shape `[2,]`) so that
`F = 1, [f1, ..., fF] = [2]`. If we defined a kernel over DxD matrices, its
domain would be R^(DxD) x R^(DxD), we would have `F = 2` and
`[f1, ..., fF] = [D, D]`. Feature shapes of inputs should be the same, but
no exception will be raised unless they are broadcast-incompatible.
- Batch dimensions describe collections of inputs which in some sense have
nothing to do with each other, but may be coupled to batches of kernel
parameters. It's required that batch dimensions of inputs broadcast with
each other, and with the kernel's overall batch shape.
- Example dimensions are shape elements which represent a collection of inputs
that in some sense "go together" (whereas batches are "independent"). The
exact semantics are different for the `apply`, `matrix` and `tensor` methods
(see method-level doc strings for more details). `apply` combines examples
together pairwise, much like the python built-in `zip`. `matrix` combines
examples pairwise for *all* pairs of elements from two rank-1 input
collections (lists), ie, it applies the kernel to all elements in the
cross-product of two lists of examples. `tensor` further generalizes
`matrix` to higher rank collections of inputs. Only `matrix` strictly
requires example dimensions to be present (and to be exactly rank 1),
although the typical usage of `apply` (eg, building a matrix diagonal) will
also have `example_ndims` 1.
##### Examples
```python
import tensorflow_probability as tfp
# Suppose `SomeKernel` acts on vectors (rank-1 tensors), ie number of
# feature dimensions is 1.
scalar_kernel = tfp.positive_semidefinite_kernels.SomeKernel(param=.5)
scalar_kernel.batch_shape
# ==> []
# `x` and `y` are batches of five 3-D vectors:
x = np.ones([5, 3], np.float32)
y = np.ones([5, 3], np.float32)
scalar_kernel.apply(x, y).shape
# ==> [5]
scalar_kernel.matrix(x, y).shape
# ==> [5, 5]
```
Now we can consider a kernel with batched parameters:
```python
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(param=[.2, .5])
batch_kernel.batch_shape
# ==> [2]
# `x` and `y` are batches of five 3-D vectors:
x = np.ones([5, 3], np.float32)
y = np.ones([5, 3], np.float32)
batch_kernel.apply(x, y).shape
# ==> Error! [2] and [5] can't broadcast.
# We could solve this by telling `apply` to treat the 5 as an example dim:
batch_kernel.apply(x, y, example_ndims=1).shape
# ==> [2, 5]
# Note that example_ndims is implicitly 1 for a call to `matrix`, so the
# following just works:
batch_kernel.matrix(x, y).shape
# ==> [2, 5, 5]
```
"""
  def __init__(self, feature_ndims, dtype=None, name=None):
    """Construct a PositiveSemidefiniteKernel (subclass) instance.

    Args:
      feature_ndims: Python `integer` indicating the number of dims (the rank)
        of the feature space this kernel acts on.
      dtype: `DType` on which this kernel operates. May be `None`.
      name: Python `str` name prefixed to Ops created by this class. Default:
        subclass name.

    Raises:
      ValueError: if `feature_ndims` is not an integer greater than 0

    Inputs to PositiveSemidefiniteKernel methods partition into 3 pieces:

    ```none
    [b1, ..., bB, e1, ..., eE, f1, ..., fF]
    '----------'  '---------'  '---------'
         |             |            '-- Feature dimensions
         |             '-- Example dimensions
         '-- Batch dimensions
    ```

    The `feature_ndims` argument declares how many of the right-most shape
    dimensions belong to the feature dimensions. This enables us to predict
    which shape dimensions will be 'reduced' away during kernel computation.
    """
    # NOTE(review): `isinstance(True, int)` holds in Python, so a bool
    # `feature_ndims` would pass this check — confirm whether that matters.
    if not (isinstance(feature_ndims, int) and feature_ndims > 0):
      raise ValueError(
          '`feature_ndims` must be a Python `integer` greater than zero. ' +
          'Got: {}'.format(feature_ndims))
    self._feature_ndims = feature_ndims
    self._dtype = dtype
    if not name or name[-1] != '/':  # `name` is not already a name scope
      # Materialize a scope so `self._name` is the graph-unique scope name.
      name = tf.name_scope(name or type(self).__name__).name
    self._name = name
  @property
  def feature_ndims(self):
    """The number of feature dimensions.

    Kernel functions generally act on pairs of inputs from some space like

    ```none
    R^(d1 x ... x dD)
    ```

    or, in words: rank-`D` real-valued tensors of shape `[d1, ..., dD]`. Inputs
    can be vectors in some `R^N`, but are not restricted to be. Indeed, one
    might consider kernels over matrices, tensors, or even more general spaces,
    like strings or graphs.

    Returns:
      The number of feature dimensions (feature rank) of this kernel.
    """
    # Set once in `__init__`; never reassigned in this class.
    return self._feature_ndims
  @property
  def dtype(self):
    """DType over which the kernel operates; `None` if unspecified at init."""
    return self._dtype
  @property
  def name(self):
    """Name prepended to all ops created by this class."""
    # Resolved to a graph-unique name scope string in `__init__`.
    return self._name
  @property
  def batch_shape(self):
    """The batch_shape property of a PositiveSemidefiniteKernel.

    This property describes the fully broadcast shape of all kernel parameters.
    For example, consider an ExponentiatedQuadratic kernel, which is
    parameterized by an amplitude and length_scale:

    ```none
    exp_quad(x, x') := amplitude * exp(||x - x'||**2 / length_scale**2)
    ```

    The batch_shape of such a kernel is derived from broadcasting the shapes of
    `amplitude` and `length_scale`. E.g., if their shapes were

    ```python
    amplitude.shape = [2, 1, 1]
    length_scale.shape = [1, 4, 3]
    ```

    then `exp_quad`'s batch_shape would be `[2, 4, 3]`.

    Note that this property defers to the private _batch_shape method, which
    concrete implementation sub-classes are obliged to provide.

    Returns:
      `TensorShape` instance describing the fully broadcast shape of all
      kernel parameters.
    """
    return self._batch_shape()
  def batch_shape_tensor(self):
    """The batch_shape property of a PositiveSemidefiniteKernel as a `Tensor`.

    Returns:
      `Tensor` which evaluates to a vector of integers which are the
      fully-broadcast shapes of the kernel parameters.
    """
    with tf.name_scope(self._name):
      # Fast path: if the static shape is fully known, emit a constant instead
      # of deferring to the subclass's dynamic implementation.
      if self.batch_shape.is_fully_defined():
        return tf.convert_to_tensor(
            self.batch_shape.as_list(), dtype=tf.int32, name='batch_shape')
      with tf.name_scope('batch_shape_tensor'):
        return self._batch_shape_tensor()
@contextlib.contextmanager
def _name_scope(self, name=None, values=None):
"""Helper function to standardize op scope."""
with tf.name_scope(self.name):
values = [] if values is None else values
with tf.name_scope(name) as scope:
yield scope
def apply(self, x1, x2, example_ndims=0):
"""Apply the kernel function pairs of inputs.
Args:
x1: `Tensor` input to the kernel, of shape `B1 + E1 + F`, where `B1` and
`E1` may be empty (ie, no batch/example dims, resp.) and `F` (the
feature shape) must have rank equal to the kernel's `feature_ndims`
property. Batch shape must broadcast with the batch shape of `x2` and
with the kernel's batch shape. Example shape must broadcast with example
shape of `x2`. `x1` and `x2` must have the same *number* of example dims
(ie, same rank).
      x2: `Tensor` input to the kernel, of shape `B2 + E2 + F`, where `B2` and
        `E2` may be empty (ie, no batch/example dims, resp.) and `F` (the
        feature shape) must have rank equal to the kernel's `feature_ndims`
        property. Batch shape must broadcast with the batch shape of `x1` and
        with the kernel's batch shape. Example shape must broadcast with example
        shape of `x1`. `x1` and `x2` must have the same *number* of example
        dims (ie, same rank).
example_ndims: A python integer, the number of example dims in the inputs.
In essence, this parameter controls how broadcasting of the kernel's
batch shape with input batch shapes works. The kernel batch shape will
be broadcast against everything to the left of the combined example and
feature dimensions in the input shapes.
Returns:
`Tensor` containing the results of applying the kernel function to inputs
`x1` and `x2`. If the kernel parameters' batch shape is `Bk` then the
shape of the `Tensor` resulting from this method call is
`broadcast(Bk, B1, B2) + broadcast(E1, E2)`.
Given an index set `S`, a kernel function is mathematically defined as a
real- or complex-valued function on `S` satisfying the
positive semi-definiteness constraint:
```none
sum_i sum_j (c[i]*) c[j] k(x[i], x[j]) >= 0
```
for any finite collections `{x[1], ..., x[N]}` in `S` and
`{c[1], ..., c[N]}` in the reals (or the complex plane). '*' is the complex
conjugate, in the complex case.
This method most closely resembles the function described in the
mathematical definition of a kernel. Given a PositiveSemidefiniteKernel `k`
with scalar parameters and inputs `x` and `y` in `S`, `apply(x, y)` yields a
single scalar value.
#### Examples
```python
import tensorflow_probability as tfp
# Suppose `SomeKernel` acts on vectors (rank-1 tensors)
scalar_kernel = tfp.positive_semidefinite_kernels.SomeKernel(param=.5)
scalar_kernel.batch_shape
# ==> []
# `x` and `y` are batches of five 3-D vectors:
x = np.ones([5, 3], np.float32)
y = np.ones([5, 3], np.float32)
scalar_kernel.apply(x, y).shape
# ==> [5]
```
The above output is the result of vectorized computation of the five values
```none
[k(x[0], y[0]), k(x[1], y[1]), ..., k(x[4], y[4])]
```
Now we can consider a kernel with batched parameters:
```python
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(param=[.2, .5])
batch_kernel.batch_shape
# ==> [2]
batch_kernel.apply(x, y).shape
# ==> Error! [2] and [5] can't broadcast.
```
The parameter batch shape of `[2]` and the input batch shape of `[5]` can't
be broadcast together. We can fix this in either of two ways:
1. Give the parameter a shape of `[2, 1]` which will correctly
broadcast with `[5]` to yield `[2, 5]`:
```python
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(
param=[[.2], [.5]])
batch_kernel.batch_shape
# ==> [2, 1]
batch_kernel.apply(x, y).shape
# ==> [2, 5]
```
2. By specifying `example_ndims`, which tells the kernel to treat the `5`
in the input shape as part of the "example shape", and "pushing" the
kernel batch shape to the left:
```python
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(param=[.2, .5])
batch_kernel.batch_shape
# ==> [2]
    batch_kernel.apply(x, y, example_ndims=1).shape
    # ==> [2, 5]
    ```
    """
with self._name_scope(self._name, values=[x1, x2]):
x1 = tf.convert_to_tensor(x1, name='x1')
x2 = tf.convert_to_tensor(x2, name='x2')
should_expand_dims = (example_ndims == 0)
if should_expand_dims:
example_ndims += 1
x1 = tf.expand_dims(x1, -(self.feature_ndims + 1))
x2 = tf.expand_dims(x2, -(self.feature_ndims + 1))
result = self._apply(x1, x2, example_ndims=example_ndims)
if should_expand_dims:
result = tf.squeeze(result, axis=-1)
return result
def _apply(self, x1, x2, example_ndims=1):
"""Apply the kernel function to a pair of (batches of) inputs.
Subclasses must implement this method. It will always be called with
example_ndims >= 1. Implementations should take care to respect
example_ndims, by padding parameters on the right with 1's example_ndims
times. See tests and existing subclasses for examples.
Args:
x1: `Tensor` input to the first positional parameter of the kernel, of
shape `B1 + E1 + F`, where `B1` may be empty (ie, no batch dims, resp.),
`E1` is a shape of rank at least 1, and `F` (the feature shape) must
have rank equal to the kernel's `feature_ndims` property. Batch shape
|
must broadcast with the batch shape of `x2` and with the kernel's batch
shape. Example shape must broadcast with example shape of `x2` (They
don't strictly need to be equal, e.g., when `apply` is called from
`matrix`, `x1` and `x2` each have 1's in opposing positions in their
example shapes). `x1` and `x2` must have the same *number* of example
dims (ie, same rank).
x2: `Tensor` input to the second positional parameter of the kernel,
shape `B2 + E2 + F`, where `B2` may be empty (ie, no batch dims, resp.),
`E2` is a shape of rank at least 1, and `F` (the feature shape) must
have rank equal to the kernel's `feature_ndims` property. Batch shape
must broadcast with the batch shape of `x1` and with the kernel's batch
shape. Example shape must broadcast with example shape of `x1` (They
don't strictly need to be equal, e.g., when `apply` is called from
`matrix`, `x1` and `x2` each have 1's in opposing positions in their
example shapes). `x1` and `x2` must have the same *number* of example
dims (ie, same rank).
example_ndims: A python integer greater than or equal to 1, the number of
example dims in the inputs. In essence, this parameter controls how
broadcasting of the kernel's batch shape with input batch shapes works.
The kernel batch shape will be broadcast against everything to the left
of the combined example and feature dimensions in the input shapes.
Returns:
`Tensor` containing the results of applying the kernel function to inputs
`x1` and `x2`. If the kernel parameters' batch shape is `Bk` then the
shape of the `Tensor` resulting from this method call is
`broadcast(Bk, B1, B2) + broadcast(E1, E2)`.
"""
raise NotImplementedError(
'Subclasses must provide `_apply` implementation.')
def matrix(self, x1, x2):
"""Construct (batched) matrices from (batches of) collections of inputs.
Args:
x1: `Tensor` input to the first positional parameter of the kernel, of
shape `B1 + [e1] + F`, where `B1` may be empty (ie, no batch dims,
resp.), `e1` is a single integer (ie, `x1` has example ndims exactly 1),
and `F` (the feature shape) must have rank equal to the kernel's
`feature_ndims` property. Batch shape must broadcast with the batch
shape of `x2` and with the kernel's batch shape.
x2: `Tensor` input to the second positional parameter of the kernel,
shape `B2 + [e2] + F`, where `B2` may be empty (ie, no batch dims,
resp.), `e2` is a single integer (ie, `x2` has example ndims exactly 1),
and `F` (the feature shape) must have rank equal to the kernel's
`feature_ndims` property. Batch shape must broadcast with the batch
shape of `x1` and with the kernel's batch shape.
Returns:
`Tensor` containing the matrix (possibly batched) of kernel applications
to pairs from inputs `x1` and `x2`. If the kernel parameters' batch shape
is `Bk` then the shape of the `Tensor` resulting from this method call is
`broadcast(Bk, B1, B2) + [e1, e2]` (note this differs from `apply`: the
example dimensions are concatenated, whereas in `apply` the example dims
are broadcast together).
Given inputs `x1` and `x2` of shapes
```none
[b1, ..., bB, e1, f1, ..., fF]
```
and
```none
[c1, ..., cC, e2, f1, ..., fF]
```
This method computes the batch of `e1 x e2` matrices resulting from applying
the kernel function to all pairs of inputs from `x1` and `x2`. The shape
of the batch of matrices is the result of broadcasting the batch shapes of
`x1`, `x2`, and the kernel parameters (see examples below). As such, it's
required that these shapes all be broadcast compatible. However, the kernel
parameter batch shapes need not broadcast against the 'example shapes' (`e1`
and `e2` above).
When the two inputs are the (batches of) identical collections, the
resulting matrix is the so-called Gram (or Gramian) matrix
(https://en.wikipedia.org/wiki/Gramian_matrix).
#### Examples
First, consider a kernel with a single scalar parameter.
```python
import tensorflow_probability as tfp
scalar_kernel = tfp.positive_semidefinite_kernels.SomeKernel(param=.5)
scalar_kernel.batch_shape
# ==> []
# Our inputs are two lists of 3-D vectors
x = np.ones([5, 3], np.float32)
y = np.ones([4, 3], np.float32)
scalar_kernel.matrix(x, y).shape
# ==> [5, 4]
```
The result comes from applying the kernel to the entries in `x` and `y`
pairwise, across all pairs:
```none
| k(x[0], y[0]) k(x[0], y[1]) ... k(x[0], y[3]) |
| k(x[1], y[0]) k(x[1], y[1]) ... k(x[1], y[3]) |
| ... ... ... |
| k(x[4], y[0]) k(x[4], y[1]) ... k(x[4], y[3]) |
```
Now consider a kernel with batched parameters with the same inputs
```python
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(param=[1., .5])
batch_kernel.batch_shape
# ==> [2]
batch_kernel.matrix(x, y).shape
# ==> [2, 5, 4]
```
This results in a batch of 2 matrices, one computed from the kernel with
`param = 1.` and the other with `param = .5`.
We also support batching of the inputs. First, let's look at that with
the scalar kernel again.
```python
# Batch of 10 lists of 5 vectors of dimension 3
x = np.ones([10, 5, 3], np.float32)
# Batch of 10 lists of 4 vectors of dimension 3
y = np.ones([10, 4, 3], np.float32)
scalar_kernel.matrix(x, y).shape
# ==> [10, 5, 4]
```
The result is a batch of 10 matrices built from the batch of 10 lists of
input vectors. These batch shapes have to be broadcastable. The following
will *not* work:
```python
x = np.ones([10, 5, 3], np.float32)
y = np.ones([20, 4, 3], np.float32)
scalar_kernel.matrix(x, y).shape
# ==> Error! [10] and [20] can't broadcast.
```
Now let's consider batches of inputs in conjunction with batches of kernel
parameters. We require that the input batch shapes be broadcastable with
the kernel parameter batch shapes, otherwise we get an error:
```python
x = np.ones([10, 5, 3], np.float32)
y = np.ones([10, 4, 3], np.float32)
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(params=[1., .5])
batch_kernel.batch_shape
# ==> [2]
batch_kernel.matrix(x, y).shape
# ==> Error! [2] and [10] can't broadcast.
```
The fix is to make the kernel parameter shape broadcastable with `[10]` (or
reshape the inputs to be broadcastable!):
```python
x = np.ones([10, 5, 3], np.float32)
y = np.ones([10, 4, 3], np.float32)
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(
params=[[1.], [.5]])
batch_kernel.batch_shape
# ==> [2, 1]
batch_kernel.matrix(x, y).shape
# ==> [2, 10, 5, 4]
# Or, make the inputs broadcastable:
x = np.ones([10, 1, 5, 3], np.float32)
y = np.ones([10, 1, 4, 3], np.float32)
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(
params=[1., .5])
batch_kernel.batch_shape
# ==> [2]
batch_kernel.matrix(x, y).shape
# ==> [10, 2, 5, 4]
```
Here, we have the result of applying the kernel, with 2 different
parameters, to each of a batch of 10 pairs of input lists.
"""
with self._name_scope(self._name, values=[x1, x2]):
x1 = tf.convert_to_tensor(x1, name='x1')
x2 = tf.convert_to_tensor(x2, name='x2')
return self.tensor(x1, x2, x1_example_ndims=1, x2_example_ndims=1)
def tensor(self, x1, x2, x1_example_ndims, x2_example_ndims):
"""Construct (batched) tensors from (batches of) collections of inputs.
Args:
x1: `Tensor` input to the first positional parameter of the kernel, of
shape `B1 + E1 + F`, where `B1` and `E1` arbitrary shapes which may be
empty (ie, no batch/example dims, resp.), and `F` (the feature shape)
must have rank equal to the kernel's `feature_ndims` property. Batch
shape must broadcast with the batch shape of `x2` and with the kernel's
batch shape.
x2: `Tensor` input to the second positional parameter of the kernel,
shape `B2 + E2 + F`, where `B2` and `E2` arbitrary shapes which may be
empty (ie, no batch/example dims, resp.), and `F` (the feature shape)
must have rank equal to the kernel's `feature_ndims` property. Batch
shape must broadcast with the batch shape of `x1` and with the kernel's
batch shape.
x1_example_ndims: A python integer greater than or equal to 0, the number
of example dims in the first input. This affects both the alignment of
batch shapes and the shape of the final output of the function.
Everything left of the feature shape and the example dims in `x1` is
considered "batch shape", and must broadcast as specified above.
x2_example_ndims: A python integer greater than or equal to 0, the number
of example dims in the second input. This affects both the alignment of
batch shapes and the shape of the final output of the function.
        Everything left of the feature shape and the example dims in `x2` is
considered "batch shape", and must broadcast as specified above.
Returns:
`Tensor` containing (possibly batched) kernel applications to pairs from
inputs `x1` and `x2`. If the kernel parameters' batch shape is `Bk` then
the shape of the `Tensor` resulting from this method call is
`broadcast(Bk, B1, B2) + E1 + E2`. Note this differs from `apply`: the
example dimensions are concatenated, whereas in `apply` the example dims
are broadcast together. It also differs from `matrix`: the example shapes
are arbitrary here, and the result accrues a rank equal to the sum of the
ranks of the input example shapes.
#### Examples
First, consider a kernel with a single scalar parameter.
```python
import tensorflow_probability as tfp
scalar_kernel = tfp.positive_semidefinite_kernels.SomeKernel(param=.5)
scalar_kernel.batch_shape
# ==> []
# Our inputs are two rank-2 collections of 3-D vectors
x = np.ones([5, 6, 3], np.float32)
y = np.ones([7, 8, 3], np.float32)
scalar_kernel.tensor(x, y, x1_example_ndims=2, x2_example_ndims=2).shape
# ==> [5, 6, 7, 8]
# Empty example shapes work too!
x = np.ones([3], np.float32)
y = np.ones([5, 3], np.float32)
scalar_kernel.tensor(x, y, x1_example_ndims=0, x2_example_ndims=1).shape
# ==> [5]
```
The result comes from applying the kernel to the entries in `x` and `y`
pairwise, across all pairs:
```none
| k(x[0], y[0]) k(x[0], y[1]) ... k(x[0], y[3]) |
| k(x[1], y[0]) k(x[1], y[1]) ... k(x[1], y[3]) |
| ... ... ... |
| k(x[4], y[0]) k(x[4], y[1]) ... k(x[4], y[3]) |
```
Now consider a kernel with batched parameters.
```python
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(param=[1., .5])
batch_kernel.batch_shape
# ==> [2]
# Inputs are two rank-2 collections of 3-D vectors
x = np.ones([5, 6, 3], np.float32)
y = np.ones([7, 8, 3], np.float32)
    batch_kernel.tensor(x, y, x1_example_ndims=2, x2_example_ndims=2).shape
# ==> [2, 5, 6, 7, 8]
```
We also support batching of the inputs. First, let's look at that with
the scalar kernel again.
```python
# Batch of 10 lists of 5x6 collections of dimension 3
x = np.ones([10, 5, 6, 3], np.float32)
# Batch of 10 lists of 7x8 collections of dimension 3
y = np.ones([10, 7, 8, 3], np.float32)
scalar_kernel.tensor(x, y, x1_example_ndims=2, x2_example_ndims=2).shape
# ==> [10, 5, 6, 7, 8]
```
The result is a batch of 10 tensors built from the batch of 10 rank-2
collections of input vectors. The batch shapes have to be broadcastable.
The following will *not* work:
```python
x = np.ones([10, 5, 3], np.float32)
y = np.ones([20, 4, 3], np.float32)
scalar_kernel.tensor(x, y, x1_example_ndims=1, x2_example_ndims=1).shape
# ==> Error! [10] and [20] can't broadcast.
```
Now let's consider batches of inputs in conjunction with batches of kernel
parameters. We require that the input batch shapes be broadcastable with
the kernel parameter batch shapes, otherwise we get an error:
```python
x = np.ones([10, 5, 6, 3], np.float32)
y = np.ones([10, 7, 8, 3], np.float32)
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(params=[1., .5])
batch_kernel.batch_shape
# ==> [2]
batch_kernel.tensor(x, y, x1_example_ndims=2, x2_example_ndims=2).shape
# ==> Error! [2] and [10] can't broadcast.
```
The fix is to make the kernel parameter shape broadcastable with `[10]` (or
reshape the inputs to be broadcastable!):
```python
x = np.ones([10, 5, 6, 3], np.float32)
y = np.ones([10, 7, 8, 3], np.float32)
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(
params=[[1.], [.5]])
batch_kernel.batch_shape
# ==> [2, 1]
batch_kernel.tensor(x, y, x1_example_ndims=2, x2_example_ndims=2).shape
# ==> [2, 10, 5, 6, 7, 8]
# Or, make the inputs broadcastable:
x = np.ones([10, 1, 5, 6, 3], np.float32)
y = np.ones([10, 1, 7, 8, 3], np.float32)
batch_kernel = tfp.positive_semidefinite_kernels.SomeKernel(
params=[1., .5])
batch_kernel.batch_shape
# ==> [2]
batch_kernel.tensor(x, y, x1_example_ndims=2, x2_example_ndims=2).shape
# ==> [10, 2, 5, 6, 7, 8]
```
"""
with self._name_scope(self._name, values=[x1, x2]):
x1 = tf.convert_to_tensor(x1, name='x1')
x2 = tf.convert_to_tensor(x2, name='x2')
x1 = util.pad_shape_with_ones(
x1,
ndims=x2_example_ndims,
start=-(self.feature_ndims + 1))
x2 = util.pad_shape_with_ones(
x2,
ndims=x1_example_ndims,
start=-(self.feature_ndims + 1 + x2_example_ndims))
return self.apply(
x1, x2, example_ndims=(x1_example_ndims + x2_example_ndims))
  def _batch_shape(self):
    # Abstract hook backing the public `batch_shape` property; concrete
    # kernels must return a static `TensorShape`.
    raise NotImplementedError('Subclasses must provide batch_shape property.')
  def _batch_shape_tensor(self):
    # Abstract hook for the dynamic (graph-time) batch shape, as a Tensor.
    raise NotImplementedError(
        'Subclasses must provide batch_shape_tensor implementation')
def __add__(self, k):
if not isinstance(k, PositiveSemidefiniteKernel):
raise ValueError(
"Can't add non-kernel (of type '%s') to kernel" % type(k))
return _SumKernel([self, k])
def __iadd__(self, k):
return self.__add__(k)
def __mul__(self, k):
if not isinstance(k, PositiveSemidefiniteKernel):
raise ValueError(
"Can't multiply by non-kernel (of type '%s') to kernel" % type(k))
return _ProductKernel([self, k])
def __imul__(self, k):
return self.__mul__(k)
  def __str__(self):
    # Human-readable form, e.g.
    #   tfp.positive_semidefinite_kernels.SomeKernel("name", batch_shape=[2],
    #   feature_ndims=1, dtype=float32)
    # batch_shape is included only when it is statically known.
    return ('tfp.positive_semidefinite_kernels.{type_name}('
            '"{self_name}"'
            '{maybe_batch_shape}'
            ', feature_ndims={feature_ndims}'
            ', dtype={dtype})'.format(
                type_name=type(self).__name__,
                self_name=self.name,
                maybe_batch_shape=(', batch_shape={}'.format(self.batch_shape)
                                   if self.batch_shape.ndims is not None
                                   else ''),
                feature_ndims=self.feature_ndims,
                dtype=None if self.dtype is None else self.dtype.name))
  def __repr__(self):
    # Unambiguous form in angle brackets; unlike __str__, batch_shape is
    # always printed even when its rank is unknown.
    return ('<tfp.positive_semidefinite_kernels.{type_name} '
            '\'{self_name}\''
            ' batch_shape={batch_shape}'
            ' feature_ndims={feature_ndims}'
            ' dtype={dtype}>'.format(
                type_name=type(self).__name__,
                self_name=self.name,
                batch_shape=self.batch_shape,
                feature_ndims=self.feature_ndims,
                dtype=None if self.dtype is None else self.dtype.name))
def _flatten_summand_list(kernels):
  """Flatten a list of kernels which may contain _SumKernel instances.

  Args:
    kernels: Python list of `PositiveSemidefiniteKernel` instances

  Returns:
    Python list containing the elements of kernels, with any _SumKernel
    instances replaced by their `kernels` property contents.
  """
  flattened = []
  for kernel in kernels:
    # A nested sum contributes its constituents; anything else passes through.
    if isinstance(kernel, _SumKernel):
      flattened.extend(kernel.kernels)
    else:
      flattened.append(kernel)
  return flattened
def _flatten_multiplicand_list(kernels):
  """Flatten a list of kernels which may contain _ProductKernel instances.

  Args:
    kernels: Python list of `PositiveSemidefiniteKernel` instances

  Returns:
    Python list containing the elements of kernels, with any _ProductKernel
    instances replaced by their `kernels` property contents.
  """
  flattened = []
  for kernel in kernels:
    # A nested product contributes its constituents directly.
    if isinstance(kernel, _ProductKernel):
      flattened.extend(kernel.kernels)
    else:
      flattened.append(kernel)
  return flattened
class _SumKernel(PositiveSemidefiniteKernel):
  """Kernel class representing summation over a list of kernels.

  Mathematically this class represents the pointwise sum of several kernels.
  Given two kernels, `k1` and `k2`, and `kp = _SumKernel([k1, k2])`, we have

  ```none
  kp.apply(x, y) = k1(x, y) + k2(x, y)
  ```

  for any `x`, `y` in the feature space (this presumes that the constituent
  kernels all act on the same feature space).

  That the sum is positive semi-definite follows simply from the definition of
  positive semi-definiteness of functions. If we have

  ```none
  sum_i sum_j (c[i]*) c[j] k1(x[i], x[j]) >= 0
  ```

  and

  ```none
  sum_i sum_j (c[i]*) c[j] k2(x[i], x[j]) >= 0
  ```

  for any finite collections `{x[1], ..., x[N]}` in S and `{c[1], ..., c[N]}` in
  the reals (or the complex plane), then we clearly also have the same for the
  sum of `k1` and `k2`.
  """

  def __init__(self, kernels, name=None):
    """Create a kernel which is the sum of `kernels`.

    The input list is 'flattened' in the sense that any entries which are also
    of type `_SumKernel` will have their list of kernels appended to this
    instance's list of kernels. This will reduce the stack depth when actually
    evaluating the sum over kernel applications.

    Args:
      kernels: Python `list` of `PositiveSemidefiniteKernel` instances.
      name: Python `str` name prefixed to Ops created by this class.

    Raises:
      ValueError: `kernels` is an empty list, or `kernels` don't all have the
        same `feature_ndims`.
    """
    if not kernels:
      raise ValueError("Can't create _SumKernel over empty list.")
    # All summands must agree on feature_ndims or apply() is ill-defined.
    if len(set([k.feature_ndims for k in kernels])) > 1:
      raise ValueError(
          "Can't sum kernels with different feature_ndims. Got:\n%s" %
          str([k.feature_ndims for k in kernels]))
    self._kernels = _flatten_summand_list(kernels)
    if name is None:
      name = 'SumKernel'
    # We have ensured the list is non-empty and all feature_ndims are the same.
    # NOTE(review): for kernels whose dtype is known this passes the kernel
    # object `k` itself (not `k.dtype`) into `maybe_get_common_dtype` —
    # presumably that helper accepts anything with a `.dtype` attribute;
    # confirm against its definition.
    super(_SumKernel, self).__init__(
        feature_ndims=kernels[0].feature_ndims,
        dtype=util.maybe_get_common_dtype(
            [None if k.dtype is None else k for k in kernels]),
        name=name)

  @property
  def kernels(self):
    """The list of kernels this _SumKernel sums over."""
    return self._kernels

  def _apply(self, x1, x2, example_ndims=0):
    # Pointwise sum of the constituent kernels' applications.
    return sum([k.apply(x1, x2, example_ndims) for k in self.kernels])

  def _batch_shape(self):
    # Static batch shape is the broadcast of all constituent batch shapes.
    return functools.reduce(tf.broadcast_static_shape,
                            [k.batch_shape for k in self.kernels])

  def _batch_shape_tensor(self):
    # Dynamic (graph-time) analogue of _batch_shape.
    return functools.reduce(tf.broadcast_dynamic_shape,
                            [k.batch_shape_tensor() for k in self.kernels])
class _ProductKernel(PositiveSemidefiniteKernel):
  """Kernel class representing the product over a list of kernels.

  Mathematically this class represents the pointwise product of several kernels.
  Given two kernels, `k1` and `k2`, and `kp = _ProductKernel([k1, k2])`, we have

  ```none
  kp.apply(x, y) = k1(x, y) * k2(x, y)
  ```

  for any x, y in the feature space (this presumes that the constituent kernels
  all act on the same feature space).

  The fact that this product is still positive semi-definite can be shown in a
  variety of ways, many deep and all fascinating, but follows readily from the
  [Schur product theorem](https://en.wikipedia.org/wiki/Schur_product_theorem),
  which states that the Hadamard (element-wise) product of two PSD matrices is
  also PSD.
  """

  def __init__(self, kernels, name=None):
    """Create a kernel which is the product of `kernels`.

    The input list is 'flattened' in the sense that any entries which are also
    of type `_ProductKernel` will have their list of kernels appended to this
    instance's list of kernels. This will reduce the stack depth when actually
    evaluating the product over kernel applications.

    Args:
      kernels: Python `list` of `PositiveSemidefiniteKernel` instances.
      name: Python `str` name prefixed to Ops created by this class.

    Raises:
      ValueError: `kernels` is an empty list, or `kernels` don't all have the
        same `feature_ndims`.
    """
    if not kernels:
      raise ValueError("Can't create _ProductKernel over empty list.")
    # All factors must agree on feature_ndims or apply() is ill-defined.
    if len(set([k.feature_ndims for k in kernels])) > 1:
      raise ValueError(
          "Can't multiply kernels with different feature_ndims. Got:\n%s" %
          str([k.feature_ndims for k in kernels]))
    self._kernels = _flatten_multiplicand_list(kernels)
    if name is None:
      name = 'ProductKernel'
    # We have ensured the list is non-empty and all feature_ndims are the same.
    # NOTE(review): as in _SumKernel, passes kernel objects (not `k.dtype`)
    # into `maybe_get_common_dtype` — confirm the helper accepts them.
    super(_ProductKernel, self).__init__(
        feature_ndims=kernels[0].feature_ndims,
        dtype=util.maybe_get_common_dtype(
            [None if k.dtype is None else k for k in kernels]),
        name=name)

  @property
  def kernels(self):
    """The list of kernels this _ProductKernel multiplies over."""
    return self._kernels

  def _apply(self, x1, x2, example_ndims=0):
    # Pointwise product of the constituent kernels' applications.
    return functools.reduce(
        operator.mul,
        [k.apply(x1, x2, example_ndims) for k in self.kernels])

  def _batch_shape(self):
    # Static batch shape is the broadcast of all constituent batch shapes.
    return functools.reduce(tf.broadcast_static_shape,
                            [k.batch_shape for k in self.kernels])

  def _batch_shape_tensor(self):
    # Dynamic (graph-time) analogue of _batch_shape.
    return functools.reduce(tf.broadcast_dynamic_shape,
                            [k.batch_shape_tensor() for k in self.kernels])
| |
tester.rs
|
extern crate cargo_issue_6659;
#[test]
fn
|
() {
assert_eq!(2 + 2, 4);
}
|
it_works
|
all.go
|
// Copyright © 2021 The Things Network Foundation, The Things Industries B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package band
import (
"go.thethings.network/lorawan-stack/v3/pkg/ttnpb"
)
// All contains all the bands available.
// Keys are band identifiers; each value maps a LoRaWAN regional-parameters
// (PHY) version to the concrete Band definition for that version.
var All = map[string]map[ttnpb.PHYVersion]Band{
	AS_923: {
		ttnpb.RP001_V1_0_2:       AS_923_RP1_v1_0_2,
		ttnpb.RP001_V1_0_2_REV_B: AS_923_RP1_v1_0_2_RevB,
		ttnpb.RP001_V1_0_3_REV_A: AS_923_RP1_v1_0_3_RevA,
		ttnpb.RP001_V1_1_REV_A:   AS_923_RP1_v1_1_RevA,
		ttnpb.RP001_V1_1_REV_B:   AS_923_RP1_v1_1_RevB,
	},
	AU_915_928: {
		ttnpb.TS001_V1_0_1:       AU_915_928_TS1_v1_0_1,
		ttnpb.RP001_V1_0_2:       AU_915_928_RP1_v1_0_2,
		ttnpb.RP001_V1_0_2_REV_B: AU_915_928_RP1_v1_0_2_RevB,
		ttnpb.RP001_V1_0_3_REV_A: AU_915_928_RP1_v1_0_3_RevA,
		ttnpb.RP001_V1_1_REV_A:   AU_915_928_RP1_v1_1_RevA,
		ttnpb.RP001_V1_1_REV_B:   AU_915_928_RP1_v1_1_RevB,
	},
	CN_470_510: {
		ttnpb.TS001_V1_0_1:       CN_470_510_TS1_v1_0_1,
		ttnpb.RP001_V1_0_2:       CN_470_510_RP1_v1_0_2,
		ttnpb.RP001_V1_0_2_REV_B: CN_470_510_RP1_v1_0_2_RevB,
		ttnpb.RP001_V1_0_3_REV_A: CN_470_510_RP1_v1_0_3_RevA,
		ttnpb.RP001_V1_1_REV_A:   CN_470_510_RP1_v1_1_RevA,
		ttnpb.RP001_V1_1_REV_B:   CN_470_510_RP1_v1_1_RevB,
	},
	CN_779_787: {
		ttnpb.TS001_V1_0:         CN_779_787_RP1_V1_0,
		ttnpb.TS001_V1_0_1:       CN_779_787_RP1_V1_0_1,
		ttnpb.RP001_V1_0_2:       CN_779_787_RP1_V1_0_2,
		ttnpb.RP001_V1_0_2_REV_B: CN_779_787_RP1_V1_0_2_RevB,
		ttnpb.RP001_V1_0_3_REV_A: CN_779_787_RP1_V1_0_3_RevA,
		ttnpb.RP001_V1_1_REV_A:   CN_779_787_RP1_V1_1_RevA,
		ttnpb.RP001_V1_1_REV_B:   CN_779_787_RP1_V1_1_RevB,
	},
	EU_433: {
		ttnpb.TS001_V1_0:         EU_433_TS1_V1_0,
		ttnpb.TS001_V1_0_1:       EU_433_TS1_V1_0_1,
		ttnpb.RP001_V1_0_2:       EU_433_RP1_V1_0_2,
		ttnpb.RP001_V1_0_2_REV_B: EU_433_RP1_V1_0_2_Rev_B,
		ttnpb.RP001_V1_0_3_REV_A: EU_433_RP1_V1_0_3_Rev_A,
		ttnpb.RP001_V1_1_REV_A:   EU_433_RP1_V1_1_Rev_A,
		ttnpb.RP001_V1_1_REV_B:   EU_433_RP1_V1_1_Rev_B,
	},
	EU_863_870: {
		ttnpb.TS001_V1_0:         EU_863_870_TS1_V1_0,
		ttnpb.TS001_V1_0_1:       EU_863_870_TS1_V1_0_1,
		ttnpb.RP001_V1_0_2:       EU_863_870_RP1_V1_0_2,
		ttnpb.RP001_V1_0_2_REV_B: EU_863_870_RP1_V1_0_2_Rev_B,
		ttnpb.RP001_V1_0_3_REV_A: EU_863_870_RP1_V1_0_3_Rev_A,
		ttnpb.RP001_V1_1_REV_A:   EU_863_870_RP1_V1_1_Rev_A,
		ttnpb.RP001_V1_1_REV_B:   EU_863_870_RP1_V1_1_Rev_B,
	},
	IN_865_867: {
		ttnpb.RP001_V1_0_2_REV_B: IN_865_867_RP1_V1_0_2_Rev_B,
		ttnpb.RP001_V1_0_3_REV_A: IN_865_867_RP1_V1_0_3_Rev_A,
		ttnpb.RP001_V1_1_REV_A:   IN_865_867_RP1_V1_1_Rev_A,
		ttnpb.RP001_V1_1_REV_B:   IN_865_867_RP1_V1_1_Rev_B,
	},
	// ISM 2400 is PHY-version independent: every version maps to the same band.
	ISM_2400: {
		ttnpb.TS001_V1_0:         ISM_2400_Universal,
		ttnpb.TS001_V1_0_1:       ISM_2400_Universal,
		ttnpb.RP001_V1_0_2:       ISM_2400_Universal,
		ttnpb.RP001_V1_0_2_REV_B: ISM_2400_Universal,
		ttnpb.RP001_V1_0_3_REV_A: ISM_2400_Universal,
		ttnpb.RP001_V1_1_REV_A:   ISM_2400_Universal,
		ttnpb.RP001_V1_1_REV_B:   ISM_2400_Universal,
	},
	KR_920_923: {
		ttnpb.RP001_V1_0_2:       KR_920_923_RP1_V1_0_2,
		ttnpb.RP001_V1_0_2_REV_B: KR_920_923_RP1_V1_0_2_Rev_B,
		ttnpb.RP001_V1_0_3_REV_A: KR_920_923_RP1_V1_0_3_Rev_A,
		ttnpb.RP001_V1_1_REV_A:   KR_920_923_RP1_V1_1_Rev_A,
		ttnpb.RP001_V1_1_REV_B:   KR_920_923_RP1_V1_1_Rev_B,
	},
	RU_864_870: {
		ttnpb.RP001_V1_0_3_REV_A: RU_864_870_RP1_V1_0_3_Rev_A,
		ttnpb.RP001_V1_1_REV_A:   RU_864_870_RP1_V1_1_Rev_A,
		ttnpb.RP001_V1_1_REV_B:   RU_864_870_RP1_V1_1_Rev_B,
	},
	US_902_928: {
		ttnpb.TS001_V1_0:         US_902_928_TS1_V1_0,
		ttnpb.TS001_V1_0_1:       US_902_928_TS1_V1_0_1,
		ttnpb.RP001_V1_0_2:       US_902_928_RP1_V1_0_2,
		ttnpb.RP001_V1_0_2_REV_B: US_902_928_RP1_V1_0_2_Rev_B,
		ttnpb.RP001_V1_0_3_REV_A: US_902_928_RP1_V1_0_3_Rev_A,
		ttnpb.RP001_V1_1_REV_A:   US_902_928_RP1_V1_1_Rev_A,
		ttnpb.RP001_V1_1_REV_B:   US_902_928_RP1_V1_1_Rev_B,
	},
}
// Get returns the band if it was found, and returns an error otherwise.
func G
|
id string, version ttnpb.PHYVersion) (Band, error) {
versions, ok := All[id]
if !ok {
return Band{}, errBandNotFound.WithAttributes("id", id, "version", version)
}
band, ok := versions[version]
if !ok {
return Band{}, errBandNotFound.WithAttributes("id", id, "version", version)
}
return band, nil
}
// latestSupportedVersion is the PHY version GetLatest resolves bands against.
const latestSupportedVersion = ttnpb.RP001_V1_1_REV_B

// GetLatest returns the latest version of the band if it was found,
// and returns an error otherwise.
func GetLatest(id string) (Band, error) {
	return Get(id, latestSupportedVersion)
}
|
et(
|
import_.py
|
from aiocloudflare.commons.auth import Auth
class Import_(Auth):
|
_endpoint1 = "zones"
_endpoint2 = "dns_records/import"
_endpoint3 = None
|
|
iso_8601_datetime.rs
|
use anyhow::Error;
use chrono::{DateTime, TimeZone, Utc};
use serde::{self, Deserialize, Deserializer, Serializer};
pub fn
|
() -> DateTime<Utc> {
Utc.ymd(0, 1, 1).and_hms(0, 0, 0)
}
/// Render a UTC timestamp in ISO-8601 form with a literal `Z` suffix,
/// e.g. `2021-01-02T03:04:05Z`.
pub fn convert_datetime_to_str(datetime: DateTime<Utc>) -> String {
    format!("{}", datetime.format("%Y-%m-%dT%H:%M:%SZ"))
}
/// Parse an ISO-8601 timestamp into UTC, tolerating a trailing `Z`
/// (rewritten to an explicit `+00:00` offset before RFC 3339 parsing).
pub fn convert_str_to_datetime(s: &str) -> Result<DateTime<Utc>, Error> {
    let rfc3339 = s.replace("Z", "+00:00");
    match DateTime::parse_from_rfc3339(&rfc3339) {
        Ok(parsed) => Ok(parsed.with_timezone(&Utc)),
        Err(e) => Err(e.into()),
    }
}
/// Serde `serialize_with` hook: write the timestamp as an ISO-8601 string.
pub fn serialize<S>(date: &DateTime<Utc>, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
{
    serializer.serialize_str(&convert_datetime_to_str(*date))
}
/// Serde `deserialize_with` hook: read a string and parse it as an
/// ISO-8601 timestamp, mapping parse failures to serde errors.
pub fn deserialize<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
where
    D: Deserializer<'de>,
{
    let s = String::deserialize(deserializer)?;
    convert_str_to_datetime(&s).map_err(serde::de::Error::custom)
}
|
sentinel_datetime
|
rtmp.go
|
package rtmp
import (
"bufio"
"bytes"
"crypto/hmac"
"crypto/rand"
"crypto/sha256"
"encoding/hex"
"fmt"
"github.com/VKCOM/joy4/av"
"github.com/VKCOM/joy4/av/avutil"
"github.com/VKCOM/joy4/format/flv"
"github.com/VKCOM/joy4/format/flv/flvio"
"github.com/VKCOM/joy4/utils/bits/pio"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"io"
"net"
"net/url"
"strings"
"time"
)
var (
	// Debug enables verbose protocol logging throughout this package.
	Debug bool
	// MaxChunkSize is the RTMP chunk size this side announces (128 MiB).
	MaxChunkSize = 128 * 1024 * 1024
)
// ParseURL parses an RTMP URL, appending the default RTMP port (1935)
// when the host carries no explicit port.
func ParseURL(uri string) (u *url.URL, err error) {
	u, err = url.Parse(uri)
	if err != nil {
		return nil, err
	}
	// SplitHostPort fails when no port is present; use that as the signal
	// to append the default.
	if _, _, splitErr := net.SplitHostPort(u.Host); splitErr != nil {
		u.Host += ":1935"
	}
	return u, nil
}
// Dial connects to an RTMP URL with no timeout.
func Dial(uri string) (conn *Conn, err error) {
	return DialTimeout(uri, 0)
}

// DialTimeout connects to an RTMP URL, applying timeout to the TCP dial.
func DialTimeout(uri string, timeout time.Duration) (conn *Conn, err error) {
	u, err := ParseURL(uri)
	if err != nil {
		return nil, err
	}
	dialer := net.Dialer{Timeout: timeout}
	netconn, err := dialer.Dial("tcp", u.Host)
	if err != nil {
		return nil, err
	}
	conn = NewConn(netconn)
	conn.URL = u
	return conn, nil
}
// Server accepts RTMP client connections and dispatches them to the
// configured handlers.
type Server struct {
	// Addr is the TCP listen address; ":1935" is used when empty.
	Addr string
	// HandlePublish is invoked for connections that issued "publish".
	HandlePublish func(*Conn)
	// HandlePlay is invoked for connections that issued "play".
	HandlePlay func(*Conn)
	// HandleConn, when set, takes over the connection before the command
	// phase and suppresses HandlePublish/HandlePlay dispatch.
	HandleConn func(*Conn)
	// CreateConn, when set, constructs the Conn wrapper (defaults to NewConn).
	CreateConn func(net.Conn) *Conn
}
// handleConn drives one accepted connection: either hands it wholesale to
// HandleConn, or runs the handshake/command phase and dispatches to the
// play or publish handler according to what the client requested.
func (self *Server) handleConn(conn *Conn) (err error) {
	if self.HandleConn != nil {
		self.HandleConn(conn)
	} else {
		// Run handshake + connect/createStream/(publish|play) exchange.
		if err = conn.prepare(stageCommandDone, 0); err != nil {
			return
		}
		// prepare() sets exactly one of these flags on success.
		if conn.playing {
			if self.HandlePlay != nil {
				self.HandlePlay(conn)
			}
		} else if conn.publishing {
			if self.HandlePublish != nil {
				self.HandlePublish(conn)
			}
		}
	}
	return
}
// Listen opens the server's TCP listener on Addr (":1935" by default).
func (self *Server) Listen() (listener *net.TCPListener, err error) {
	addr := self.Addr
	if addr == "" {
		addr = ":1935"
	}
	var tcpaddr *net.TCPAddr
	if tcpaddr, err = net.ResolveTCPAddr("tcp", addr); err != nil {
		err = errors.Errorf("rtmp: ListenAndServe: %s", err)
		return nil, err
	}
	if listener, err = net.ListenTCP("tcp", tcpaddr); err != nil {
		return nil, err
	}
	logrus.Infof("rtmp: server: listening on %s", addr)
	return listener, nil
}
func (self *Server) Serve(listener *net.TCPListener) (err error) {
for {
var netconn net.Conn
if netconn, err = listener.Accept(); err != nil
|
logrus.Info("rtmp: server: accepted")
var conn *Conn
if self.CreateConn != nil {
conn = self.CreateConn(netconn)
} else {
conn = NewConn(netconn)
}
conn.isserver = true
go func() {
err := self.handleConn(conn)
logrus.Infof("rtmp: server: client closed err: %v", err)
}()
}
}
// ListenAndServe opens the listener on s.Addr (":1935" by default) and
// serves incoming RTMP connections until Serve returns an error.
func (self *Server) ListenAndServe() error {
	// Idiom fix: avoid capturing both results in an if-statement and
	// returning from an else branch (golint "indent-error-flow").
	listener, err := self.Listen()
	if err != nil {
		return err
	}
	return self.Serve(listener)
}
// Connection lifecycle stages, in order of progression.
const (
	stageHandshakeDone = iota + 1
	stageCommandDone
	stageCodecDataDone
)

// Direction flags passed to prepare().
const (
	prepareReading = iota + 1
	prepareWriting
)
// Conn is a single RTMP connection (client or server side). It implements
// packet-level reading/writing on top of the RTMP chunk protocol.
type Conn struct {
	URL             *url.URL
	OnPlayOrPublish func(string, flvio.AMFMap) error

	Prober  *flv.Prober
	streams []av.CodecData

	txbytes uint64
	rxbytes uint64

	// Buffered I/O over the raw TCP connection.
	bufr *bufio.Reader
	bufw *bufio.Writer
	ackn uint32

	// Scratch buffers reused across writes/reads (see tmpwbuf).
	writebuf []byte
	readbuf  []byte

	netconn   net.Conn
	txrxcount *txrxcount

	// Negotiated chunk sizes and peer window-ack size.
	writeMaxChunkSize int
	readMaxChunkSize  int
	readAckSize       uint32
	readcsmap         map[uint32]*chunkStream

	isserver            bool
	publishing, playing bool
	reading, writing    bool
	stage               int

	avmsgsid uint32

	// State of the most recently received command message.
	gotcommand     bool
	commandname    string
	commandtransid float64
	commandobj     flvio.AMFMap
	commandparams  []interface{}

	// State of the most recently received message of any type.
	gotmsg      bool
	timestamp   uint32
	msgdata     []byte
	msgtypeid   uint8
	datamsgvals []interface{}
	avtag       flvio.Tag

	eventtype uint16
}
type txrxcount struct {
io.ReadWriter
txbytes uint64
rxbytes uint64
}
func (self *txrxcount) Read(p []byte) (int, error) {
n, err := self.ReadWriter.Read(p)
self.rxbytes += uint64(n)
return n, err
}
func (self *txrxcount) Write(p []byte) (int, error) {
n, err := self.ReadWriter.Write(p)
self.txbytes += uint64(n)
return n, err
}
// NewConn wraps an established network connection in an RTMP Conn with
// protocol defaults: 128-byte initial chunk sizes (per the RTMP spec),
// buffered I/O, byte counters, and a fresh FLV prober.
func NewConn(netconn net.Conn) *Conn {
	return &Conn{
		Prober:            &flv.Prober{},
		netconn:           netconn,
		readcsmap:         make(map[uint32]*chunkStream),
		readMaxChunkSize:  128,
		writeMaxChunkSize: 128,
		bufr:              bufio.NewReaderSize(netconn, pio.RecommendBufioSize),
		bufw:              bufio.NewWriterSize(netconn, pio.RecommendBufioSize),
		txrxcount:         &txrxcount{ReadWriter: netconn},
		writebuf:          make([]byte, 4096),
		readbuf:           make([]byte, 4096),
	}
}
// chunkStream tracks the reassembly state of one incoming RTMP chunk
// stream (keyed by chunk stream id in Conn.readcsmap).
type chunkStream struct {
	timenow     uint32
	timedelta   uint32
	hastimeext  bool
	msgsid      uint32
	msgtypeid   uint8
	msgdatalen  uint32
	msgdataleft uint32
	msghdrtype  uint8
	msgdata     []byte
}

// Start begins reassembly of a new message: allocates the payload buffer
// and resets the remaining-byte counter to the full message length.
func (self *chunkStream) Start() {
	self.msgdataleft = self.msgdatalen
	self.msgdata = make([]byte, self.msgdatalen)
}
// RTMP message type ids (RTMP spec §5.4 / §6–7).
const (
	msgtypeidUserControl      = 4
	msgtypeidAck              = 3
	msgtypeidWindowAckSize    = 5
	msgtypeidSetPeerBandwidth = 6
	msgtypeidSetChunkSize     = 1
	msgtypeidCommandMsgAMF0   = 20
	msgtypeidCommandMsgAMF3   = 17
	msgtypeidDataMsgAMF0      = 18
	msgtypeidDataMsgAMF3      = 15
	msgtypeidVideoMsg         = 9
	msgtypeidAudioMsg         = 8
)

// User-control event types carried in msgtypeidUserControl messages.
const (
	eventtypeStreamBegin      = 0
	eventtypeSetBufferLength  = 3
	eventtypeStreamIsRecorded = 4
)
// NetConn exposes the underlying network connection.
func (self *Conn) NetConn() net.Conn {
	return self.netconn
}

// TxBytes reports the total bytes written to the peer.
func (self *Conn) TxBytes() uint64 {
	return self.txrxcount.txbytes
}

// RxBytes reports the total bytes read from the peer.
func (self *Conn) RxBytes() uint64 {
	return self.txrxcount.rxbytes
}

// Close closes the underlying network connection.
func (self *Conn) Close() (err error) {
	return self.netconn.Close()
}
// pollCommand reads messages until a complete AMF command has been parsed
// (Conn.commandname etc. are then populated).
func (self *Conn) pollCommand() (err error) {
	for {
		if err = self.pollMsg(); err != nil {
			return
		}
		if self.gotcommand {
			return
		}
	}
}

// pollAVTag reads messages until an audio or video message arrives and
// returns its FLV tag.
func (self *Conn) pollAVTag() (tag flvio.Tag, err error) {
	for {
		if err = self.pollMsg(); err != nil {
			return
		}
		switch self.msgtypeid {
		case msgtypeidVideoMsg, msgtypeidAudioMsg:
			tag = self.avtag
			return
		}
	}
}

// pollMsg resets the per-message state and reads chunks until a complete
// message has been reassembled (gotmsg set by readChunk).
func (self *Conn) pollMsg() (err error) {
	self.gotmsg = false
	self.gotcommand = false
	self.datamsgvals = nil
	self.avtag = flvio.Tag{}
	for {
		if err = self.readChunk(); err != nil {
			return
		}
		if self.gotmsg {
			return
		}
	}
}
func SplitPath(u *url.URL) (app, stream string) {
pathsegs := strings.SplitN(u.RequestURI(), "/", 3)
if len(pathsegs) > 1 {
app = pathsegs[1]
}
if len(pathsegs) > 2 {
stream = pathsegs[2]
}
return
}
// getTcUrl rebuilds the RTMP "tcUrl" (scheme://host/app) from a full
// stream URL by keeping only the application segment of the path.
func getTcUrl(u *url.URL) string {
	app, _ := SplitPath(u)
	copied := *u
	copied.Path = "/" + app
	return copied.String()
}
// createURL joins the app and play path segments (dropping empty pieces)
// and stamps the scheme and host from tcurl onto the result, producing
// the canonical stream URL.
func createURL(tcurl, app, play string) (u *url.URL) {
	segments := []string{""}
	for _, segment := range strings.Split(app+"/"+play, "/") {
		if len(segment) > 0 {
			segments = append(segments, segment)
		}
	}
	// Guarantee at least "/" even when both inputs were empty.
	if len(segments) < 2 {
		segments = append(segments, "")
	}
	u, _ = url.ParseRequestURI(strings.Join(segments, "/"))
	if tcurl != "" {
		if tu, _ := url.Parse(tcurl); tu != nil {
			u.Host = tu.Host
			u.Scheme = tu.Scheme
		}
	}
	return
}
var CodecTypes = flv.CodecTypes
// writeBasicConf sends the three protocol-control messages both sides
// emit after the handshake: SetChunkSize, WindowAckSize, SetPeerBandwidth.
func (self *Conn) writeBasicConf() (err error) {
	// > SetChunkSize
	if err = self.writeSetChunkSize(MaxChunkSize); err != nil {
		return
	}
	// > WindowAckSize
	if err = self.writeWindowAckSize(5000000); err != nil {
		return
	}
	// > SetPeerBandwidth
	if err = self.writeSetPeerBandwidth(5000000, 2); err != nil {
		return
	}
	return
}
// readConnect runs the server side of the RTMP command sequence: waits for
// the client's "connect", replies with _result, then loops until the client
// issues createStream followed by publish or play. On publish/play the
// connection URL and direction flags are set and the stage advances.
func (self *Conn) readConnect() (err error) {
	var connectpath string

	// < connect("app")
	if err = self.pollCommand(); err != nil {
		return
	}
	if self.commandname != "connect" {
		err = fmt.Errorf("rtmp: first command is not connect")
		return
	}
	if self.commandobj == nil {
		err = fmt.Errorf("rtmp: connect command params invalid")
		return
	}

	var ok bool
	var _app, _tcurl interface{}
	if _app, ok = self.commandobj["app"]; !ok {
		err = fmt.Errorf("rtmp: `connect` params missing `app`")
		return
	}
	connectpath, _ = _app.(string)

	var tcurl string
	// Clients disagree on key casing; accept both "tcUrl" and "tcurl".
	if _tcurl, ok = self.commandobj["tcUrl"]; !ok {
		_tcurl, ok = self.commandobj["tcurl"]
	}
	if ok {
		tcurl, _ = _tcurl.(string)
	}
	connectparams := self.commandobj

	if err = self.writeBasicConf(); err != nil {
		return
	}

	// > _result("NetConnection.Connect.Success")
	if err = self.writeCommandMsg(3, 0, "_result", self.commandtransid,
		flvio.AMFMap{
			"fmtVer":       "FMS/3,0,1,123",
			"capabilities": 31,
		},
		flvio.AMFMap{
			"level":          "status",
			"code":           "NetConnection.Connect.Success",
			"description":    "Connection succeeded.",
			"objectEncoding": 3,
		},
	); err != nil {
		return
	}

	if err = self.flushWrite(); err != nil {
		return
	}

	for {
		if err = self.pollMsg(); err != nil {
			return
		}
		if self.gotcommand {
			switch self.commandname {

			// < createStream
			case "createStream":
				self.avmsgsid = uint32(1)
				// > _result(streamid)
				if err = self.writeCommandMsg(3, 0, "_result", self.commandtransid, nil, self.avmsgsid); err != nil {
					return
				}
				if err = self.flushWrite(); err != nil {
					return
				}

			// < publish("path")
			case "publish":
				if Debug {
					logrus.Debug("rtmp: < publish")
				}
				if len(self.commandparams) < 1 {
					err = fmt.Errorf("rtmp: publish params invalid")
					return
				}
				publishpath, _ := self.commandparams[0].(string)

				var cberr error
				if self.OnPlayOrPublish != nil {
					cberr = self.OnPlayOrPublish(self.commandname, connectparams)
				}

				// > onStatus()
				if err = self.writeCommandMsg(5, self.avmsgsid,
					"onStatus", self.commandtransid, nil,
					flvio.AMFMap{
						"level":       "status",
						"code":        "NetStream.Publish.Start",
						"description": "Start publishing",
					},
				); err != nil {
					return
				}
				if err = self.flushWrite(); err != nil {
					return
				}

				// The status reply goes out even on callback failure; only
				// afterwards is the error surfaced and the handshake aborted.
				if cberr != nil {
					err = fmt.Errorf("rtmp: OnPlayOrPublish check failed")
					return
				}

				self.URL = createURL(tcurl, connectpath, publishpath)
				self.publishing = true
				self.reading = true
				self.stage++
				return

			// < play("path")
			case "play":
				if Debug {
					logrus.Debug("rtmp: < play")
				}
				if len(self.commandparams) < 1 {
					err = fmt.Errorf("rtmp: command play params invalid")
					return
				}
				playpath, _ := self.commandparams[0].(string)

				// > streamBegin(streamid)
				if err = self.writeStreamBegin(self.avmsgsid); err != nil {
					return
				}

				// > onStatus()
				if err = self.writeCommandMsg(5, self.avmsgsid,
					"onStatus", self.commandtransid, nil,
					flvio.AMFMap{
						"level":       "status",
						"code":        "NetStream.Play.Start",
						"description": "Start live",
					},
				); err != nil {
					return
				}

				// > |RtmpSampleAccess()
				if err = self.writeDataMsg(5, self.avmsgsid,
					"|RtmpSampleAccess", true, true,
				); err != nil {
					return
				}

				if err = self.flushWrite(); err != nil {
					return
				}

				self.URL = createURL(tcurl, connectpath, playpath)
				self.playing = true
				self.writing = true
				self.stage++
				return
			}
		}
	}

	return
}
// checkConnectResult inspects a "_result" reply to connect and reports
// whether it signals NetConnection.Connect.Success; on failure errmsg
// describes which check rejected the reply.
func (self *Conn) checkConnectResult() (ok bool, errmsg string) {
	if len(self.commandparams) < 1 {
		return false, "params length < 1"
	}
	obj, _ := self.commandparams[0].(flvio.AMFMap)
	if obj == nil {
		return false, "params[0] not object"
	}
	rawCode := obj["code"]
	if rawCode == nil {
		return false, "code invalid"
	}
	code, _ := rawCode.(string)
	if code != "NetConnection.Connect.Success" {
		return false, "code != NetConnection.Connect.Success"
	}
	return true, ""
}
// checkCreateStreamResult extracts the message-stream id from a "_result"
// reply to createStream; ok is false only when no parameter is present
// (a non-numeric parameter yields id 0 with ok true, as before).
func (self *Conn) checkCreateStreamResult() (ok bool, avmsgsid uint32) {
	if len(self.commandparams) == 0 {
		return false, 0
	}
	id, _ := self.commandparams[0].(float64)
	return true, uint32(id)
}
// probe feeds incoming A/V tags to the FLV prober until codec data for all
// streams has been determined, then records the streams and advances the
// connection stage.
func (self *Conn) probe() (err error) {
	for !self.Prober.Probed() {
		var tag flvio.Tag
		if tag, err = self.pollAVTag(); err != nil {
			return
		}
		if err = self.Prober.PushTag(tag, int32(self.timestamp)); err != nil {
			return
		}
	}
	self.streams = self.Prober.Streams
	self.stage++
	return
}
// writeConnect runs the client side of the connect exchange: sends the
// basic protocol configuration and the "connect" command, then polls
// until the server's "_result" confirms the connection, answering any
// WindowAckSize message along the way.
func (self *Conn) writeConnect(path string) (err error) {
	if err = self.writeBasicConf(); err != nil {
		return
	}

	// > connect("app")
	if Debug {
		logrus.Debugf("rtmp: > connect('%s') host=%s\n", path, self.URL.Host)
	}
	if err = self.writeCommandMsg(3, 0, "connect", 1,
		flvio.AMFMap{
			"app":           path,
			"flashVer":      "MAC 22,0,0,192",
			"tcUrl":         getTcUrl(self.URL),
			"fpad":          false,
			"capabilities":  15,
			"audioCodecs":   4071,
			"videoCodecs":   252,
			"videoFunction": 1,
		},
	); err != nil {
		return
	}

	if err = self.flushWrite(); err != nil {
		return
	}

	for {
		if err = self.pollMsg(); err != nil {
			return
		}
		if self.gotcommand {
			// < _result("NetConnection.Connect.Success")
			if self.commandname == "_result" {
				var ok bool
				var errmsg string
				if ok, errmsg = self.checkConnectResult(); !ok {
					err = fmt.Errorf("rtmp: command connect failed: %s", errmsg)
					return
				}
				if Debug {
					logrus.Debug("rtmp: < _result() of connect\n")
				}
				break
			}
		} else {
			// Record the peer's window-ack size and answer with our own.
			if self.msgtypeid == msgtypeidWindowAckSize {
				if len(self.msgdata) == 4 {
					self.readAckSize = pio.U32BE(self.msgdata)
				}
				if err = self.writeWindowAckSize(0xffffffff); err != nil {
					return
				}
			}
		}
	}
	return
}
// connectPublish performs the client-side publish sequence:
// connect -> createStream -> publish, then marks the connection as a
// writing/publishing endpoint and advances the stage.
func (self *Conn) connectPublish() (err error) {
	connectpath, publishpath := SplitPath(self.URL)

	if err = self.writeConnect(connectpath); err != nil {
		return
	}

	transid := 2

	// > createStream()
	if Debug {
		logrus.Debug("rtmp: > createStream()\n")
	}
	if err = self.writeCommandMsg(3, 0, "createStream", transid, nil); err != nil {
		return
	}
	transid++

	if err = self.flushWrite(); err != nil {
		return
	}

	for {
		if err = self.pollMsg(); err != nil {
			return
		}
		if self.gotcommand {
			// < _result(avmsgsid) of createStream
			if self.commandname == "_result" {
				var ok bool
				if ok, self.avmsgsid = self.checkCreateStreamResult(); !ok {
					err = fmt.Errorf("rtmp: createStream command failed")
					return
				}
				break
			}
		}
	}

	// > publish('app')
	if Debug {
		logrus.Debugf("rtmp: > publish('%s')\n", publishpath)
	}
	if err = self.writeCommandMsg(8, self.avmsgsid, "publish", transid, nil, publishpath); err != nil {
		return
	}
	transid++

	if err = self.flushWrite(); err != nil {
		return
	}

	self.writing = true
	self.publishing = true
	self.stage++
	return
}
// connectPlay performs the client-side play sequence:
// connect -> createStream (+SetBufferLength) -> play, then marks the
// connection as a reading/playing endpoint and advances the stage.
func (self *Conn) connectPlay() (err error) {
	connectpath, playpath := SplitPath(self.URL)

	if err = self.writeConnect(connectpath); err != nil {
		return
	}

	// > createStream()
	if Debug {
		logrus.Debug("rtmp: > createStream()\n")
	}
	if err = self.writeCommandMsg(3, 0, "createStream", 2, nil); err != nil {
		return
	}

	// > SetBufferLength 0,100ms
	if err = self.writeSetBufferLength(0, 100); err != nil {
		return
	}

	if err = self.flushWrite(); err != nil {
		return
	}

	for {
		if err = self.pollMsg(); err != nil {
			return
		}
		if self.gotcommand {
			// < _result(avmsgsid) of createStream
			if self.commandname == "_result" {
				var ok bool
				if ok, self.avmsgsid = self.checkCreateStreamResult(); !ok {
					err = fmt.Errorf("rtmp: createStream command failed")
					return
				}
				break
			}
		}
	}

	// > play('app')
	if Debug {
		logrus.Debugf("rtmp: > play('%s')\n", playpath)
	}
	if err = self.writeCommandMsg(8, self.avmsgsid, "play", 0, nil, playpath); err != nil {
		return
	}
	if err = self.flushWrite(); err != nil {
		return
	}

	self.reading = true
	self.playing = true
	self.stage++
	return
}
// ReadPacket returns the next av.Packet from the stream, running the
// handshake/connect/probe sequence first if it has not happened yet.
// Packets buffered by the prober during codec probing are drained before
// new tags are read from the wire.
func (self *Conn) ReadPacket() (pkt av.Packet, err error) {
	if err = self.prepare(stageCodecDataDone, prepareReading); err != nil {
		return
	}

	if !self.Prober.Empty() {
		pkt = self.Prober.PopPacket()
		return
	}

	// Fix: removed the unreachable `return` that followed this infinite
	// loop in the original (flagged by go vet as dead code); the loop
	// itself is a terminating statement.
	for {
		var tag flvio.Tag
		if tag, err = self.pollAVTag(); err != nil {
			return
		}
		var ok bool
		if pkt, ok = self.Prober.TagToPacket(tag, int32(self.timestamp)); ok {
			return
		}
	}
}
// Prepare drives the connection through the handshake and command phases
// without choosing a read/write direction.
func (self *Conn) Prepare() (err error) {
	return self.prepare(stageCommandDone, 0)
}

// prepare advances the connection state machine until it reaches `stage`.
// `flags` selects the client-side direction (prepareReading => play,
// otherwise publish); servers derive direction from the peer's command.
func (self *Conn) prepare(stage int, flags int) (err error) {
	for self.stage < stage {
		switch self.stage {
		case 0:
			// Raw TCP: perform the RTMP handshake for our role.
			if self.isserver {
				if err = self.handshakeServer(); err != nil {
					return
				}
			} else {
				if err = self.handshakeClient(); err != nil {
					return
				}
			}

		case stageHandshakeDone:
			// Command phase: connect/createStream/(publish|play).
			if self.isserver {
				if err = self.readConnect(); err != nil {
					return
				}
			} else {
				if flags == prepareReading {
					if err = self.connectPlay(); err != nil {
						return
					}
				} else {
					if err = self.connectPublish(); err != nil {
						return
					}
				}
			}

		case stageCommandDone:
			// Codec phase: probing only makes sense when reading; writers
			// must supply codec data via WriteHeader first.
			if flags == prepareReading {
				if err = self.probe(); err != nil {
					return
				}
			} else {
				err = fmt.Errorf("rtmp: call WriteHeader() before WritePacket()")
				return
			}
		}
	}
	return
}
// Streams returns the codec data for all streams, probing the incoming
// feed first if necessary.
func (self *Conn) Streams() (streams []av.CodecData, err error) {
	if err = self.prepare(stageCodecDataDone, prepareReading); err != nil {
		return
	}
	streams = self.streams
	return
}

// WritePacket converts pkt to an FLV tag for its stream and sends it.
func (self *Conn) WritePacket(pkt av.Packet) (err error) {
	if err = self.prepare(stageCodecDataDone, prepareWriting); err != nil {
		return
	}
	stream := self.streams[pkt.Idx]
	tag, timestamp := flv.PacketToTag(pkt, stream)
	if Debug {
		logrus.Debug("rtmp: WritePacket", pkt.Idx, pkt.Time, pkt.CompositionTime)
	}
	if err = self.writeAVTag(tag, int32(timestamp)); err != nil {
		return
	}
	return
}

// WriteTrailer flushes any buffered output; RTMP has no explicit trailer.
func (self *Conn) WriteTrailer() (err error) {
	if err = self.flushWrite(); err != nil {
		return
	}
	return
}

// WriteHeader sends the stream metadata (onMetaData) and per-stream codec
// configuration tags, then records the streams and advances the stage.
func (self *Conn) WriteHeader(streams []av.CodecData) (err error) {
	if err = self.prepare(stageCommandDone, prepareWriting); err != nil {
		return
	}

	var metadata flvio.AMFMap
	if metadata, err = flv.NewMetadataByStreams(streams); err != nil {
		return
	}

	// > onMetaData()
	if err = self.writeDataMsg(5, self.avmsgsid, "onMetaData", metadata); err != nil {
		return
	}

	// > Videodata(decoder config)
	// > Audiodata(decoder config)
	for _, stream := range streams {
		var ok bool
		var tag flvio.Tag
		if tag, ok, err = flv.CodecDataToTag(stream); err != nil {
			return
		}
		if ok {
			if err = self.writeAVTag(tag, 0); err != nil {
				return
			}
		}
	}

	self.streams = streams
	self.stage++
	return
}
// tmpwbuf returns a scratch write buffer of at least n bytes, growing the
// shared buffer when needed. Contents are not zeroed; callers overwrite
// the prefix they use.
func (self *Conn) tmpwbuf(n int) []byte {
	if len(self.writebuf) < n {
		self.writebuf = make([]byte, n)
	}
	return self.writebuf
}
// writeSetChunkSize announces a new outgoing chunk size and records it so
// subsequent writes are chunked accordingly.
func (self *Conn) writeSetChunkSize(size int) (err error) {
	self.writeMaxChunkSize = size
	b := self.tmpwbuf(chunkHeaderLength + 4)
	n := self.fillChunkHeader(b, 2, 0, msgtypeidSetChunkSize, 0, 4)
	pio.PutU32BE(b[n:], uint32(size))
	n += 4
	_, err = self.bufw.Write(b[:n])
	return
}

// writeAck acknowledges receipt of `seqnum` bytes.
func (self *Conn) writeAck(seqnum uint32) (err error) {
	b := self.tmpwbuf(chunkHeaderLength + 4)
	n := self.fillChunkHeader(b, 2, 0, msgtypeidAck, 0, 4)
	pio.PutU32BE(b[n:], seqnum)
	n += 4
	_, err = self.bufw.Write(b[:n])
	return
}

// writeWindowAckSize announces our window-acknowledgement size.
func (self *Conn) writeWindowAckSize(size uint32) (err error) {
	b := self.tmpwbuf(chunkHeaderLength + 4)
	n := self.fillChunkHeader(b, 2, 0, msgtypeidWindowAckSize, 0, 4)
	pio.PutU32BE(b[n:], size)
	n += 4
	_, err = self.bufw.Write(b[:n])
	return
}

// writeSetPeerBandwidth sends the peer-bandwidth message (size + limit type).
func (self *Conn) writeSetPeerBandwidth(acksize uint32, limittype uint8) (err error) {
	b := self.tmpwbuf(chunkHeaderLength + 5)
	n := self.fillChunkHeader(b, 2, 0, msgtypeidSetPeerBandwidth, 0, 5)
	pio.PutU32BE(b[n:], acksize)
	n += 4
	b[n] = limittype
	n++
	_, err = self.bufw.Write(b[:n])
	return
}

// writeCommandMsg sends an AMF0 command message (connect, publish, ...).
func (self *Conn) writeCommandMsg(csid, msgsid uint32, args ...interface{}) (err error) {
	return self.writeAMF0Msg(msgtypeidCommandMsgAMF0, csid, msgsid, args...)
}

// writeDataMsg sends an AMF0 data message (onMetaData, ...).
func (self *Conn) writeDataMsg(csid, msgsid uint32, args ...interface{}) (err error) {
	return self.writeAMF0Msg(msgtypeidDataMsgAMF0, csid, msgsid, args...)
}
// writeAMF0Msg serializes args as consecutive AMF0 values into a single
// RTMP message and writes it with a type-0 chunk header.
func (self *Conn) writeAMF0Msg(msgtypeid uint8, csid, msgsid uint32, args ...interface{}) (err error) {
	// First pass: total payload size, so the header length field is exact.
	size := 0
	for _, arg := range args {
		size += flvio.LenAMF0Val(arg)
	}

	b := self.tmpwbuf(chunkHeaderLength + size)
	n := self.fillChunkHeader(b, csid, 0, msgtypeid, msgsid, size)
	for _, arg := range args {
		n += flvio.FillAMF0Val(b[n:], arg)
	}

	_, err = self.bufw.Write(b[:n])
	return
}
// writeAVTag writes one FLV audio/video tag as an RTMP message on the
// appropriate chunk stream (6 for audio, 7 for video), growing the
// announced chunk size first if the message would not fit in one chunk.
func (self *Conn) writeAVTag(tag flvio.Tag, ts int32) (err error) {
	var msgtypeid uint8
	var csid uint32
	var data []byte

	switch tag.Type {
	case flvio.TAG_AUDIO:
		msgtypeid = msgtypeidAudioMsg
		csid = 6
		data = tag.Data

	case flvio.TAG_VIDEO:
		msgtypeid = msgtypeidVideoMsg
		csid = 7
		data = tag.Data
	}

	// Timestamps beyond 24 bits need the extended-timestamp field.
	actualChunkHeaderLength := chunkHeaderLength
	if uint32(ts) > FlvTimestampMax {
		actualChunkHeaderLength += 4
	}

	b := self.tmpwbuf(actualChunkHeaderLength + flvio.MaxTagSubHeaderLength)
	hdrlen := tag.FillHeader(b[actualChunkHeaderLength:])
	self.fillChunkHeader(b, csid, ts, msgtypeid, self.avmsgsid, hdrlen+len(data))
	n := hdrlen + actualChunkHeaderLength

	if n+len(data) > self.writeMaxChunkSize {
		if err = self.writeSetChunkSize(n + len(data)); err != nil {
			return
		}
	}

	if _, err = self.bufw.Write(b[:n]); err != nil {
		return
	}

	_, err = self.bufw.Write(data)
	return
}
// writeStreamBegin sends the StreamBegin user-control event (event type +
// 4-byte message stream id) on chunk stream 2.
func (self *Conn) writeStreamBegin(msgsid uint32) (err error) {
	b := self.tmpwbuf(chunkHeaderLength + 6)
	n := self.fillChunkHeader(b, 2, 0, msgtypeidUserControl, 0, 6)
	pio.PutU16BE(b[n:], eventtypeStreamBegin)
	n += 2
	pio.PutU32BE(b[n:], msgsid)
	n += 4
	_, err = self.bufw.Write(b[:n])
	return
}
// writeSetBufferLength sends the SetBufferLength user-control event:
// event type, 4-byte message stream id, then 4-byte buffer length in ms.
func (self *Conn) writeSetBufferLength(msgsid uint32, timestamp uint32) (err error) {
	b := self.tmpwbuf(chunkHeaderLength + 10)
	n := self.fillChunkHeader(b, 2, 0, msgtypeidUserControl, 0, 10)
	pio.PutU16BE(b[n:], eventtypeSetBufferLength)
	n += 2
	pio.PutU32BE(b[n:], msgsid)
	n += 4
	pio.PutU32BE(b[n:], timestamp)
	n += 4
	_, err = self.bufw.Write(b[:n])
	return
}
// chunkHeaderLength is the size of a full Type-0 chunk header: 1-byte basic
// header plus 11-byte message header. The extended timestamp, when present,
// adds 4 more bytes (see writeAVTag / fillChunkHeader).
const chunkHeaderLength = 12

// FlvTimestampMax is the largest value representable in the 24-bit chunk
// timestamp field; larger timestamps use the extended timestamp field.
const FlvTimestampMax = 0xFFFFFF
// fillChunkHeader writes a Type-0 (full) chunk header for a single-chunk
// message into b and returns the number of header bytes written.
// Only csid < 64 is representable here (the basic header is one byte);
// timestamps above FlvTimestampMax get the 4-byte extended timestamp field.
func (self *Conn) fillChunkHeader(b []byte, csid uint32, timestamp int32, msgtypeid uint8, msgsid uint32, msgdatalen int) (n int) {
	//  0                   1                   2                   3
	//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
	// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	// |                   timestamp                   |message length |
	// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	// |     message length (cont)     |message type id| msg stream id |
	// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	// |           message stream id (cont)            |
	// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
	//
	//       Figure 9 Chunk Message Header – Type 0

	// Basic header: fmt bits are 00 (Type 0), low 6 bits carry the csid.
	b[n] = byte(csid) & 0x3f
	n++
	// 24-bit timestamp, saturated to the sentinel when the extended
	// timestamp field is needed.
	if uint32(timestamp) <= FlvTimestampMax {
		pio.PutU24BE(b[n:], uint32(timestamp))
	} else {
		pio.PutU24BE(b[n:], FlvTimestampMax)
	}
	n += 3
	pio.PutU24BE(b[n:], uint32(msgdatalen))
	n += 3
	b[n] = msgtypeid
	n++
	// Message stream id is the one little-endian field in the chunk header.
	pio.PutU32LE(b[n:], msgsid)
	n += 4
	// Extended timestamp immediately follows the header when used.
	if uint32(timestamp) > FlvTimestampMax {
		pio.PutU32BE(b[n:], uint32(timestamp))
		n += 4
	}
	if Debug {
		logrus.Debugf("rtmp: write chunk msgdatalen=%d msgsid=%d\n", msgdatalen, msgsid)
	}
	return
}
// flushWrite pushes any buffered outgoing bytes to the underlying socket.
func (self *Conn) flushWrite() (err error) {
	err = self.bufw.Flush()
	return
}
// readChunk reads exactly one chunk from the peer: it parses the basic and
// message headers, reassembles the message payload across chunks in the
// per-chunk-stream state, dispatches a completed message to handleMsg, and
// sends a window acknowledgement once enough bytes have been consumed.
func (self *Conn) readChunk() (err error) {
	b := self.readbuf
	// n counts every byte consumed for this chunk (headers + payload); it
	// feeds the acknowledgement counter at the bottom.
	n := 0
	// Chunk basic header: fmt (2 bits) + chunk stream id (6 bits), with
	// 2- and 3-byte forms for csid >= 64.
	if _, err = io.ReadFull(self.bufr, b[:1]); err != nil {
		return
	}
	header := b[0]
	n += 1
	var msghdrtype uint8
	var csid uint32
	msghdrtype = header >> 6
	csid = uint32(header) & 0x3f
	switch csid {
	default: // Chunk basic header 1
	case 0: // Chunk basic header 2: one extra byte, csid = byte + 64
		if _, err = io.ReadFull(self.bufr, b[:1]); err != nil {
			return
		}
		n += 1
		csid = uint32(b[0]) + 64
	case 1: // Chunk basic header 3: two extra bytes, csid = u16be + 64
		if _, err = io.ReadFull(self.bufr, b[:2]); err != nil {
			return
		}
		n += 2
		csid = uint32(pio.U16BE(b)) + 64
	}
	// Per-chunk-stream reassembly state, created lazily per csid.
	cs := self.readcsmap[csid]
	if cs == nil {
		cs = &chunkStream{}
		self.readcsmap[csid] = cs
	}
	var timestamp uint32
	switch msghdrtype {
	case 0:
		//  0                   1                   2                   3
		//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
		// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
		// |                   timestamp                   |message length |
		// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
		// |     message length (cont)     |message type id| msg stream id |
		// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
		// |           message stream id (cont)            |
		// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
		//
		//       Figure 9 Chunk Message Header – Type 0
		// A new full header must not arrive mid-message.
		if cs.msgdataleft != 0 {
			err = fmt.Errorf("rtmp: chunk msgdataleft=%d invalid", cs.msgdataleft)
			return
		}
		h := b[:11]
		if _, err = io.ReadFull(self.bufr, h); err != nil {
			return
		}
		n += len(h)
		timestamp = pio.U24BE(h[0:3])
		cs.msghdrtype = msghdrtype
		cs.msgdatalen = pio.U24BE(h[3:6])
		cs.msgtypeid = h[6]
		cs.msgsid = pio.U32LE(h[7:11])
		// 0xffffff in the 24-bit field means the real timestamp follows
		// as a 4-byte extended timestamp.
		if timestamp == 0xffffff {
			if _, err = io.ReadFull(self.bufr, b[:4]); err != nil {
				return
			}
			n += 4
			timestamp = pio.U32BE(b)
			cs.hastimeext = true
		} else {
			cs.hastimeext = false
		}
		// Type 0 carries an absolute timestamp.
		cs.timenow = timestamp
		cs.Start()
	case 1:
		//  0                   1                   2                   3
		//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
		// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
		// |                timestamp delta                |message length |
		// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
		// |     message length (cont)     |message type id|
		// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
		//
		//       Figure 10 Chunk Message Header – Type 1
		if cs.msgdataleft != 0 {
			err = fmt.Errorf("rtmp: chunk msgdataleft=%d invalid", cs.msgdataleft)
			return
		}
		h := b[:7]
		if _, err = io.ReadFull(self.bufr, h); err != nil {
			return
		}
		n += len(h)
		timestamp = pio.U24BE(h[0:3])
		cs.msghdrtype = msghdrtype
		cs.msgdatalen = pio.U24BE(h[3:6])
		cs.msgtypeid = h[6]
		// Extended field carries the delta when the 24-bit value saturates.
		if timestamp == 0xffffff {
			if _, err = io.ReadFull(self.bufr, b[:4]); err != nil {
				return
			}
			n += 4
			timestamp = pio.U32BE(b)
			cs.hastimeext = true
		} else {
			cs.hastimeext = false
		}
		// Type 1 carries a delta; msgsid is inherited from the last header.
		cs.timedelta = timestamp
		cs.timenow += timestamp
		cs.Start()
	case 2:
		//  0                   1                   2
		//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
		// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
		// |                timestamp delta                |
		// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
		//
		//       Figure 11 Chunk Message Header – Type 2
		if cs.msgdataleft != 0 {
			err = fmt.Errorf("rtmp: chunk msgdataleft=%d invalid", cs.msgdataleft)
			return
		}
		h := b[:3]
		if _, err = io.ReadFull(self.bufr, h); err != nil {
			return
		}
		n += len(h)
		cs.msghdrtype = msghdrtype
		timestamp = pio.U24BE(h[0:3])
		if timestamp == 0xffffff {
			if _, err = io.ReadFull(self.bufr, b[:4]); err != nil {
				return
			}
			n += 4
			timestamp = pio.U32BE(b)
			cs.hastimeext = true
		} else {
			cs.hastimeext = false
		}
		// Type 2: only a delta; length/type/msgsid inherited.
		cs.timedelta = timestamp
		cs.timenow += timestamp
		cs.Start()
	case 3:
		// Type 3: no message header. Either a continuation chunk of the
		// current message, or — when msgdataleft == 0 — a brand-new message
		// repeating every field of the previous header.
		if cs.msgdataleft == 0 {
			switch cs.msghdrtype {
			case 0:
				// Previous header was absolute: re-read the extended
				// timestamp if one was present.
				if cs.hastimeext {
					if _, err = io.ReadFull(self.bufr, b[:4]); err != nil {
						return
					}
					n += 4
					timestamp = pio.U32BE(b)
					cs.timenow = timestamp
				}
			case 1, 2:
				// Previous header was a delta: re-apply it (re-reading the
				// extended field when used).
				if cs.hastimeext {
					if _, err = io.ReadFull(self.bufr, b[:4]); err != nil {
						return
					}
					n += 4
					timestamp = pio.U32BE(b)
				} else {
					timestamp = cs.timedelta
				}
				cs.timenow += timestamp
			}
			cs.Start()
		}
	default:
		err = fmt.Errorf("rtmp: invalid chunk msg header type=%d", msghdrtype)
		return
	}
	// Read at most one chunk's worth of the remaining payload.
	size := int(cs.msgdataleft)
	if size > self.readMaxChunkSize {
		size = self.readMaxChunkSize
	}
	off := cs.msgdatalen - cs.msgdataleft
	buf := cs.msgdata[off : int(off)+size]
	if _, err = io.ReadFull(self.bufr, buf); err != nil {
		return
	}
	n += len(buf)
	cs.msgdataleft -= uint32(size)
	if Debug {
		logrus.Debugf("rtmp: chunk msgsid=%d msgtypeid=%d msghdrtype=%d len=%d left=%d\n",
			cs.msgsid, cs.msgtypeid, cs.msghdrtype, cs.msgdatalen, cs.msgdataleft)
	}
	// Message complete: hand it to the dispatcher.
	if cs.msgdataleft == 0 {
		if Debug {
			logrus.Debug("rtmp: chunk data")
			logrus.Debug(hex.Dump(cs.msgdata))
		}
		if err = self.handleMsg(cs.timenow, cs.msgsid, cs.msgtypeid, cs.msgdata); err != nil {
			return
		}
	}
	// Window acknowledgement: report bytes received once past the window.
	self.ackn += uint32(n)
	if self.readAckSize != 0 && self.ackn > self.readAckSize {
		if err = self.writeAck(self.ackn); err != nil {
			return
		}
		self.ackn = 0
	}
	return
}
// handleCommandMsgAMF0 decodes an AMF0 command message: command name
// (string), transaction id (number), command object (map), then any number
// of trailing positional parameters. The decoded pieces are stashed on the
// Conn and gotcommand is set for the caller's state machine.
func (self *Conn) handleCommandMsgAMF0(b []byte) (n int, err error) {
	var name, transid, obj interface{}
	var size int

	if name, size, err = flvio.ParseAMF0Val(b[n:]); err != nil {
		return
	}
	n += size
	if transid, size, err = flvio.ParseAMF0Val(b[n:]); err != nil {
		return
	}
	n += size
	if obj, size, err = flvio.ParseAMF0Val(b[n:]); err != nil {
		return
	}
	n += size

	var ok bool
	if self.commandname, ok = name.(string); !ok {
		err = fmt.Errorf("rtmp: CommandMsgAMF0 command is not string")
		return
	}
	// Transaction id / command object may be null in practice; tolerate a
	// type mismatch instead of failing the whole message.
	self.commandtransid, _ = transid.(float64)
	self.commandobj, _ = obj.(flvio.AMFMap)

	// Remaining values are positional command parameters.
	self.commandparams = []interface{}{}
	for n < len(b) {
		if obj, size, err = flvio.ParseAMF0Val(b[n:]); err != nil {
			return
		}
		n += size
		self.commandparams = append(self.commandparams, obj)
	}
	// (The original re-checked n < len(b) here and errored on leftovers;
	// that branch was unreachable because the loop above only exits once
	// n >= len(b), so it has been removed.)

	self.gotcommand = true
	return
}
// handleMsg dispatches one complete RTMP message by type id, stashing the
// decoded result on the Conn and setting gotmsg for the read loop.
func (self *Conn) handleMsg(timestamp uint32, msgsid uint32, msgtypeid uint8, msgdata []byte) (err error) {
	self.msgdata = msgdata
	self.msgtypeid = msgtypeid
	self.timestamp = timestamp
	switch msgtypeid {
	case msgtypeidCommandMsgAMF0:
		if _, err = self.handleCommandMsgAMF0(msgdata); err != nil {
			return
		}
	case msgtypeidCommandMsgAMF3:
		if len(msgdata) < 1 {
			err = fmt.Errorf("rtmp: short packet of CommandMsgAMF3")
			return
		}
		// skip first byte (AMF3 command messages carry a one-byte format
		// marker before an AMF0-encoded body)
		if _, err = self.handleCommandMsgAMF0(msgdata[1:]); err != nil {
			return
		}
	case msgtypeidUserControl:
		if len(msgdata) < 2 {
			err = fmt.Errorf("rtmp: short packet of UserControl")
			return
		}
		// Only the event type is extracted here; event payload stays in msgdata.
		self.eventtype = pio.U16BE(msgdata)
	case msgtypeidDataMsgAMF0:
		// Accumulate all AMF0 values of the data message (e.g. onMetaData).
		b := msgdata
		n := 0
		for n < len(b) {
			var obj interface{}
			var size int
			if obj, size, err = flvio.ParseAMF0Val(b[n:]); err != nil {
				return
			}
			n += size
			self.datamsgvals = append(self.datamsgvals, obj)
		}
		// NOTE(review): unreachable — the loop above exits only when n >= len(b).
		if n < len(b) {
			err = fmt.Errorf("rtmp: DataMsgAMF0 left bytes=%d", len(b)-n)
			return
		}
	case msgtypeidVideoMsg:
		if len(msgdata) == 0 {
			return
		}
		tag := flvio.Tag{Type: flvio.TAG_VIDEO}
		var n int
		if n, err = (&tag).ParseHeader(msgdata); err != nil {
			return
		}
		// Drop frame types other than key/inter (e.g. info/command frames).
		if !(tag.FrameType == flvio.FRAME_INTER || tag.FrameType == flvio.FRAME_KEY) {
			return
		}
		tag.Data = msgdata[n:]
		self.avtag = tag
	case msgtypeidAudioMsg:
		if len(msgdata) == 0 {
			return
		}
		tag := flvio.Tag{Type: flvio.TAG_AUDIO}
		var n int
		if n, err = (&tag).ParseHeader(msgdata); err != nil {
			return
		}
		tag.Data = msgdata[n:]
		self.avtag = tag
	case msgtypeidSetChunkSize:
		if len(msgdata) < 4 {
			err = fmt.Errorf("rtmp: short packet of SetChunkSize")
			return
		}
		self.readMaxChunkSize = int(pio.U32BE(msgdata))
		// Protocol-control message: return early so gotmsg is NOT set and
		// the message is not surfaced to the caller.
		return
	}
	self.gotmsg = true
	return
}
// Handshake HMAC keys for the "digest" RTMP handshake variant: the ASCII
// strings "Genuine Adobe Flash Player 001" / "Genuine Adobe Flash Media
// Server 001" followed by a fixed 32-byte shared secret. The "partial" keys
// (text portion only) sign C1/S1; the full keys sign C2/S2.
var (
	hsClientFullKey = []byte{
		'G', 'e', 'n', 'u', 'i', 'n', 'e', ' ', 'A', 'd', 'o', 'b', 'e', ' ',
		'F', 'l', 'a', 's', 'h', ' ', 'P', 'l', 'a', 'y', 'e', 'r', ' ',
		'0', '0', '1',
		0xF0, 0xEE, 0xC2, 0x4A, 0x80, 0x68, 0xBE, 0xE8, 0x2E, 0x00, 0xD0, 0xD1,
		0x02, 0x9E, 0x7E, 0x57, 0x6E, 0xEC, 0x5D, 0x2D, 0x29, 0x80, 0x6F, 0xAB,
		0x93, 0xB8, 0xE6, 0x36, 0xCF, 0xEB, 0x31, 0xAE,
	}
	hsServerFullKey = []byte{
		'G', 'e', 'n', 'u', 'i', 'n', 'e', ' ', 'A', 'd', 'o', 'b', 'e', ' ',
		'F', 'l', 'a', 's', 'h', ' ', 'M', 'e', 'd', 'i', 'a', ' ',
		'S', 'e', 'r', 'v', 'e', 'r', ' ',
		'0', '0', '1',
		0xF0, 0xEE, 0xC2, 0x4A, 0x80, 0x68, 0xBE, 0xE8, 0x2E, 0x00, 0xD0, 0xD1,
		0x02, 0x9E, 0x7E, 0x57, 0x6E, 0xEC, 0x5D, 0x2D, 0x29, 0x80, 0x6F, 0xAB,
		0x93, 0xB8, 0xE6, 0x36, 0xCF, 0xEB, 0x31, 0xAE,
	}
	// Text-only prefixes used when verifying/signing C1 and S1.
	hsClientPartialKey = hsClientFullKey[:30]
	hsServerPartialKey = hsServerFullKey[:36]
)
func hsMakeDigest(key []byte, src []byte, gap int) (dst []byte) {
h := hmac.New(sha256.New, key)
if gap <= 0 {
h.Write(src)
} else {
h.Write(src[:gap])
h.Write(src[gap+32:])
}
return h.Sum(nil)
}
// hsCalcDigestPos derives the digest offset inside a handshake packet from
// the four scheme bytes at p[base:base+4]: their sum modulo 728, shifted
// past the scheme bytes themselves.
func hsCalcDigestPos(p []byte, base int) (pos int) {
	sum := 0
	for _, v := range p[base : base+4] {
		sum += int(v)
	}
	return sum%728 + base + 4
}
// hsFindDigest verifies the digest embedded at the position derived from
// p[base:base+4]: it recomputes the HMAC over the rest of p and returns the
// digest offset, or -1 if the stored digest does not match.
func hsFindDigest(p []byte, key []byte, base int) int {
	gap := hsCalcDigestPos(p, base)
	digest := hsMakeDigest(key, p, gap)
	// bytes.Equal is the idiomatic equality check; bytes.Compare is for ordering.
	if !bytes.Equal(p[gap:gap+32], digest) {
		return -1
	}
	return gap
}
// hsParse1 validates a digest-style C1/S1 packet, trying the two known
// digest base offsets (772, then 8). On success it returns ok=true and the
// HMAC of the peer's digest under our key, used to build the reply packet.
func hsParse1(p []byte, peerkey []byte, key []byte) (ok bool, digest []byte) {
	pos := hsFindDigest(p, peerkey, 772)
	if pos == -1 {
		pos = hsFindDigest(p, peerkey, 8)
	}
	if pos == -1 {
		return
	}
	ok = true
	digest = hsMakeDigest(key, p[pos:pos+32], -1)
	return
}
// hsCreate01 builds a version byte plus a digest-style 1536-byte handshake
// packet (C0+C1 / S0+S1): time, version, random fill, with an HMAC digest
// stamped at the position derived from the scheme bytes at offset 8.
func hsCreate01(p []byte, time uint32, ver uint32, key []byte) {
	p[0] = 3 // RTMP protocol version
	p1 := p[1:]
	// Random fill first; the fixed fields and digest overwrite parts of it.
	rand.Read(p1[8:])
	pio.PutU32BE(p1[0:4], time)
	pio.PutU32BE(p1[4:8], ver)
	gap := hsCalcDigestPos(p1, 8)
	digest := hsMakeDigest(key, p1, gap)
	copy(p1[gap:], digest)
}
// hsCreate2 builds a C2/S2 reply: random bytes with an HMAC digest over the
// leading len(p)-32 bytes stored in the trailing 32-byte slot.
func hsCreate2(p []byte, key []byte) {
	rand.Read(p)
	digestPos := len(p) - 32
	copy(p[digestPos:], hsMakeDigest(key, p, digestPos))
}
// handshakeClient performs the client side of the plain RTMP handshake:
// send C0 (version byte) plus a zeroed C1, read S0+S1+S2, and echo S1 back
// as C2. The digest-style handshake is not implemented client-side.
func (self *Conn) handshakeClient() (err error) {
	var random [(1 + 1536*2) * 2]byte

	C0C1C2 := random[:1536*2+1]
	C0 := C0C1C2[:1]
	C0C1 := C0C1C2[:1536+1]
	C2 := C0C1C2[1536+1:]

	S0S1S2 := random[1536*2+1:]
	S1 := S0S1S2[1 : 1536+1]

	C0[0] = 3 // RTMP protocol version

	// > C0C1 (C1 is left zeroed: time=0, version=0 signals a plain handshake)
	if _, err = self.bufw.Write(C0C1); err != nil {
		return
	}
	if err = self.bufw.Flush(); err != nil {
		return
	}

	// < S0S1S2
	if _, err = io.ReadFull(self.bufr, S0S1S2); err != nil {
		return
	}
	if Debug {
		logrus.Debug("rtmp: handshakeClient: server version", S1[4], S1[5], S1[6], S1[7])
	}

	// The original branched on the server version here, but both branches
	// assigned C2 = S1, so the conditional was dead and has been removed:
	// we always echo S1 regardless of which handshake the server used.
	C2 = S1

	// > C2
	if _, err = self.bufw.Write(C2); err != nil {
		return
	}

	self.stage++
	return
}
// handshakeServer performs the server side of the RTMP handshake. If the
// client advertises a non-zero version it runs the digest-style handshake
// (verify C1, build signed S1/S2); otherwise it falls back to the plain
// handshake and simply mirrors the client's packets.
func (self *Conn) handshakeServer() (err error) {
	// One backing array partitioned into the client (C*) and server (S*)
	// handshake packets; the slices below alias into it.
	var random [(1 + 1536*2) * 2]byte
	C0C1C2 := random[:1536*2+1]
	C0 := C0C1C2[:1]
	C1 := C0C1C2[1 : 1536+1]
	C0C1 := C0C1C2[:1536+1]
	C2 := C0C1C2[1536+1:]
	S0S1S2 := random[1536*2+1:]
	S0 := S0S1S2[:1]
	S1 := S0S1S2[1 : 1536+1]
	S0S1 := S0S1S2[:1536+1]
	S2 := S0S1S2[1536+1:]
	// < C0C1
	if _, err = io.ReadFull(self.bufr, C0C1); err != nil {
		return
	}
	if C0[0] != 3 {
		err = fmt.Errorf("rtmp: handshake version=%d invalid", C0[0])
		return
	}
	S0[0] = 3
	// Echo the client's time; advertise a fixed non-zero server version so
	// digest-capable clients treat our reply as a digest handshake.
	clitime := pio.U32BE(C1[0:4])
	srvtime := clitime
	srvver := uint32(0x0d0e0a0d)
	cliver := pio.U32BE(C1[4:8])
	if cliver != 0 {
		// Digest handshake: verify C1 against the client partial key, then
		// build a signed S1 and an S2 keyed by the client's digest.
		var ok bool
		var digest []byte
		if ok, digest = hsParse1(C1, hsClientPartialKey, hsServerFullKey); !ok {
			err = fmt.Errorf("rtmp: handshake server: C1 invalid")
			return
		}
		hsCreate01(S0S1, srvtime, srvver, hsServerPartialKey)
		hsCreate2(S2, digest)
	} else {
		// Plain handshake: mirror the client's packets back.
		copy(S1, C1)
		copy(S2, C2)
	}
	// > S0S1S2
	if _, err = self.bufw.Write(S0S1S2); err != nil {
		return
	}
	if err = self.bufw.Flush(); err != nil {
		return
	}
	// < C2 (read and discarded; not verified)
	if _, err = io.ReadFull(self.bufr, C2); err != nil {
		return
	}
	self.stage++
	return
}
// closeConn wraps a server-side *Conn handed out by Handler; its Close
// unblocks the serving callback (HandlePlay/HandlePublish) via waitclose
// instead of closing the socket directly.
type closeConn struct {
	*Conn
	waitclose chan bool
}
// Close signals the blocked serving goroutine to return. It never fails
// and does not itself close the underlying connection; blocks until the
// serving goroutine receives the signal.
func (self closeConn) Close() error {
	self.waitclose <- true
	return nil
}
// Handler registers RTMP support with avutil: client-side URL muxer/demuxer
// (via Dial) and server-side muxer/demuxer that start a one-shot Server and
// hand the first matching connection back to the caller.
func Handler(h *avutil.RegisterHandler) {
	// Client playback: rtmp:// URLs demux through a dialed connection.
	h.UrlDemuxer = func(uri string) (ok bool, demuxer av.DemuxCloser, err error) {
		if !strings.HasPrefix(uri, "rtmp://") {
			return
		}
		ok = true
		demuxer, err = Dial(uri)
		return
	}
	// Client publishing: rtmp:// URLs mux through a dialed connection.
	h.UrlMuxer = func(uri string) (ok bool, muxer av.MuxCloser, err error) {
		if !strings.HasPrefix(uri, "rtmp://") {
			return
		}
		ok = true
		muxer, err = Dial(uri)
		return
	}
	// Server-side muxer: listen on the URL's host and return the first
	// player connection; closing the returned muxer releases its handler.
	h.ServerMuxer = func(uri string) (ok bool, muxer av.MuxCloser, err error) {
		if !strings.HasPrefix(uri, "rtmp://") {
			return
		}
		ok = true
		var u *url.URL
		if u, err = ParseURL(uri); err != nil {
			return
		}
		server := &Server{
			Addr: u.Host,
		}
		waitstart := make(chan error)
		waitconn := make(chan *Conn)
		waitclose := make(chan bool)
		// Park the per-connection handler until the caller closes the
		// returned closeConn, keeping the connection alive meanwhile.
		server.HandlePlay = func(conn *Conn) {
			waitconn <- conn
			<-waitclose
		}
		go func() {
			waitstart <- server.ListenAndServe()
		}()
		// Either the listener fails to start, or the first player arrives.
		// NOTE(review): the listener goroutine keeps running after the
		// first connection is handed out — it is never shut down here.
		select {
		case err = <-waitstart:
			if err != nil {
				return
			}
		case conn := <-waitconn:
			muxer = closeConn{Conn: conn, waitclose: waitclose}
			return
		}
		return
	}
	// Server-side demuxer: same pattern, waiting for the first publisher.
	h.ServerDemuxer = func(uri string) (ok bool, demuxer av.DemuxCloser, err error) {
		if !strings.HasPrefix(uri, "rtmp://") {
			return
		}
		ok = true
		var u *url.URL
		if u, err = ParseURL(uri); err != nil {
			return
		}
		server := &Server{
			Addr: u.Host,
		}
		waitstart := make(chan error)
		waitconn := make(chan *Conn)
		waitclose := make(chan bool)
		server.HandlePublish = func(conn *Conn) {
			waitconn <- conn
			<-waitclose
		}
		go func() {
			waitstart <- server.ListenAndServe()
		}()
		select {
		case err = <-waitstart:
			if err != nil {
				return
			}
		case conn := <-waitconn:
			demuxer = closeConn{Conn: conn, waitclose: waitclose}
			return
		}
		return
	}
	h.CodecTypes = CodecTypes
}
|
{
return
}
|
popper.js
|
/**!
* @fileOverview Kickass library to create and place poppers near their reference elements.
* @version 1.15.0
* @license
* Copyright (c) 2016 Federico Zivolo and contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
// Running in a browser? (popper can be imported server-side, where
// `window`/`document` are absent.)
var isBrowser = typeof window !== 'undefined' && typeof document !== 'undefined';

// Browsers whose scheduling fallback needs a non-zero macrotask delay.
const longerTimeoutBrowsers = ['Edge', 'Trident', 'Firefox'];
let timeoutDuration = 0;
if (isBrowser && longerTimeoutBrowsers.some(name => navigator.userAgent.indexOf(name) >= 0)) {
  timeoutDuration = 1;
}
/**
 * Wraps `fn` so that any number of calls within the same tick collapse
 * into a single invocation, deferred to a microtask via window.Promise.
 * @argument {Function} fn
 * @returns {Function} debounced wrapper
 */
function microtaskDebounce(fn) {
  let pending = false;
  return () => {
    if (pending) {
      return;
    }
    pending = true;
    window.Promise.resolve().then(() => {
      pending = false;
      fn();
    });
  };
}
/**
 * Wraps `fn` so that repeated calls within `timeoutDuration` ms collapse
 * into a single setTimeout invocation (macrotask fallback used when
 * microtasks are unavailable).
 * @argument {Function} fn
 * @returns {Function} debounced wrapper
 */
function taskDebounce(fn) {
  let queued = false;
  return () => {
    if (queued) {
      return;
    }
    queued = true;
    setTimeout(() => {
      queued = false;
      fn();
    }, timeoutDuration);
  };
}
// Microtask scheduling requires a browser environment with a Promise global.
const supportsMicroTasks = isBrowser && window.Promise;
/**
 * Create a debounced version of a method, that's asynchronously deferred
 * but called in the minimum time possible.
 *
 * @method
 * @memberof Popper.Utils
 * @argument {Function} fn
 * @returns {Function}
 */
var debounce = supportsMicroTasks ? microtaskDebounce : taskDebounce;
/**
 * Check if the given variable is a function
 * @method
 * @memberof Popper.Utils
 * @argument {Any} functionToCheck - variable to check
 * @returns {Boolean} answer to: is a function?
 */
function isFunction(functionToCheck) {
  // Note: a `typeof` check would not be equivalent — async and generator
  // functions stringify to different [object ...] tags, which this check
  // (intentionally or not) rejects, so the original semantics are kept.
  const toString = Object.prototype.toString;
  return functionToCheck && toString.call(functionToCheck) === '[object Function]';
}
/**
 * Get CSS computed property of the given element
 * @method
 * @memberof Popper.Utils
 * @argument {Element} element
 * @argument {String} property - optional; when omitted the whole computed
 * style declaration is returned
 * @returns {String|CSSStyleDeclaration|Array} the property value, the full
 * declaration, or an empty array for non-element nodes
 */
function getStyleComputedProperty(element, property) {
  // Only element nodes (nodeType 1) have computed styles.
  if (element.nodeType !== 1) {
    return [];
  }
  // NOTE: 1 DOM access here
  // Use the element's own window so this works across iframes.
  const window = element.ownerDocument.defaultView;
  const css = window.getComputedStyle(element, null);
  return property ? css[property] : css;
}
/**
 * Returns the parentNode or the host of the element
 * @method
 * @memberof Popper.Utils
 * @argument {Element} element
 * @returns {Element} parent (the element itself for HTML; the shadow-DOM
 * host when there is no parentNode)
 */
function getParentNode(element) {
  if (element.nodeName === 'HTML') {
    return element;
  }
  // Fall back to `host` to climb out of shadow DOM roots.
  return element.parentNode || element.host;
}
/**
 * Returns the scrolling parent of the given element
 * @method
 * @memberof Popper.Utils
 * @argument {Element} element
 * @returns {Element} scroll parent
 */
function getScrollParent(element) {
  // Return body, `getScroll` will take care to get the correct `scrollTop` from it
  if (!element) {
    return document.body;
  }
  switch (element.nodeName) {
    case 'HTML':
    case 'BODY':
      return element.ownerDocument.body;
    case '#document':
      return element.body;
  }

  // Firefox want us to check `-x` and `-y` variations as well
  const { overflow, overflowX, overflowY } = getStyleComputedProperty(element);
  // Any scrollable overflow value on any axis makes this the scroll parent.
  if (/(auto|scroll|overlay)/.test(overflow + overflowY + overflowX)) {
    return element;
  }

  // Otherwise keep climbing (through shadow hosts too, via getParentNode).
  return getScrollParent(getParentNode(element));
}
// Feature-sniff IE11 (MSInputMethodContext + documentMode) and UA-sniff
// IE10; both evaluated once at load time, false outside a browser.
const isIE11 = isBrowser && !!(window.MSInputMethodContext && document.documentMode);
const isIE10 = isBrowser && /MSIE 10/.test(navigator.userAgent);
/**
 * Determines if the browser is Internet Explorer
 * @method
 * @memberof Popper.Utils
 * @param {Number} version to check (10 or 11); any other value checks both
 * @returns {Boolean} isIE
 */
function isIE(version) {
  switch (version) {
    case 11:
      return isIE11;
    case 10:
      return isIE10;
    default:
      return isIE11 || isIE10;
  }
}
/**
 * Returns the offset parent of the given element
 * @method
 * @memberof Popper.Utils
 * @argument {Element} element
 * @returns {Element} offset parent
 */
function getOffsetParent(element) {
  if (!element) {
    return document.documentElement;
  }

  // In IE10 a hidden element reports body as offsetParent, which must be
  // treated the same as "no offsetParent".
  const noOffsetParent = isIE(10) ? document.body : null;

  // NOTE: 1 DOM access here
  let offsetParent = element.offsetParent || null;
  // Skip hidden elements which don't have an offsetParent
  while (offsetParent === noOffsetParent && element.nextElementSibling) {
    offsetParent = (element = element.nextElementSibling).offsetParent;
  }

  const nodeName = offsetParent && offsetParent.nodeName;

  if (!nodeName || nodeName === 'BODY' || nodeName === 'HTML') {
    return element ? element.ownerDocument.documentElement : document.documentElement;
  }

  // .offsetParent will return the closest TH, TD or TABLE in case
  // no offsetParent is present, I hate this job...
  if (['TH', 'TD', 'TABLE'].indexOf(offsetParent.nodeName) !== -1 && getStyleComputedProperty(offsetParent, 'position') === 'static') {
    return getOffsetParent(offsetParent);
  }

  return offsetParent;
}
/**
 * Tells whether `element` acts as an offset container: HTML always does,
 * BODY never does, and any other element does when it is the offset parent
 * of its own first element child.
 */
function isOffsetContainer(element) {
  if (element.nodeName === 'BODY') {
    return false;
  }
  if (element.nodeName === 'HTML') {
    return true;
  }
  return getOffsetParent(element.firstElementChild) === element;
}
/**
 * Finds the root node (document, shadowDOM root) of the given element
 * @method
 * @memberof Popper.Utils
 * @argument {Element} node
 * @returns {Element} root node
 */
function getRoot(node) {
  // Iterative climb; stops at the first node whose parentNode is null.
  let root = node;
  while (root.parentNode !== null) {
    root = root.parentNode;
  }
  return root;
}
/**
 * Finds the offset parent common to the two provided nodes
 * @method
 * @memberof Popper.Utils
 * @argument {Element} element1
 * @argument {Element} element2
 * @returns {Element} common offset parent
 */
function findCommonOffsetParent(element1, element2) {
  // This check is needed to avoid errors in case one of the elements isn't defined for any reason
  if (!element1 || !element1.nodeType || !element2 || !element2.nodeType) {
    return document.documentElement;
  }

  // Here we make sure to give as "start" the element that comes first in the DOM
  const order = element1.compareDocumentPosition(element2) & Node.DOCUMENT_POSITION_FOLLOWING;
  const start = order ? element1 : element2;
  const end = order ? element2 : element1;

  // Get common ancestor container
  const range = document.createRange();
  range.setStart(start, 0);
  range.setEnd(end, 0);
  const { commonAncestorContainer } = range;

  // Both nodes are inside #document
  // (start.contains(end) covers the case where one element is the ancestor)
  if (element1 !== commonAncestorContainer && element2 !== commonAncestorContainer || start.contains(end)) {
    if (isOffsetContainer(commonAncestorContainer)) {
      return commonAncestorContainer;
    }

    return getOffsetParent(commonAncestorContainer);
  }

  // one of the nodes is inside shadowDOM, find which one
  const element1root = getRoot(element1);
  if (element1root.host) {
    return findCommonOffsetParent(element1root.host, element2);
  } else {
    return findCommonOffsetParent(element1, getRoot(element2).host);
  }
}
/**
 * Gets the scroll value of the given element in the given side (top and left)
 * @method
 * @memberof Popper.Utils
 * @argument {Element} element
 * @argument {String} side `top` or `left`
 * @returns {number} amount of scrolled pixels
 */
function getScroll(element, side = 'top') {
  const scrollProp = side === 'top' ? 'scrollTop' : 'scrollLeft';
  const { nodeName } = element;

  // For the document-level elements, read from the document's designated
  // scrolling element (falls back to <html>).
  if (nodeName === 'BODY' || nodeName === 'HTML') {
    const html = element.ownerDocument.documentElement;
    const scroller = element.ownerDocument.scrollingElement || html;
    return scroller[scrollProp];
  }

  return element[scrollProp];
}
/*
 * Sum or subtract the element scroll values (left and top) from a given rect object
 * @method
 * @memberof Popper.Utils
 * @param {Object} rect - Rect object you want to change
 * @param {HTMLElement} element - The element from the function reads the scroll values
 * @param {Boolean} subtract - set to true if you want to subtract the scroll values
 * @return {Object} rect - The modified rect object (mutated in place)
 */
function includeScroll(rect, element, subtract = false) {
  const sign = subtract ? -1 : 1;
  const scrollTop = getScroll(element, 'top') * sign;
  const scrollLeft = getScroll(element, 'left') * sign;
  rect.top += scrollTop;
  rect.bottom += scrollTop;
  rect.left += scrollLeft;
  rect.right += scrollLeft;
  return rect;
}
/*
 * Helper to detect borders of a given element
 * @method
 * @memberof Popper.Utils
 * @param {CSSStyleDeclaration} styles
 * Result of `getStyleComputedProperty` on the given element
 * @param {String} axis - `x` or `y`
 * @return {number} borders - The borders size of the given axis
 */
function getBordersSize(styles, axis) {
  const sideA = axis === 'x' ? 'Left' : 'Top';
  const sideB = sideA === 'Left' ? 'Right' : 'Bottom';
  // parseFloat takes a single argument; the stray `, 10` radix argument in
  // the original was a parseInt-ism and was silently ignored.
  return parseFloat(styles[`border${sideA}Width`]) + parseFloat(styles[`border${sideB}Width`]);
}
/**
 * Document size along one axis ('Height'/'Width'): the max of the body and
 * html offset/scroll/client metrics, plus an IE10-only candidate that adds
 * the html margins to its offset size.
 */
function getSize(axis, body, html, computedStyle) {
  const marginA = axis === 'Height' ? 'Top' : 'Left';
  const marginB = axis === 'Height' ? 'Bottom' : 'Right';
  const ieCandidate = isIE(10)
    ? parseInt(html[`offset${axis}`]) + parseInt(computedStyle[`margin${marginA}`]) + parseInt(computedStyle[`margin${marginB}`])
    : 0;
  return Math.max(
    body[`offset${axis}`],
    body[`scroll${axis}`],
    html[`client${axis}`],
    html[`offset${axis}`],
    html[`scroll${axis}`],
    ieCandidate
  );
}
/**
 * Computes the overall document size (largest of the body/html offset,
 * scroll and client metrics) for both axes.
 * @param {Document} document
 * @returns {{height: number, width: number}}
 */
function getWindowSizes(document) {
  const body = document.body;
  const html = document.documentElement;
  // Only IE10 needs the html computed style (margin compensation in
  // getSize); elsewhere this stays falsy and is ignored.
  const computedStyle = isIE(10) && getComputedStyle(html);

  return {
    height: getSize('Height', body, html, computedStyle),
    width: getSize('Width', body, html, computedStyle)
  };
}
var _extends = Object.assign || function (target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i];
for (var key in source) {
if (Object.prototype.hasOwnProperty.call(source, key)) {
target[key] = source[key];
}
}
}
return target;
};
/**
 * Given element offsets, generate an output similar to getBoundingClientRect
 * @method
 * @memberof Popper.Utils
 * @argument {Object} offsets - must carry top, left, width and height
 * @returns {Object} ClientRect like output (adds derived right/bottom)
 */
function getClientRect(offsets) {
  return {
    ...offsets,
    right: offsets.left + offsets.width,
    bottom: offsets.top + offsets.height
  };
}
/**
 * Get bounding client rect of given element
 * @method
 * @memberof Popper.Utils
 * @param {HTMLElement} element
 * @return {Object} client rect (scrollbar sizes subtracted)
 */
function getBoundingClientRect(element) {
  let rect = {};

  // IE10 10 FIX: Please, don't ask, the element isn't
  // considered in DOM in some circumstances...
  // This isn't reproducible in IE10 compatibility mode of IE11
  try {
    if (isIE(10)) {
      rect = element.getBoundingClientRect();
      // IE10's rect is viewport-relative; add scroll to normalize.
      const scrollTop = getScroll(element, 'top');
      const scrollLeft = getScroll(element, 'left');
      rect.top += scrollTop;
      rect.left += scrollLeft;
      rect.bottom += scrollTop;
      rect.right += scrollLeft;
    } else {
      rect = element.getBoundingClientRect();
    }
  } catch (e) {}

  const result = {
    left: rect.left,
    top: rect.top,
    width: rect.right - rect.left,
    height: rect.bottom - rect.top
  };

  // subtract scrollbar size from sizes
  const sizes = element.nodeName === 'HTML' ? getWindowSizes(element.ownerDocument) : {};
  const width = sizes.width || element.clientWidth || result.right - result.left;
  const height = sizes.height || element.clientHeight || result.bottom - result.top;

  let horizScrollbar = element.offsetWidth - width;
  let vertScrollbar = element.offsetHeight - height;

  // if an hypothetical scrollbar is detected, we must be sure it's not a `border`
  // we make this check conditional for performance reasons
  if (horizScrollbar || vertScrollbar) {
    const styles = getStyleComputedProperty(element);
    horizScrollbar -= getBordersSize(styles, 'x');
    vertScrollbar -= getBordersSize(styles, 'y');

    result.width -= horizScrollbar;
    result.height -= vertScrollbar;
  }

  return getClientRect(result);
}
/**
 * Computes the rect of `children` relative to `parent`, compensating for
 * parent borders, documentElement margins and (when relevant) scroll.
 * @param {HTMLElement} children
 * @param {HTMLElement} parent
 * @param {Boolean} fixedPosition - fixed-position mode (ignores negative
 * scroll on the html element)
 * @returns {Object} ClientRect-like offsets with marginTop/marginLeft attached
 */
function getOffsetRectRelativeToArbitraryNode(children, parent, fixedPosition = false) {
  const isIE10 = isIE(10);
  const isHTML = parent.nodeName === 'HTML';
  const childrenRect = getBoundingClientRect(children);
  const parentRect = getBoundingClientRect(parent);
  const scrollParent = getScrollParent(children);

  const styles = getStyleComputedProperty(parent);
  const borderTopWidth = parseFloat(styles.borderTopWidth, 10);
  const borderLeftWidth = parseFloat(styles.borderLeftWidth, 10);

  // In cases where the parent is fixed, we must ignore negative scroll in offset calc
  if (fixedPosition && isHTML) {
    parentRect.top = Math.max(parentRect.top, 0);
    parentRect.left = Math.max(parentRect.left, 0);
  }
  let offsets = getClientRect({
    top: childrenRect.top - parentRect.top - borderTopWidth,
    left: childrenRect.left - parentRect.left - borderLeftWidth,
    width: childrenRect.width,
    height: childrenRect.height
  });
  offsets.marginTop = 0;
  offsets.marginLeft = 0;

  // Subtract margins of documentElement in case it's being used as parent
  // we do this only on HTML because it's the only element that behaves
  // differently when margins are applied to it. The margins are included in
  // the box of the documentElement, in the other cases not.
  if (!isIE10 && isHTML) {
    const marginTop = parseFloat(styles.marginTop, 10);
    const marginLeft = parseFloat(styles.marginLeft, 10);

    offsets.top -= borderTopWidth - marginTop;
    offsets.bottom -= borderTopWidth - marginTop;
    offsets.left -= borderLeftWidth - marginLeft;
    offsets.right -= borderLeftWidth - marginLeft;

    // Attach marginTop and marginLeft because in some circumstances we may need them
    offsets.marginTop = marginTop;
    offsets.marginLeft = marginLeft;
  }

  // Fold the parent's own scroll into the offsets when the parent is (or
  // contains, on IE10) the scroll parent.
  if (isIE10 && !fixedPosition ? parent.contains(scrollParent) : parent === scrollParent && scrollParent.nodeName !== 'BODY') {
    offsets = includeScroll(offsets, parent);
  }

  return offsets;
}
/**
 * Computes the viewport rect relative to the given element: viewport size
 * plus document scroll, minus the element's own offset.
 * (The "Artbitrary" typo is part of the public symbol name and kept as-is.)
 * @param {HTMLElement} element
 * @param {Boolean} excludeScroll - skip the document scroll contribution
 * @returns {Object} ClientRect-like viewport offsets
 */
function getViewportOffsetRectRelativeToArtbitraryNode(element, excludeScroll = false) {
  const html = element.ownerDocument.documentElement;
  const relativeOffset = getOffsetRectRelativeToArbitraryNode(element, html);
  // Viewport may be smaller than documentElement; take the larger metric.
  const width = Math.max(html.clientWidth, window.innerWidth || 0);
  const height = Math.max(html.clientHeight, window.innerHeight || 0);

  const scrollTop = !excludeScroll ? getScroll(html) : 0;
  const scrollLeft = !excludeScroll ? getScroll(html, 'left') : 0;

  const offset = {
    top: scrollTop - relativeOffset.top + relativeOffset.marginTop,
    left: scrollLeft - relativeOffset.left + relativeOffset.marginLeft,
    width,
    height
  };

  return getClientRect(offset);
}
/**
 * Check if the given element is fixed or is inside a fixed parent
 * @method
 * @memberof Popper.Utils
 * @argument {Element} element
 * @returns {Boolean} answer to "isFixed?"
 */
function isFixed(element) {
  // Iterative climb; stops at BODY/HTML or when there is no parent left.
  let node = element;
  while (node) {
    const nodeName = node.nodeName;
    if (nodeName === 'BODY' || nodeName === 'HTML') {
      return false;
    }
    if (getStyleComputedProperty(node, 'position') === 'fixed') {
      return true;
    }
    node = getParentNode(node);
  }
  return false;
}
/**
 * Finds the first parent of an element that has a transformed property defined
 * @method
 * @memberof Popper.Utils
 * @argument {Element} element
 * @returns {Element} first transformed parent or documentElement
 */
function getFixedPositionOffsetParent(element) {
  // This check is needed to avoid errors in case one of the elements isn't defined for any reason
  // (IE is skipped entirely: transforms don't create containing blocks for
  // fixed elements there.)
  if (!element || !element.parentElement || isIE()) {
    return document.documentElement;
  }
  // A transformed ancestor becomes the containing block for fixed elements.
  let el = element.parentElement;
  while (el && getStyleComputedProperty(el, 'transform') === 'none') {
    el = el.parentElement;
  }
  return el || document.documentElement;
}
/**
 * Computed the boundaries limits and return them
 * @method
 * @memberof Popper.Utils
 * @param {HTMLElement} popper
 * @param {HTMLElement} reference
 * @param {number} padding
 * @param {HTMLElement|String} boundariesElement - Element used to define the
 * boundaries, or one of 'viewport' / 'scrollParent' / 'window'
 * @param {Boolean} fixedPosition - Is in fixed position mode
 * @returns {Object} Coordinates of the boundaries
 */
function getBoundaries(popper, reference, padding, boundariesElement, fixedPosition = false) {
  // NOTE: 1 DOM access here

  let boundaries = { top: 0, left: 0 };
  // In fixed mode the containing block (transformed ancestor) is the
  // anchor; otherwise use the common offset parent of popper and reference.
  const offsetParent = fixedPosition ? getFixedPositionOffsetParent(popper) : findCommonOffsetParent(popper, reference);

  // Handle viewport case
  if (boundariesElement === 'viewport') {
    boundaries = getViewportOffsetRectRelativeToArtbitraryNode(offsetParent, fixedPosition);
  } else {
    // Handle other cases based on DOM element used as boundaries
    let boundariesNode;
    if (boundariesElement === 'scrollParent') {
      boundariesNode = getScrollParent(getParentNode(reference));
      if (boundariesNode.nodeName === 'BODY') {
        boundariesNode = popper.ownerDocument.documentElement;
      }
    } else if (boundariesElement === 'window') {
      boundariesNode = popper.ownerDocument.documentElement;
    } else {
      boundariesNode = boundariesElement;
    }

    const offsets = getOffsetRectRelativeToArbitraryNode(boundariesNode, offsetParent, fixedPosition);

    // In case of HTML, we need a different computation
    if (boundariesNode.nodeName === 'HTML' && !isFixed(offsetParent)) {
      const { height, width } = getWindowSizes(popper.ownerDocument);
      boundaries.top += offsets.top - offsets.marginTop;
      boundaries.bottom = height + offsets.top;
      boundaries.left += offsets.left - offsets.marginLeft;
      boundaries.right = width + offsets.left;
    } else {
      // for all the other DOM elements, this one is good
      boundaries = offsets;
    }
  }

  // Add paddings
  // (padding may be a number applied to all sides, or a per-side object)
  padding = padding || 0;
  const isPaddingNumber = typeof padding === 'number';
  boundaries.left += isPaddingNumber ? padding : padding.left || 0;
  boundaries.top += isPaddingNumber ? padding : padding.top || 0;
  boundaries.right -= isPaddingNumber ? padding : padding.right || 0;
  boundaries.bottom -= isPaddingNumber ? padding : padding.bottom || 0;

  return boundaries;
}
/**
 * Computes the surface area of a { width, height } rectangle.
 */
function getArea({ width, height }) {
    const area = width * height;
    return area;
}
/**
* Utility used to transform the `auto` placement to the placement with more
* available space.
* @method
* @memberof Popper.Utils
* @argument {Object} data - The data object generated by update method
* @argument {Object} options - Modifiers configuration and options
* @returns {Object} The data object, properly modified
*/
function computeAutoPlacement(placement, refRect, popper, reference, boundariesElement, padding = 0) {
    // Non-auto placements are returned untouched.
    if (placement.indexOf('auto') === -1) {
        return placement;
    }
    const boundaries = getBoundaries(popper, reference, padding, boundariesElement);
    // Free space available on each side of the reference, as width/height rects.
    const rects = {
        top: {
            width: boundaries.width,
            height: refRect.top - boundaries.top
        },
        right: {
            width: boundaries.right - refRect.right,
            height: boundaries.height
        },
        bottom: {
            width: boundaries.width,
            height: boundaries.bottom - refRect.bottom
        },
        left: {
            width: refRect.left - boundaries.left,
            height: boundaries.height
        }
    };
    // Sort the four sides by available area, largest first.
    const sortedAreas = Object.keys(rects).map(key => _extends({
        key
    }, rects[key], {
        area: getArea(rects[key])
    })).sort((a, b) => b.area - a.area);
    // Prefer sides where the popper fits entirely; otherwise take the largest.
    const filteredAreas = sortedAreas.filter(({ width, height }) => width >= popper.clientWidth && height >= popper.clientHeight);
    const computedPlacement = filteredAreas.length > 0 ? filteredAreas[0].key : sortedAreas[0].key;
    // Preserve the `-start`/`-end` variation from the original placement.
    const variation = placement.split('-')[1];
    return computedPlacement + (variation ? `-${variation}` : '');
}
/**
* Get offsets to the reference element
* @method
* @memberof Popper.Utils
* @param {Object} state
* @param {Element} popper - the popper element
* @param {Element} reference - the reference element (the popper will be relative to this)
* @param {Element} fixedPosition - is in fixed position mode
* @returns {Object} An object containing the offsets which will be applied to the popper
*/
function getReferenceOffsets(state, popper, reference, fixedPosition = null) {
    // Fixed-position mode measures against the first transformed ancestor,
    // otherwise against the offset parent shared by popper and reference.
    const offsetParent = fixedPosition
        ? getFixedPositionOffsetParent(popper)
        : findCommonOffsetParent(popper, reference);
    return getOffsetRectRelativeToArbitraryNode(reference, offsetParent, fixedPosition);
}
/**
* Get the outer sizes of the given element (offset size + margins)
* @method
* @memberof Popper.Utils
* @argument {Element} element
* @returns {Object} object containing width and height properties
*/
function getOuterSizes(element) {
    // Read computed margins from the element's own window.
    const win = element.ownerDocument.defaultView;
    const computed = win.getComputedStyle(element);
    const verticalMargins = parseFloat(computed.marginTop || 0) + parseFloat(computed.marginBottom || 0);
    const horizontalMargins = parseFloat(computed.marginLeft || 0) + parseFloat(computed.marginRight || 0);
    // Outer size = border-box offset size plus margins on that axis.
    return {
        width: element.offsetWidth + horizontalMargins,
        height: element.offsetHeight + verticalMargins
    };
}
/**
* Get the opposite placement of the given one
* @method
* @memberof Popper.Utils
* @argument {String} placement
* @returns {String} flipped placement
*/
function getOppositePlacement(placement) {
    // Swap each directional keyword for its mirror; variations are untouched.
    const opposites = { left: 'right', right: 'left', bottom: 'top', top: 'bottom' };
    return placement.replace(/left|right|bottom|top/g, side => opposites[side]);
}
/**
* Get offsets to the popper
* @method
* @memberof Popper.Utils
* @param {Object} position - CSS position the Popper will get applied
* @param {HTMLElement} popper - the popper element
* @param {Object} referenceOffsets - the reference offsets (the popper will be relative to this)
* @param {String} placement - one of the valid placement options
* @returns {Object} popperOffsets - An object containing the offsets which will be applied to the popper
*/
function getPopperOffsets(popper, referenceOffsets, placement) {
    const basePlacement = placement.split('-')[0];
    // Measure the popper node (offset size + margins).
    const popperRect = getOuterSizes(popper);
    const popperOffsets = {
        width: popperRect.width,
        height: popperRect.height
    };
    // left/right placements center the popper vertically against the
    // reference; top/bottom placements center it horizontally.
    const isHoriz = basePlacement === 'right' || basePlacement === 'left';
    const mainSide = isHoriz ? 'top' : 'left';
    const secondarySide = isHoriz ? 'left' : 'top';
    const measurement = isHoriz ? 'height' : 'width';
    const secondaryMeasurement = isHoriz ? 'width' : 'height';
    // Center along the main axis.
    popperOffsets[mainSide] = referenceOffsets[mainSide] + referenceOffsets[measurement] / 2 - popperRect[measurement] / 2;
    // Place before or after the reference along the secondary axis.
    popperOffsets[secondarySide] = basePlacement === secondarySide
        ? referenceOffsets[secondarySide] - popperRect[secondaryMeasurement]
        : referenceOffsets[getOppositePlacement(secondarySide)];
    return popperOffsets;
}
/**
* Mimics the `find` method of Array
* @method
* @memberof Popper.Utils
* @argument {Array} arr
* @argument prop
* @argument value
* @returns index_ori.html or -1
*/
function find(arr, check) {
    // Legacy environments without Array.prototype.find emulate it via filter.
    if (!Array.prototype.find) {
        return arr.filter(check)[0];
    }
    return arr.find(check);
}
/**
* Return the index of the matching object
* @method
* @memberof Popper.Utils
* @argument {Array} arr
* @argument prop
* @argument value
* @returns index_ori.html or -1
*/
function findIndex(arr, prop, value) {
    // Legacy path: emulate findIndex with `find` + `indexOf`.
    if (!Array.prototype.findIndex) {
        const match = find(arr, item => item[prop] === value);
        return arr.indexOf(match);
    }
    return arr.findIndex(item => item[prop] === value);
}
/**
* Loop trough the list of modifiers and run them in order,
* each of them will then edit the data object.
* @method
* @memberof Popper.Utils
* @param {dataObject} data
* @param {Array} modifiers
* @param {String} ends - Optional modifier name used as stopper
* @returns {dataObject}
*/
function runModifiers(modifiers, data, ends) {
    // When `ends` is given, stop just before the modifier with that name;
    // otherwise run the full list.
    const modifiersToRun = ends === undefined ? modifiers : modifiers.slice(0, findIndex(modifiers, 'name', ends));
    modifiersToRun.forEach(modifier => {
        if (modifier['function']) {
            // eslint-disable-line dot-notation
            console.warn('`modifier.function` is deprecated, use `modifier.fn`!');
        }
        // `modifier.function` is the deprecated spelling of `modifier.fn`.
        const fn = modifier['function'] || modifier.fn; // eslint-disable-line dot-notation
        if (modifier.enabled && isFunction(fn)) {
            // Add properties to offsets to make them a complete clientRect object
            // we do this before each modifier to make sure the previous one doesn't
            // mess with these values
            data.offsets.popper = getClientRect(data.offsets.popper);
            data.offsets.reference = getClientRect(data.offsets.reference);
            // Each modifier receives the data object and returns it (possibly mutated).
            data = fn(data, modifier);
        }
    });
    return data;
}
/**
* Updates the position of the popper, computing the new offsets and applying
* the new style.<br />
* Prefer `scheduleUpdate` over `update` because of performance reasons.
* @method
* @memberof Popper
*/
// Rebuilds the popper's position from scratch: fresh `data` object, reference
// offsets, auto-placement resolution, popper offsets, then the modifier chain.
function update() {
    // if popper is destroyed, don't perform any further update
    if (this.state.isDestroyed) {
        return;
    }
    // A brand-new data object is built on every update; modifiers fill it in.
    let data = {
        instance: this,
        styles: {},
        arrowStyles: {},
        attributes: {},
        flipped: false,
        offsets: {}
    };
    // compute reference element offsets
    data.offsets.reference = getReferenceOffsets(this.state, this.popper, this.reference, this.options.positionFixed);
    // compute auto placement, store placement inside the data object,
    // modifiers will be able to edit `placement` if needed
    // and refer to originalPlacement to know the original value
    data.placement = computeAutoPlacement(this.options.placement, data.offsets.reference, this.popper, this.reference, this.options.modifiers.flip.boundariesElement, this.options.modifiers.flip.padding);
    // store the computed placement inside `originalPlacement`
    data.originalPlacement = data.placement;
    data.positionFixed = this.options.positionFixed;
    // compute the popper offsets
    data.offsets.popper = getPopperOffsets(this.popper, data.offsets.reference, data.placement);
    data.offsets.popper.position = this.options.positionFixed ? 'fixed' : 'absolute';
    // run the modifiers
    data = runModifiers(this.modifiers, data);
    // the first `update` will call `onCreate` callback
    // the other ones will call `onUpdate` callback
    if (!this.state.isCreated) {
        this.state.isCreated = true;
        this.options.onCreate(data);
    } else {
        this.options.onUpdate(data);
    }
}
/**
* Helper used to know if the given modifier is enabled.
* @method
* @memberof Popper.Utils
* @returns {Boolean}
*/
function isModifierEnabled(modifiers, modifierName) {
    // Linear scan: true when a modifier with the given name is enabled.
    for (const modifier of modifiers) {
        if (modifier.enabled && modifier.name === modifierName) {
            return true;
        }
    }
    return false;
}
/**
* Get the prefixed supported property name
* @method
* @memberof Popper.Utils
* @argument {String} property (camelCase)
* @returns {String} prefixed property (camelCase or PascalCase, depending on the vendor prefix)
*/
function getSupportedPropertyName(property) {
    const upperProp = property.charAt(0).toUpperCase() + property.slice(1);
    // Probe the unprefixed name first (prefix === false), then each vendor prefix.
    for (const prefix of [false, 'ms', 'Webkit', 'Moz', 'O']) {
        const candidate = prefix ? `${prefix}${upperProp}` : property;
        if (typeof document.body.style[candidate] !== 'undefined') {
            return candidate;
        }
    }
    // Property unsupported under any prefix.
    return null;
}
/**
* Destroys the popper.
* @method
* @memberof Popper
*/
// Tears the popper down: marks it destroyed, reverts inline styles that
// `applyStyle` added, detaches listeners, and optionally removes the node.
function destroy() {
    this.state.isDestroyed = true;
    // touch DOM only if `applyStyle` modifier is enabled
    if (isModifierEnabled(this.modifiers, 'applyStyle')) {
        this.popper.removeAttribute('x-placement');
        this.popper.style.position = '';
        this.popper.style.top = '';
        this.popper.style.left = '';
        this.popper.style.right = '';
        this.popper.style.bottom = '';
        this.popper.style.willChange = '';
        // reset the (possibly vendor-prefixed) transform property too
        this.popper.style[getSupportedPropertyName('transform')] = '';
    }
    this.disableEventListeners();
    // remove the popper if user explicitly asked for the deletion on destroy
    // do not use `remove` because IE11 doesn't support it
    if (this.options.removeOnDestroy) {
        this.popper.parentNode.removeChild(this.popper);
    }
    // returned for chaining
    return this;
}
/**
* Get the window associated with the element
* @argument {Element} element
* @returns {Window}
*/
function getWindow(element) {
    // Detached nodes have no ownerDocument; fall back to the global window.
    const { ownerDocument } = element;
    return ownerDocument ? ownerDocument.defaultView : window;
}
function attachToScrollParents(scrollParent, event, callback, scrollParents) {
    // BODY scroll events are reported on the window, not the body element.
    const isBody = scrollParent.nodeName === 'BODY';
    const listenerTarget = isBody ? scrollParent.ownerDocument.defaultView : scrollParent;
    listenerTarget.addEventListener(event, callback, { passive: true });
    if (!isBody) {
        // Recurse upward until the BODY element is reached.
        attachToScrollParents(getScrollParent(listenerTarget.parentNode), event, callback, scrollParents);
    }
    // Remember every target so the listeners can be removed later.
    scrollParents.push(listenerTarget);
}
/**
* Setup needed event listeners used to update the popper position
* @method
* @memberof Popper.Utils
* @private
*/
function setupEventListeners(reference, options, state, updateBound) {
    // Resize event listener on window
    state.updateBound = updateBound;
    getWindow(reference).addEventListener('resize', state.updateBound, { passive: true });
    // Scroll event listener on scroll parents
    const scrollElement = getScrollParent(reference);
    attachToScrollParents(scrollElement, 'scroll', state.updateBound, state.scrollParents);
    // Record what was attached so removeEventListeners can undo it.
    state.scrollElement = scrollElement;
    state.eventsEnabled = true;
    return state;
}
/**
* It will add resize/scroll events and start recalculating
* position of the popper element when they are triggered.
* @method
* @memberof Popper
*/
function enableEventListeners() {
    // Idempotent: do nothing when listeners are already attached.
    if (this.state.eventsEnabled) {
        return;
    }
    this.state = setupEventListeners(this.reference, this.options, this.state, this.scheduleUpdate);
}
/**
* Remove event listeners used to update the popper position
* @method
* @memberof Popper.Utils
* @private
*/
function removeEventListeners(reference, state) {
    // Drop the window resize listener.
    getWindow(reference).removeEventListener('resize', state.updateBound);
    // Drop the scroll listeners from every recorded scroll parent.
    for (const target of state.scrollParents) {
        target.removeEventListener('scroll', state.updateBound);
    }
    // Reset the listener bookkeeping on the state object.
    state.updateBound = null;
    state.scrollParents = [];
    state.scrollElement = null;
    state.eventsEnabled = false;
    return state;
}
/**
* It will remove resize/scroll events and won't recalculate popper position
* when they are triggered. It also won't trigger `onUpdate` callback anymore,
* unless you call `update` method manually.
* @method
* @memberof Popper
*/
function disableEventListeners() {
    // Idempotent: nothing to do when listeners are already detached.
    if (!this.state.eventsEnabled) {
        return;
    }
    // Also cancel any pending scheduled update frame.
    cancelAnimationFrame(this.scheduleUpdate);
    this.state = removeEventListeners(this.reference, this.state);
}
/**
* Tells if a given input is a number
* @method
* @memberof Popper.Utils
* @param {*} input to check
* @return {Boolean}
*/
function isNumeric(n) {
    // The empty string coerces to 0, so reject it explicitly.
    if (n === '') {
        return false;
    }
    return !isNaN(parseFloat(n)) && isFinite(n);
}
/**
* Set the style to the given popper
* @method
* @memberof Popper.Utils
* @argument {Element} element - Element to apply the style to
* @argument {Object} styles
* Object with a list of properties and values which will be applied to the element
*/
function setStyles(element, styles) {
    const unitProps = ['width', 'height', 'top', 'right', 'bottom', 'left'];
    for (const prop of Object.keys(styles)) {
        // Bare numeric values of box properties are treated as pixel amounts.
        const needsUnit = unitProps.indexOf(prop) !== -1 && isNumeric(styles[prop]);
        element.style[prop] = styles[prop] + (needsUnit ? 'px' : '');
    }
}
/**
* Set the attributes to the given popper
* @method
* @memberof Popper.Utils
* @argument {Element} element - Element to apply the attributes to
* @argument {Object} styles
* Object with a list of properties and values which will be applied to the element
*/
function setAttributes(element, attributes) {
    for (const prop of Object.keys(attributes)) {
        const value = attributes[prop];
        // A literal `false` means "remove the attribute" rather than set it.
        if (value === false) {
            element.removeAttribute(prop);
        } else {
            element.setAttribute(prop, value);
        }
    }
}
/**
* @function
* @memberof Modifiers
* @argument {Object} data - The data object generated by `update` method
* @argument {Object} data.styles - List of style properties - values to apply to popper element
* @argument {Object} data.attributes - List of attribute properties - values to apply to popper element
* @argument {Object} options - Modifiers configuration and options
* @returns {Object} The same data object
*/
function applyStyle(data) {
    // The declaration's name was lost (`function` ... `(data) {` with a gap),
    // which is a SyntaxError; restored as `applyStyle` — the name the rest of
    // this file uses for this modifier (see isModifierEnabled(..., 'applyStyle')
    // in destroy() and the 'applyStyle' lookup in computeStyle()).
    //
    // any property present in `data.styles` will be applied to the popper,
    // in this way we can make the 3rd party modifiers add custom styles to it
    // Be aware, modifiers could override the properties defined in the previous
    // lines of this modifier!
    setStyles(data.instance.popper, data.styles);
    // any property present in `data.attributes` will be applied to the popper,
    // they will be set as HTML attributes of the element
    setAttributes(data.instance.popper, data.attributes);
    // if arrowElement is defined and arrowStyles has some properties
    if (data.arrowElement && Object.keys(data.arrowStyles).length) {
        setStyles(data.arrowElement, data.arrowStyles);
    }
    // modifiers must hand the data object back to the chain
    return data;
}
/**
* Set the x-placement attribute before everything else because it could be used
* to add margins to the popper margins needs to be calculated to get the
* correct popper offsets.
* @method
* @memberof Popper.modifiers
* @param {HTMLElement} reference - The reference element used to position the popper
* @param {HTMLElement} popper - The HTML element used as popper
* @param {Object} options - Popper.js options
*/
// Note: `modifierOptions` is accepted for the modifier-onLoad signature but
// not used in this body; `options` is returned unchanged.
function applyStyleOnLoad(reference, popper, options, modifierOptions, state) {
    // compute reference element offsets
    const referenceOffsets = getReferenceOffsets(state, popper, reference, options.positionFixed);
    // compute auto placement, store placement inside the data object,
    // modifiers will be able to edit `placement` if needed
    // and refer to originalPlacement to know the original value
    const placement = computeAutoPlacement(options.placement, referenceOffsets, popper, reference, options.modifiers.flip.boundariesElement, options.modifiers.flip.padding);
    popper.setAttribute('x-placement', placement);
    // Apply `position` to popper before anything else because
    // without the position applied we can't guarantee correct computations
    setStyles(popper, { position: options.positionFixed ? 'fixed' : 'absolute' });
    return options;
}
/**
* @function
* @memberof Popper.Utils
* @argument {Object} data - The data object generated by `update` method
* @argument {Boolean} shouldRound - If the offsets should be rounded at all
* @returns {Object} The popper's position offsets rounded
*
* The tale of pixel-perfect positioning. It's still not 100% perfect, but as
* good as it can be within reason.
* Discussion here: https://github.com/FezVrasta/popper.js/pull/715
*
* Low DPI screens cause a popper to be blurry if not using full pixels (Safari
* as well on High DPI screens).
*
* Firefox prefers no rounding for positioning and does not have blurriness on
* high DPI screens.
*
* Only horizontal placement and left/right values need to be considered.
*/
function getRoundedOffsets(data, shouldRound) {
    const { popper, reference } = data.offsets;
    const referenceWidth = Math.round(reference.width);
    const popperWidth = Math.round(popper.width);
    const isVertical = data.placement === 'left' || data.placement === 'right';
    const isVariation = data.placement.indexOf('-') !== -1;
    const sameWidthParity = referenceWidth % 2 === popperWidth % 2;
    const bothOddWidth = referenceWidth % 2 === 1 && popperWidth % 2 === 1;
    // Pick the integer-conversion strategy for each axis.
    let horizontalToInteger;
    let verticalToInteger;
    if (!shouldRound) {
        const identity = v => v;
        horizontalToInteger = identity;
        verticalToInteger = identity;
    } else {
        horizontalToInteger = isVertical || isVariation || sameWidthParity ? Math.round : Math.floor;
        verticalToInteger = Math.round;
    }
    // When both widths are odd (and no variation), nudge one pixel left to
    // keep the popper visually centered (see popper.js PR #715 discussion).
    const rawLeft = bothOddWidth && !isVariation && shouldRound ? popper.left - 1 : popper.left;
    return {
        left: horizontalToInteger(rawLeft),
        top: verticalToInteger(popper.top),
        bottom: verticalToInteger(popper.bottom),
        right: horizontalToInteger(popper.right)
    };
}
// UA sniff used by computeStyle below: Firefox opts out of pixel rounding
// (it renders sharply without it — see the getRoundedOffsets doc comment).
const isFirefox = isBrowser && /Firefox/i.test(navigator.userAgent);
/**
* @function
* @memberof Modifiers
* @argument {Object} data - The data object generated by `update` method
* @argument {Object} options - Modifiers configuration and options
* @returns {Object} The data object, properly modified
*/
// Turns the computed offsets into concrete CSS styles/attributes on `data`,
// choosing between translate3d (GPU) and top/left/bottom/right positioning.
function computeStyle(data, options) {
    const { x, y } = options;
    const { popper } = data.offsets;
    // Remove this legacy support in Popper.js v2
    const legacyGpuAccelerationOption = find(data.instance.modifiers, modifier => modifier.name === 'applyStyle').gpuAcceleration;
    if (legacyGpuAccelerationOption !== undefined) {
        console.warn('WARNING: `gpuAcceleration` option moved to `computeStyle` modifier and will not be supported in future versions of Popper.js!');
    }
    const gpuAcceleration = legacyGpuAccelerationOption !== undefined ? legacyGpuAccelerationOption : options.gpuAcceleration;
    const offsetParent = getOffsetParent(data.instance.popper);
    const offsetParentRect = getBoundingClientRect(offsetParent);
    // Styles
    const styles = {
        position: popper.position
    };
    // Round to whole pixels on low-DPI screens and everywhere except Firefox
    // (see getRoundedOffsets' doc comment for the rationale).
    const offsets = getRoundedOffsets(data, window.devicePixelRatio < 2 || !isFirefox);
    // `x`/`y` choose which edge the popper is anchored to (offset origin).
    const sideA = x === 'bottom' ? 'top' : 'bottom';
    const sideB = y === 'right' ? 'left' : 'right';
    // if gpuAcceleration is set to `true` and transform is supported,
    // we use `translate3d` to apply the position to the popper we
    // automatically use the supported prefixed version if needed
    const prefixedProperty = getSupportedPropertyName('transform');
    // now, let's make a step back and look at this code closely (wtf?)
    // If the content of the popper grows once it's been positioned, it
    // may happen that the popper gets misplaced because of the new content
    // overflowing its reference element
    // To avoid this problem, we provide two options (x and y), which allow
    // the consumer to define the offset origin.
    // If we position a popper on top of a reference element, we can set
    // `x` to `top` to make the popper grow towards its top instead of
    // its bottom.
    let left, top;
    if (sideA === 'bottom') {
        // when offsetParent is <html> the positioning is relative to the bottom of the screen (excluding the scrollbar)
        // and not the bottom of the html element
        if (offsetParent.nodeName === 'HTML') {
            top = -offsetParent.clientHeight + offsets.bottom;
        } else {
            top = -offsetParentRect.height + offsets.bottom;
        }
    } else {
        top = offsets.top;
    }
    if (sideB === 'right') {
        if (offsetParent.nodeName === 'HTML') {
            left = -offsetParent.clientWidth + offsets.right;
        } else {
            left = -offsetParentRect.width + offsets.right;
        }
    } else {
        left = offsets.left;
    }
    if (gpuAcceleration && prefixedProperty) {
        styles[prefixedProperty] = `translate3d(${left}px, ${top}px, 0)`;
        styles[sideA] = 0;
        styles[sideB] = 0;
        styles.willChange = 'transform';
    } else {
        // otherwise, we use the standard `top`, `left`, `bottom` and `right` properties
        const invertTop = sideA === 'bottom' ? -1 : 1;
        const invertLeft = sideB === 'right' ? -1 : 1;
        styles[sideA] = top * invertTop;
        styles[sideB] = left * invertLeft;
        styles.willChange = `${sideA}, ${sideB}`;
    }
    // Attributes
    const attributes = {
        'x-placement': data.placement
    };
    // Update `data` attributes, styles and arrowStyles
    // (pre-existing values on `data` win over the ones computed here)
    data.attributes = _extends({}, attributes, data.attributes);
    data.styles = _extends({}, styles, data.styles);
    data.arrowStyles = _extends({}, data.offsets.arrow, data.arrowStyles);
    return data;
}
/**
* Helper used to know if the given modifier depends from another one.<br />
* It checks if the needed modifier is listed and enabled.
* @method
* @memberof Popper.Utils
* @param {Array} modifiers - list of modifiers
* @param {String} requestingName - name of requesting modifier
* @param {String} requestedName - name of requested modifier
* @returns {Boolean}
*/
function isModifierRequired(modifiers, requestingName, requestedName) {
    const requesting = find(modifiers, ({ name }) => name === requestingName);
    let isRequired = false;
    if (requesting) {
        // The requested modifier must be enabled AND run earlier (lower order).
        isRequired = modifiers.some(modifier => modifier.name === requestedName && modifier.enabled && modifier.order < requesting.order);
    }
    if (!isRequired) {
        const requestingLabel = `\`${requestingName}\``;
        const requestedLabel = `\`${requestedName}\``;
        console.warn(`${requestedLabel} modifier is required by ${requestingLabel} modifier in order to work, be sure to include it before ${requestingLabel}!`);
    }
    return isRequired;
}
/**
* @function
* @memberof Modifiers
* @argument {Object} data - The data object generated by update method
* @argument {Object} options - Modifiers configuration and options
* @returns {Object} The data object, properly modified
*/
function arrow(data, options) {
    // arrow depends on keepTogether in order to work
    if (!isModifierRequired(data.instance.modifiers, 'arrow', 'keepTogether')) {
        return data;
    }
    let arrowElement = options.element;
    // if arrowElement is a string, suppose it's a CSS selector
    if (typeof arrowElement === 'string') {
        arrowElement = data.instance.popper.querySelector(arrowElement);
        // if arrowElement is not found, don't run the modifier
        if (!arrowElement) {
            return data;
        }
    } else {
        // if the arrowElement isn't a query selector we must check that the
        // provided DOM node is child of its popper node
        if (!data.instance.popper.contains(arrowElement)) {
            console.warn('WARNING: `arrow.element` must be child of its popper element!');
            return data;
        }
    }
    const placement = data.placement.split('-')[0];
    const { popper, reference } = data.offsets;
    // For left/right placements the arrow slides vertically; for top/bottom
    // placements it slides horizontally.
    const isVertical = ['left', 'right'].indexOf(placement) !== -1;
    const len = isVertical ? 'height' : 'width';
    const sideCapitalized = isVertical ? 'Top' : 'Left';
    const side = sideCapitalized.toLowerCase();
    const altSide = isVertical ? 'left' : 'top';
    const opSide = isVertical ? 'bottom' : 'right';
    const arrowElementSize = getOuterSizes(arrowElement)[len];
    //
    // extends keepTogether behavior making sure the popper and its
    // reference have enough pixels in conjunction
    //
    // top/left side
    if (reference[opSide] - arrowElementSize < popper[side]) {
        data.offsets.popper[side] -= popper[side] - (reference[opSide] - arrowElementSize);
    }
    // bottom/right side
    if (reference[side] + arrowElementSize > popper[opSide]) {
        data.offsets.popper[side] += reference[side] + arrowElementSize - popper[opSide];
    }
    data.offsets.popper = getClientRect(data.offsets.popper);
    // compute center of the popper
    const center = reference[side] + reference[len] / 2 - arrowElementSize / 2;
    // Compute the sideValue using the updated popper offsets
    // take popper margin in account because we don't have this info available
    const css = getStyleComputedProperty(data.instance.popper);
    // FIX: parseFloat takes a single argument; the stray `, 10` radix argument
    // (a parseInt-ism) was dropped — runtime behavior is unchanged.
    const popperMarginSide = parseFloat(css[`margin${sideCapitalized}`]);
    const popperBorderSide = parseFloat(css[`border${sideCapitalized}Width`]);
    let sideValue = center - data.offsets.popper[side] - popperMarginSide - popperBorderSide;
    // prevent arrowElement from being placed not contiguously to its popper
    sideValue = Math.max(Math.min(popper[len] - arrowElementSize, sideValue), 0);
    data.arrowElement = arrowElement;
    data.offsets.arrow = {
        [side]: Math.round(sideValue),
        [altSide]: '' // make sure to unset any eventual altSide value from the DOM node
    };
    return data;
}
/**
* Get the opposite placement variation of the given one
* @method
* @memberof Popper.Utils
* @argument {String} placement variation
* @returns {String} flipped placement variation
*/
function getOppositeVariation(variation) {
    // 'start' and 'end' swap; anything else passes through unchanged.
    switch (variation) {
        case 'end':
            return 'start';
        case 'start':
            return 'end';
        default:
            return variation;
    }
}
/**
* List of accepted placements to use as values of the `placement` option.<br />
* Valid placements are:
* - `auto`
* - `top`
* - `right`
* - `bottom`
* - `left`
*
* Each placement can have a variation from this list:
* - `-start`
* - `-end`
*
* Variations are interpreted easily if you think of them as the left to right
* written languages. Horizontally (`top` and `bottom`), `start` is left and `end`
* is right.<br />
* Vertically (`left` and `right`), `start` is top and `end` is bottom.
*
* Some valid examples are:
* - `top-end` (on top of reference, right aligned)
* - `right-start` (on right of reference, top aligned)
* - `bottom` (on bottom, centered)
* - `auto-end` (on the side with more space available, alignment depends by placement)
*
* @static
* @type {Array}
* @enum {String}
* @readonly
* @method placements
* @memberof Popper
*/
var placements = ['auto-start', 'auto', 'auto-end', 'top-start', 'top', 'top-end', 'right-start', 'right', 'right-end', 'bottom-end', 'bottom', 'bottom-start', 'left-end', 'left', 'left-start'];
// Get rid of `auto` `auto-start` and `auto-end` (the first three entries):
// `validPlacements` keeps only the concrete placements used by `clockwise`.
const validPlacements = placements.slice(3);
/**
* Given an initial placement, returns all the subsequent placements
* clockwise (or counter-clockwise).
*
* @method
* @memberof Popper.Utils
* @argument {String} placement - A valid placement (it accepts variations)
* @argument {Boolean} counter - Set to true to walk the placements counterclockwise
* @returns {Array} placements including their variations
*/
function clockwise(placement, counter = false) {
    const index = validPlacements.indexOf(placement);
    // Rotate the list so it starts just after `placement` (which is excluded).
    const rotated = [
        ...validPlacements.slice(index + 1),
        ...validPlacements.slice(0, index)
    ];
    return counter ? rotated.reverse() : rotated;
}
// Recognized values for the `flip` modifier's `behavior` option; any other
// value is treated by `flip` as an explicit array of placements.
const BEHAVIORS = {
    FLIP: 'flip',
    CLOCKWISE: 'clockwise',
    COUNTERCLOCKWISE: 'counterclockwise'
};
/**
* @function
* @memberof Modifiers
* @argument {Object} data - The data object generated by update method
* @argument {Object} options - Modifiers configuration and options
* @returns {Object} The data object, properly modified
*/
// Flips the popper to the opposite (or next) placement when it overlaps the
// reference or overflows its boundaries; may also flip the -start/-end variation.
function flip(data, options) {
    // if `inner` modifier is enabled, we can't use the `flip` modifier
    if (isModifierEnabled(data.instance.modifiers, 'inner')) {
        return data;
    }
    if (data.flipped && data.placement === data.originalPlacement) {
        // seems like flip is trying to loop, probably there's not enough space on any of the flippable sides
        return data;
    }
    const boundaries = getBoundaries(data.instance.popper, data.instance.reference, options.padding, options.boundariesElement, data.positionFixed);
    let placement = data.placement.split('-')[0];
    let placementOpposite = getOppositePlacement(placement);
    let variation = data.placement.split('-')[1] || '';
    // Candidate placements to try, in order, driven by `options.behavior`.
    let flipOrder = [];
    switch (options.behavior) {
        case BEHAVIORS.FLIP:
            flipOrder = [placement, placementOpposite];
            break;
        case BEHAVIORS.CLOCKWISE:
            flipOrder = clockwise(placement);
            break;
        case BEHAVIORS.COUNTERCLOCKWISE:
            flipOrder = clockwise(placement, true);
            break;
        default:
            // any other value is assumed to be an explicit array of placements
            flipOrder = options.behavior;
    }
    flipOrder.forEach((step, index) => {
        // NOTE: this `return data` only exits the forEach callback (acts as
        // `continue`); steps other than the current placement, and the last
        // step, are skipped.
        if (placement !== step || flipOrder.length === index + 1) {
            return data;
        }
        placement = data.placement.split('-')[0];
        placementOpposite = getOppositePlacement(placement);
        const popperOffsets = data.offsets.popper;
        const refOffsets = data.offsets.reference;
        // using floor because the reference offsets may contain decimals we are not going to consider here
        const floor = Math.floor;
        const overlapsRef = placement === 'left' && floor(popperOffsets.right) > floor(refOffsets.left) || placement === 'right' && floor(popperOffsets.left) < floor(refOffsets.right) || placement === 'top' && floor(popperOffsets.bottom) > floor(refOffsets.top) || placement === 'bottom' && floor(popperOffsets.top) < floor(refOffsets.bottom);
        const overflowsLeft = floor(popperOffsets.left) < floor(boundaries.left);
        const overflowsRight = floor(popperOffsets.right) > floor(boundaries.right);
        const overflowsTop = floor(popperOffsets.top) < floor(boundaries.top);
        const overflowsBottom = floor(popperOffsets.bottom) > floor(boundaries.bottom);
        const overflowsBoundaries = placement === 'left' && overflowsLeft || placement === 'right' && overflowsRight || placement === 'top' && overflowsTop || placement === 'bottom' && overflowsBottom;
        // flip the variation if required
        const isVertical = ['top', 'bottom'].indexOf(placement) !== -1;
        // flips variation if reference element overflows boundaries
        const flippedVariationByRef = !!options.flipVariations && (isVertical && variation === 'start' && overflowsLeft || isVertical && variation === 'end' && overflowsRight || !isVertical && variation === 'start' && overflowsTop || !isVertical && variation === 'end' && overflowsBottom);
        // flips variation if popper content overflows boundaries
        const flippedVariationByContent = !!options.flipVariationsByContent && (isVertical && variation === 'start' && overflowsRight || isVertical && variation === 'end' && overflowsLeft || !isVertical && variation === 'start' && overflowsBottom || !isVertical && variation === 'end' && overflowsTop);
        const flippedVariation = flippedVariationByRef || flippedVariationByContent;
        if (overlapsRef || overflowsBoundaries || flippedVariation) {
            // this boolean to detect any flip loop
            data.flipped = true;
            if (overlapsRef || overflowsBoundaries) {
                placement = flipOrder[index + 1];
            }
            if (flippedVariation) {
                variation = getOppositeVariation(variation);
            }
            data.placement = placement + (variation ? '-' + variation : '');
            // this object contains `position`, we want to preserve it along with
            // any additional property we may add in the future
            data.offsets.popper = _extends({}, data.offsets.popper, getPopperOffsets(data.instance.popper, data.offsets.reference, data.placement));
            // re-run the earlier modifiers (up to `flip`) with the new placement
            data = runModifiers(data.instance.modifiers, data, 'flip');
        }
    });
    return data;
}
/**
* @function
* @memberof Modifiers
* @argument {Object} data - The data object generated by update method
* @argument {Object} options - Modifiers configuration and options
* @returns {Object} The data object, properly modified
*/
function keepTogether(data) {
const { popper, reference } = data.offsets;
const placement = data.placement.split('-')[0];
const floor = Math.floor;
const isVertical = ['top', 'bottom'].indexOf(placement) !== -1;
const side = isVertical ? 'right' : 'bottom';
const opSide = isVertical ? 'left' : 'top';
const measurement = isVertical ? 'width' : 'height';
if (popper[side] < floor(reference[opSide])) {
data.offsets.popper[opSide] = floor(reference[opSide]) - popper[measurement];
}
if (popper[opSide] > floor(reference[side])) {
data.offsets.popper[opSide] = floor(reference[side]);
}
return data;
}
/**
 * Converts a string containing value + unit into a px value number
 * @function
 * @memberof {modifiers~offset}
 * @private
 * @argument {String} str - Value + unit string
 * @argument {String} measurement - `height` or `width`
 * @argument {Object} popperOffsets
 * @argument {Object} referenceOffsets
 * @returns {Number|String}
 * Value in pixels, or original string if no values were extracted
 */
function toValue(str, measurement, popperOffsets, referenceOffsets) {
  // Separate the numeric part of the fragment from its trailing unit.
  const match = str.match(/((?:\-|\+)?\d*\.?\d*)(.*)/);
  const value = +match[1];
  const unit = match[2];
  // A fragment with no numeric part is an operator (`+`/`-`): pass it through.
  if (!value) {
    return str;
  }
  if (unit.indexOf('%') === 0) {
    // `%p` is relative to the popper; `%`, `%r` and anything else resolve
    // against the reference element.
    const element = unit === '%p' ? popperOffsets : referenceOffsets;
    const rect = getClientRect(element);
    return rect[measurement] / 100 * value;
  }
  if (unit === 'vh' || unit === 'vw') {
    // Viewport units are resolved against the current viewport size.
    const size = unit === 'vh'
      ? Math.max(document.documentElement.clientHeight, window.innerHeight || 0)
      : Math.max(document.documentElement.clientWidth, window.innerWidth || 0);
    return size / 100 * value;
  }
  // Explicit `px` or an implicit unit: the numeric value is already pixels.
  return value;
}
/**
 * Parse an `offset` string to extrapolate `x` and `y` numeric offsets.
 * @function
 * @memberof {modifiers~offset}
 * @private
 * @argument {String} offset
 * @argument {Object} popperOffsets
 * @argument {Object} referenceOffsets
 * @argument {String} basePlacement
 * @returns {Array} a two cells array with x and y offsets in numbers
 */
function parseOffset(offset, popperOffsets, referenceOffsets, basePlacement) {
  const offsets = [0, 0];
  // Use height if placement is left or right and index is 0 otherwise use width
  // in this way the first offset will use an axis and the second one
  // will use the other one
  const useHeight = ['right', 'left'].indexOf(basePlacement) !== -1;
  // Split the offset string to obtain a list of values and operands
  // The regex addresses values with the plus or minus sign in front (+10, -20, etc)
  const fragments = offset.split(/(\+|\-)/).map(frag => frag.trim());
  // Detect if the offset string contains a pair of values or a single one
  // they could be separated by comma or space
  const divider = fragments.indexOf(find(fragments, frag => frag.search(/,|\s/) !== -1));
  if (fragments[divider] && fragments[divider].indexOf(',') === -1) {
    console.warn('Offsets separated by white space(s) are deprecated, use a comma (,) instead.');
  }
  // If a divider is found, split the list of values and operands into
  // the X offset list and the Y offset list.
  const splitRegex = /\s*,\s*|\s+/;
  let ops = divider !== -1 ? [fragments.slice(0, divider).concat([fragments[divider].split(splitRegex)[0]]), [fragments[divider].split(splitRegex)[1]].concat(fragments.slice(divider + 1))] : [fragments];
  // Convert the values with units to absolute pixels to allow our computations
  ops = ops.map((op, index) => {
    // Most of the units rely on the orientation of the popper
    const measurement = (index === 1 ? !useHeight : useHeight) ? 'height' : 'width';
    let mergeWithPrevious = false;
    return op
    // This aggregates any `+` or `-` sign that aren't considered operators
    // e.g.: 10 + +5 => [10, +, +5]
    .reduce((a, b) => {
      if (a[a.length - 1] === '' && ['+', '-'].indexOf(b) !== -1) {
        a[a.length - 1] = b;
        mergeWithPrevious = true;
        return a;
      } else if (mergeWithPrevious) {
        a[a.length - 1] += b;
        mergeWithPrevious = false;
        return a;
      } else {
        return a.concat(b);
      }
    }, [])
    // Here we convert the string values into number values (in px)
    .map(str => toValue(str, measurement, popperOffsets, referenceOffsets));
  });
  // Loop through the offsets arrays and execute the operations
  ops.forEach((op, index) => {
    op.forEach((frag, index2) => {
      if (isNumeric(frag)) {
        // A `-` operand in front of the fragment negates it.
        offsets[index] += frag * (op[index2 - 1] === '-' ? -1 : 1);
      }
    });
  });
  return offsets;
}
/**
 * @function
 * @memberof Modifiers
 * @argument {Object} data - The data object generated by update method
 * @argument {Object} options - Modifiers configuration and options
 * @argument {Number|String} options.offset=0
 * The offset value as described in the modifier description
 * @returns {Object} The data object, properly modified
 */
function offset(data, { offset }) {
  const { placement, offsets: { popper, reference } } = data;
  const basePlacement = placement.split('-')[0];
  // A plain number applies only to the main axis; a string may describe
  // both axes and mix units, so it goes through the full parser.
  const offsets = isNumeric(+offset)
    ? [+offset, 0]
    : parseOffset(offset, popper, reference, basePlacement);
  // offsets[0] moves the popper along the secondary axis, offsets[1]
  // pushes it away from (or towards) the reference on the main axis.
  switch (basePlacement) {
    case 'left':
      popper.top += offsets[0];
      popper.left -= offsets[1];
      break;
    case 'right':
      popper.top += offsets[0];
      popper.left += offsets[1];
      break;
    case 'top':
      popper.left += offsets[0];
      popper.top -= offsets[1];
      break;
    case 'bottom':
      popper.left += offsets[0];
      popper.top += offsets[1];
      break;
  }
  data.popper = popper;
  return data;
}
/**
 * @function
 * @memberof Modifiers
 * @argument {Object} data - The data object generated by `update` method
 * @argument {Object} options - Modifiers configuration and options
 * @returns {Object} The data object, properly modified
 */
function preventOverflow(data, options) {
  let boundariesElement = options.boundariesElement || getOffsetParent(data.instance.popper);
  // If offsetParent is the reference element, we really want to
  // go one step up and use the next offsetParent as reference to
  // avoid to make this modifier completely useless and look like broken
  if (data.instance.reference === boundariesElement) {
    boundariesElement = getOffsetParent(boundariesElement);
  }
  // NOTE: DOM access here
  // resets the popper's position so that the document size can be calculated excluding
  // the size of the popper element itself
  const transformProp = getSupportedPropertyName('transform');
  const popperStyles = data.instance.popper.style; // assignment to help minification
  const { top, left, [transformProp]: transform } = popperStyles;
  popperStyles.top = '';
  popperStyles.left = '';
  popperStyles[transformProp] = '';
  const boundaries = getBoundaries(data.instance.popper, data.instance.reference, options.padding, boundariesElement, data.positionFixed);
  // NOTE: DOM access here
  // restores the original style properties after the offsets have been computed
  popperStyles.top = top;
  popperStyles.left = left;
  popperStyles[transformProp] = transform;
  // Expose the computed boundaries so later modifiers (e.g. `hide`) can reuse them.
  options.boundaries = boundaries;
  const order = options.priority;
  let popper = data.offsets.popper;
  const check = {
    // Primary sides (left/top): clamp so the popper doesn't start before the boundary.
    primary(placement) {
      let value = popper[placement];
      if (popper[placement] < boundaries[placement] && !options.escapeWithReference) {
        value = Math.max(popper[placement], boundaries[placement]);
      }
      return { [placement]: value };
    },
    // Secondary sides (right/bottom): clamp the opposite edge so the popper
    // doesn't extend past the boundary.
    secondary(placement) {
      const mainSide = placement === 'right' ? 'left' : 'top';
      let value = popper[mainSide];
      if (popper[placement] > boundaries[placement] && !options.escapeWithReference) {
        value = Math.min(popper[mainSide], boundaries[placement] - (placement === 'right' ? popper.width : popper.height));
      }
      return { [mainSide]: value };
    }
  };
  order.forEach(placement => {
    const side = ['left', 'top'].indexOf(placement) !== -1 ? 'primary' : 'secondary';
    popper = _extends({}, popper, check[side](placement));
  });
  data.offsets.popper = popper;
  return data;
}
/**
 * @function
 * @memberof Modifiers
 * @argument {Object} data - The data object generated by `update` method
 * @argument {Object} options - Modifiers configuration and options
 * @returns {Object} The data object, properly modified
 */
function shift(data) {
  const [basePlacement, shiftVariation] = data.placement.split('-');
  // Only placements carrying a `-start` / `-end` variation need shifting.
  if (shiftVariation) {
    const { reference, popper } = data.offsets;
    const vertical = basePlacement === 'bottom' || basePlacement === 'top';
    const side = vertical ? 'left' : 'top';
    const measurement = vertical ? 'width' : 'height';
    // `start` aligns the popper with the reference's leading edge,
    // `end` with its trailing edge.
    const shiftOffsets = {
      start: { [side]: reference[side] },
      end: { [side]: reference[side] + reference[measurement] - popper[measurement] }
    };
    data.offsets.popper = _extends({}, popper, shiftOffsets[shiftVariation]);
  }
  return data;
}
/**
 * @function
 * @memberof Modifiers
 * @argument {Object} data - The data object generated by update method
 * @argument {Object} options - Modifiers configuration and options
 * @returns {Object} The data object, properly modified
 */
function hide(data) {
  if (!isModifierRequired(data.instance.modifiers, 'hide', 'preventOverflow')) {
    return data;
  }
  const refRect = data.offsets.reference;
  const bound = find(data.instance.modifiers, modifier => modifier.name === 'preventOverflow').boundaries;
  // The reference is considered hidden once it sits fully outside the
  // boundaries computed by `preventOverflow` on any side.
  const isHidden =
    refRect.bottom < bound.top ||
    refRect.left > bound.right ||
    refRect.top > bound.bottom ||
    refRect.right < bound.left;
  // Avoid unnecessary DOM access if visibility hasn't changed.
  if (data.hide === isHidden) {
    return data;
  }
  data.hide = isHidden;
  data.attributes['x-out-of-boundaries'] = isHidden ? '' : false;
  return data;
}
/**
 * @function
 * @memberof Modifiers
 * @argument {Object} data - The data object generated by `update` method
 * @argument {Object} options - Modifiers configuration and options
 * @returns {Object} The data object, properly modified
 */
function inner(data) {
  const placement = data.placement;
  const basePlacement = placement.split('-')[0];
  const { popper, reference } = data.offsets;
  const isHoriz = basePlacement === 'left' || basePlacement === 'right';
  // For `bottom`/`right` placements the popper's own length must be
  // subtracted so it ends up overlapping the reference from the inside.
  const subtractLength = basePlacement !== 'top' && basePlacement !== 'left';
  const axis = isHoriz ? 'left' : 'top';
  const length = isHoriz ? 'width' : 'height';
  popper[axis] = reference[basePlacement] - (subtractLength ? popper[length] : 0);
  data.placement = getOppositePlacement(placement);
  data.offsets.popper = getClientRect(popper);
  return data;
}
/**
* Modifier function, each modifier can have a function of this type assigned
* to its `fn` property.<br />
* These functions will be called on each update, this means that you must
* make sure they are performant enough to avoid performance bottlenecks.
*
* @function ModifierFn
* @argument {dataObject} data - The data object generated by `update` method
* @argument {Object} options - Modifiers configuration and options
* @returns {dataObject} The data object, properly modified
*/
/**
* Modifiers are plugins used to alter the behavior of your poppers.<br />
* Popper.js uses a set of 9 modifiers to provide all the basic functionalities
* needed by the library.
*
* Usually you don't want to override the `order`, `fn` and `onLoad` props.
* All the other properties are configurations that could be tweaked.
* @namespace modifiers
*/
var modifiers = {
  /**
   * Modifier used to shift the popper on the start or end of its reference
   * element.<br />
   * It will read the variation of the `placement` property.<br />
   * It can be either `-end` or `-start`.
   * @memberof modifiers
   * @inner
   */
  shift: {
    /** @prop {number} order=100 - Index used to define the order of execution */
    order: 100,
    /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */
    enabled: true,
    /** @prop {ModifierFn} */
    fn: shift
  },
  /**
   * The `offset` modifier can shift your popper on both its axis.
   *
   * It accepts the following units:
   * - `px` or unit-less, interpreted as pixels
   * - `%` or `%r`, percentage relative to the length of the reference element
   * - `%p`, percentage relative to the length of the popper element
   * - `vw`, CSS viewport width unit
   * - `vh`, CSS viewport height unit
   *
   * For length is intended the main axis relative to the placement of the popper.<br />
   * This means that if the placement is `top` or `bottom`, the length will be the
   * `width`. In case of `left` or `right`, it will be the `height`.
   *
   * You can provide a single value (as `Number` or `String`), or a pair of values
   * as `String` divided by a comma or one (or more) white spaces.<br />
   * The latter is a deprecated method because it leads to confusion and will be
   * removed in v2.<br />
   * Additionally, it accepts additions and subtractions between different units.
   * Note that multiplications and divisions aren't supported.
   *
   * Valid examples are:
   * ```
   * 10
   * '10%'
   * '10, 10'
   * '10%, 10'
   * '10 + 10%'
   * '10 - 5vh + 3%'
   * '-10px + 5vh, 5px - 6%'
   * ```
   * > **NB**: If you desire to apply offsets to your poppers in a way that may make them overlap
   * > with their reference element, unfortunately, you will have to disable the `flip` modifier.
   * > You can read more on this at this [issue](https://github.com/FezVrasta/popper.js/issues/373).
   *
   * @memberof modifiers
   * @inner
   */
  offset: {
    /** @prop {number} order=200 - Index used to define the order of execution */
    order: 200,
    /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */
    enabled: true,
    /** @prop {ModifierFn} */
    fn: offset,
    /** @prop {Number|String} offset=0
     * The offset value as described in the modifier description
     */
    offset: 0
  },
  /**
   * Modifier used to prevent the popper from being positioned outside the boundary.
   *
   * A scenario exists where the reference itself is not within the boundaries.<br />
   * We can say it has "escaped the boundaries" — or just "escaped".<br />
   * In this case we need to decide whether the popper should either:
   *
   * - detach from the reference and remain "trapped" in the boundaries, or
   * - if it should ignore the boundary and "escape with its reference"
   *
   * When `escapeWithReference` is set to`true` and reference is completely
   * outside its boundaries, the popper will overflow (or completely leave)
   * the boundaries in order to remain attached to the edge of the reference.
   *
   * @memberof modifiers
   * @inner
   */
  preventOverflow: {
    /** @prop {number} order=300 - Index used to define the order of execution */
    order: 300,
    /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */
    enabled: true,
    /** @prop {ModifierFn} */
    fn: preventOverflow,
    /**
     * @prop {Array} [priority=['left','right','top','bottom']]
     * Popper will try to prevent overflow following these priorities by default,
     * then, it could overflow on the left and on top of the `boundariesElement`
     */
    priority: ['left', 'right', 'top', 'bottom'],
    /**
     * @prop {number} padding=5
     * Amount of pixel used to define a minimum distance between the boundaries
     * and the popper. This makes sure the popper always has a little padding
     * between the edges of its container
     */
    padding: 5,
    /**
     * @prop {String|HTMLElement} boundariesElement='scrollParent'
     * Boundaries used by the modifier. Can be `scrollParent`, `window`,
     * `viewport` or any DOM element.
     */
    boundariesElement: 'scrollParent'
  },
  /**
   * Modifier used to make sure the reference and its popper stay near each other
   * without leaving any gap between the two. Especially useful when the arrow is
   * enabled and you want to ensure that it points to its reference element.
   * It cares only about the first axis. You can still have poppers with margin
   * between the popper and its reference element.
   * @memberof modifiers
   * @inner
   */
  keepTogether: {
    /** @prop {number} order=400 - Index used to define the order of execution */
    order: 400,
    /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */
    enabled: true,
    /** @prop {ModifierFn} */
    fn: keepTogether
  },
  /**
   * This modifier is used to move the `arrowElement` of the popper to make
   * sure it is positioned between the reference element and its popper element.
   * It will read the outer size of the `arrowElement` node to detect how many
   * pixels of conjunction are needed.
   *
   * It has no effect if no `arrowElement` is provided.
   * @memberof modifiers
   * @inner
   */
  arrow: {
    /** @prop {number} order=500 - Index used to define the order of execution */
    order: 500,
    /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */
    enabled: true,
    /** @prop {ModifierFn} */
    fn: arrow,
    /** @prop {String|HTMLElement} element='[x-arrow]' - Selector or node used as arrow */
    element: '[x-arrow]'
  },
  /**
   * Modifier used to flip the popper's placement when it starts to overlap its
   * reference element.
   *
   * Requires the `preventOverflow` modifier before it in order to work.
   *
   * **NOTE:** this modifier will interrupt the current update cycle and will
   * restart it if it detects the need to flip the placement.
   * @memberof modifiers
   * @inner
   */
  flip: {
    /** @prop {number} order=600 - Index used to define the order of execution */
    order: 600,
    /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */
    enabled: true,
    /** @prop {ModifierFn} */
    fn: flip,
    /**
     * @prop {String|Array} behavior='flip'
     * The behavior used to change the popper's placement. It can be one of
     * `flip`, `clockwise`, `counterclockwise` or an array with a list of valid
     * placements (with optional variations)
     */
    behavior: 'flip',
    /**
     * @prop {number} padding=5
     * The popper will flip if it hits the edges of the `boundariesElement`
     */
    padding: 5,
    /**
     * @prop {String|HTMLElement} boundariesElement='viewport'
     * The element which will define the boundaries of the popper position.
     * The popper will never be placed outside of the defined boundaries
     * (except if `keepTogether` is enabled)
     */
    boundariesElement: 'viewport',
    /**
     * @prop {Boolean} flipVariations=false
     * The popper will switch placement variation between `-start` and `-end` when
     * the reference element overlaps its boundaries.
     *
     * The original placement should have a set variation.
     */
    flipVariations: false,
    /**
     * @prop {Boolean} flipVariationsByContent=false
     * The popper will switch placement variation between `-start` and `-end` when
     * the popper element overlaps its reference boundaries.
     *
     * The original placement should have a set variation.
     */
    flipVariationsByContent: false
  },
  /**
   * Modifier used to make the popper flow toward the inner of the reference element.
   * By default, when this modifier is disabled, the popper will be placed outside
   * the reference element.
   * @memberof modifiers
   * @inner
   */
  inner: {
    /** @prop {number} order=700 - Index used to define the order of execution */
    order: 700,
    /** @prop {Boolean} enabled=false - Whether the modifier is enabled or not */
    enabled: false,
    /** @prop {ModifierFn} */
    fn: inner
  },
  /**
   * Modifier used to hide the popper when its reference element is outside of the
   * popper boundaries. It will set a `x-out-of-boundaries` attribute which can
   * be used to hide with a CSS selector the popper when its reference is
   * out of boundaries.
   *
   * Requires the `preventOverflow` modifier before it in order to work.
   * @memberof modifiers
   * @inner
   */
  hide: {
    /** @prop {number} order=800 - Index used to define the order of execution */
    order: 800,
    /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */
    enabled: true,
    /** @prop {ModifierFn} */
    fn: hide
  },
  /**
   * Computes the style that will be applied to the popper element to get
   * properly positioned.
   *
   * Note that this modifier will not touch the DOM, it just prepares the styles
   * so that `applyStyle` modifier can apply it. This separation is useful
   * in case you need to replace `applyStyle` with a custom implementation.
   *
   * This modifier has `850` as `order` value to maintain backward compatibility
   * with previous versions of Popper.js. Expect the modifiers ordering method
   * to change in future major versions of the library.
   *
   * @memberof modifiers
   * @inner
   */
  computeStyle: {
    /** @prop {number} order=850 - Index used to define the order of execution */
    order: 850,
    /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */
    enabled: true,
    /** @prop {ModifierFn} */
    fn: computeStyle,
    /**
     * @prop {Boolean} gpuAcceleration=true
     * If true, it uses the CSS 3D transformation to position the popper.
     * Otherwise, it will use the `top` and `left` properties
     */
    gpuAcceleration: true,
    /**
     * @prop {string} [x='bottom']
     * Where to anchor the X axis (`bottom` or `top`). AKA X offset origin.
     * Change this if your popper should grow in a direction different from `bottom`
     */
    x: 'bottom',
    /**
     * @prop {string} [y='right']
     * Where to anchor the Y axis (`left` or `right`). AKA Y offset origin.
     * Change this if your popper should grow in a direction different from `right`
     */
    y: 'right'
  },
  /**
   * Applies the computed styles to the popper element.
   *
   * All the DOM manipulations are limited to this modifier. This is useful in case
   * you want to integrate Popper.js inside a framework or view library and you
   * want to delegate all the DOM manipulations to it.
   *
   * Note that if you disable this modifier, you must make sure the popper element
   * has its position set to `absolute` before Popper.js can do its work!
   *
   * Just disable this modifier and define your own to achieve the desired effect.
   *
   * @memberof modifiers
   * @inner
   */
  applyStyle: {
    /** @prop {number} order=900 - Index used to define the order of execution */
    order: 900,
    /** @prop {Boolean} enabled=true - Whether the modifier is enabled or not */
    enabled: true,
    /** @prop {ModifierFn} */
    fn: applyStyle,
    /** @prop {Function} */
    onLoad: applyStyleOnLoad,
    /**
     * @deprecated since version 1.10.0, the property moved to `computeStyle` modifier
     * @prop {Boolean} gpuAcceleration=true
     * If true, it uses the CSS 3D transformation to position the popper.
     * Otherwise, it will use the `top` and `left` properties
     */
    gpuAcceleration: undefined
  }
};
/**
* The `dataObject` is an object containing all the information used by Popper.js.
* This object is passed to modifiers and to the `onCreate` and `onUpdate` callbacks.
* @name dataObject
* @property {Object} data.instance The Popper.js instance
* @property {String} data.placement Placement applied to popper
* @property {String} data.originalPlacement Placement originally defined on init
* @property {Boolean} data.flipped True if popper has been flipped by flip modifier
* @property {Boolean} data.hide True if the reference element is out of boundaries, useful to know when to hide the popper
* @property {HTMLElement} data.arrowElement Node used as arrow by arrow modifier
* @property {Object} data.styles Any CSS property defined here will be applied to the popper. It expects the JavaScript nomenclature (eg. `marginBottom`)
* @property {Object} data.arrowStyles Any CSS property defined here will be applied to the popper arrow. It expects the JavaScript nomenclature (eg. `marginBottom`)
* @property {Object} data.boundaries Offsets of the popper boundaries
* @property {Object} data.offsets The measurements of popper, reference and arrow elements
* @property {Object} data.offsets.popper `top`, `left`, `width`, `height` values
* @property {Object} data.offsets.reference `top`, `left`, `width`, `height` values
 * @property {Object} data.offsets.arrow `top` and `left` offsets, only one of them will be different from 0
*/
/**
* Default options provided to Popper.js constructor.<br />
* These can be overridden using the `options` argument of Popper.js.<br />
* To override an option, simply pass an object with the same
* structure of the `options` object, as the 3rd argument. For example:
* ```
* new Popper(ref, pop, {
* modifiers: {
* preventOverflow: { enabled: false }
* }
* })
* ```
* @type {Object}
* @static
* @memberof Popper
*/
var Defaults = {
  /**
   * Popper's placement.
   * @prop {Popper.placements} placement='bottom'
   */
  placement: 'bottom',
  /**
   * Set this to true if you want popper to position itself in 'fixed' mode
   * @prop {Boolean} positionFixed=false
   */
  positionFixed: false,
  /**
   * Whether events (resize, scroll) are initially enabled.
   * @prop {Boolean} eventsEnabled=true
   */
  eventsEnabled: true,
  /**
   * Set to true if you want to automatically remove the popper when
   * you call the `destroy` method.
   * @prop {Boolean} removeOnDestroy=false
   */
  removeOnDestroy: false,
  /**
   * Callback called when the popper is created.<br />
   * By default, it is set to no-op.<br />
   * Access Popper.js instance with `data.instance`.
   * @prop {onCreate}
   */
  onCreate: () => {},
  /**
   * Callback called when the popper is updated. This callback is not called
   * on the initialization/creation of the popper, but only on subsequent
   * updates.<br />
   * By default, it is set to no-op.<br />
   * Access Popper.js instance with `data.instance`.
   * @prop {onUpdate}
   */
  onUpdate: () => {},
  /**
   * List of modifiers used to modify the offsets before they are applied to the popper.
   * They provide most of the functionalities of Popper.js.
   * @prop {modifiers}
   */
  modifiers
};
/**
* @callback onCreate
* @param {dataObject} data
*/
/**
* @callback onUpdate
* @param {dataObject} data
*/
// Utils
// Methods
class Popper {
  /**
   * Creates a new Popper.js instance.
   * @class Popper
   * @param {Element|referenceObject} reference - The reference element used to position the popper
   * @param {Element} popper - The HTML / XML element used as the popper
   * @param {Object} options - Your custom options to override the ones defined in [Defaults](#defaults)
   * @return {Object} instance - The generated Popper.js instance
   */
  constructor(reference, popper, options = {}) {
    // defer the update to the next animation frame
    this.scheduleUpdate = () => requestAnimationFrame(this.update);
    // make update() debounced, so that it only runs at most once-per-tick
    this.update = debounce(this.update.bind(this));
    // with {} we create a new object with the options inside it
    this.options = _extends({}, Popper.Defaults, options);
    // init state
    this.state = {
      isDestroyed: false,
      isCreated: false,
      scrollParents: []
    };
    // get reference and popper elements (allow jQuery wrappers)
    this.reference = reference && reference.jquery ? reference[0] : reference;
    this.popper = popper && popper.jquery ? popper[0] : popper;
    // Deep merge modifiers options
    this.options.modifiers = {};
    Object.keys(_extends({}, Popper.Defaults.modifiers, options.modifiers)).forEach(name => {
      this.options.modifiers[name] = _extends({}, Popper.Defaults.modifiers[name] || {}, options.modifiers ? options.modifiers[name] : {});
    });
    // Refactoring modifiers' list (Object => Array)
    this.modifiers = Object.keys(this.options.modifiers).map(name => _extends({
      name
    }, this.options.modifiers[name]))
    // sort the modifiers by order
    .sort((a, b) => a.order - b.order);
    // modifiers have the ability to execute arbitrary code when Popper.js get inited
    // such code is executed in the same order of its modifier
    // they could add new properties to their options configuration
    // BE AWARE: don't add options to `options.modifiers.name` but to `modifierOptions`!
    this.modifiers.forEach(modifierOptions => {
      if (modifierOptions.enabled && isFunction(modifierOptions.onLoad)) {
        modifierOptions.onLoad(this.reference, this.popper, this.options, modifierOptions, this.state);
      }
    });
    // fire the first update to position the popper in the right place
    this.update();
    const eventsEnabled = this.options.eventsEnabled;
    if (eventsEnabled) {
      // setup event listeners, they will take care of update the position in specific situations
      this.enableEventListeners();
    }
    this.state.eventsEnabled = eventsEnabled;
  }
  // We can't use class properties because they don't get listed in the
  // class prototype and break stuff like Sinon stubs
  update() {
    return update.call(this);
  }
  destroy() {
    return destroy.call(this);
  }
  enableEventListeners() {
    return enableEventListeners.call(this);
  }
  disableEventListeners() {
    return disableEventListeners.call(this);
  }
  /**
   * Schedules an update. It will run on the next UI update available.
   * @method scheduleUpdate
   * @memberof Popper
   */
  /**
   * Collection of utilities useful when writing custom modifiers.
   * Starting from version 1.7, this method is available only if you
   * include `popper-utils.js` before `popper.js`.
   *
   * **DEPRECATION**: This way to access PopperUtils is deprecated
   * and will be removed in v2! Use the PopperUtils module directly instead.
   * Due to the high instability of the methods contained in Utils, we can't
   * guarantee them to follow semver. Use them at your own risk!
   * @static
   * @private
   * @type {Object}
   * @deprecated since version 1.8
   * @member Utils
   * @memberof Popper
   */
}
/**
* The `referenceObject` is an object that provides an interface compatible with Popper.js
* and lets you use it as replacement of a real DOM node.<br />
* You can use this method to position a popper relatively to a set of coordinates
* in case you don't have a DOM node to use as reference.
*
* ```
* new Popper(referenceObject, popperNode);
* ```
*
* NB: This feature isn't supported in Internet Explorer 10.
* @name referenceObject
* @property {Function} data.getBoundingClientRect
* A function that returns a set of coordinates compatible with the native `getBoundingClientRect` method.
* @property {number} data.clientWidth
* An ES6 getter that will return the width of the virtual reference element.
* @property {number} data.clientHeight
* An ES6 getter that will return the height of the virtual reference element.
*/
// Expose the (deprecated) PopperUtils helpers and the static configuration
// on the Popper constructor for backward compatibility.
Popper.Utils = (typeof window !== 'undefined' ? window : global).PopperUtils;
Popper.placements = placements;
Popper.Defaults = Defaults;
export default Popper;
//# sourceMappingURL=popper.js.map
|
applyStyle
|
server.go
|
package main
|
"time"
"github.com/go-stack/stack"
"github.com/karlmutch/errors"
"google.golang.org/grpc"
"google.golang.org/grpc/health"
"google.golang.org/grpc/health/grpc_health_v1"
"google.golang.org/grpc/reflection"
timestamp "github.com/golang/protobuf/ptypes/timestamp"
echo "github.com/leaf-ai/platform-services/internal/gen/echosrv"
)
// echoServer implements the Echo gRPC service together with the
// standard gRPC health checking protocol (grpc_health_v1).
type echoServer struct {
	echo.UnimplementedEchoServer
	health *health.Server // per-service serving status, reported via Check
}
func (*echoServer) Echo(ctx context.Context, in *echo.Request) (resp *echo.Response, err error) {
if in == nil {
return nil, fmt.Errorf("request is missing a message to echo")
}
return &echo.Response{
Message: in.Message,
DateTime: ×tamp.Timestamp{Seconds: time.Now().Unix()}}, nil
}
// Check implements grpc_health_v1.HealthServer by delegating to the
// embedded health server, which holds the status set by runServer.
func (es *echoServer) Check(ctx context.Context, in *grpc_health_v1.HealthCheckRequest) (resp *grpc_health_v1.HealthCheckResponse, err error) {
	return es.health.Check(ctx, in)
}
// Watch is the streaming health check endpoint.  Streaming watches are
// not supported by this server; the returned error carries the UNKNOWN
// status name so callers can recognize the condition.
func (*echoServer) Watch(in *grpc_health_v1.HealthCheckRequest, server grpc_health_v1.Health_WatchServer) (err error) {
	return errors.New(grpc_health_v1.HealthCheckResponse_UNKNOWN.String())
}
// runServer starts the echo gRPC service listening on the loopback
// interface at the given port and returns a buffered channel on which
// listener and serving errors are reported.  The server is stopped when
// ctx is cancelled, and the channel is closed once serving ends.
func runServer(ctx context.Context, serviceName string, port int) (errC chan errors.Error) {
	// Buffered so the serving goroutine never blocks reporting errors.
	errC = make(chan errors.Error, 3)
	server := grpc.NewServer()
	echoSrv := &echoServer{health: health.NewServer()}
	echo.RegisterEchoServer(server, echoSrv)
	grpc_health_v1.RegisterHealthServer(server, echoSrv)
	// Enable server reflection so tools such as grpcurl can discover the API.
	reflection.Register(server)
	netListen, err := net.Listen("tcp", fmt.Sprintf("127.0.0.1:%d", port))
	if err != nil {
		errC <- errors.Wrap(err).With("stack", stack.Trace().TrimRuntime())
		return
	}
	go func() {
		echoSrv.health.SetServingStatus(serviceName, grpc_health_v1.HealthCheckResponse_SERVING)
		// serve API
		if err := server.Serve(netListen); err != nil {
			errC <- errors.Wrap(err).With("stack", stack.Trace().TrimRuntime())
		}
		echoSrv.health.SetServingStatus(serviceName, grpc_health_v1.HealthCheckResponse_NOT_SERVING)
		func() {
			// recover must be invoked by a deferred function to take effect;
			// the original bare `defer recover()` form is not guaranteed to
			// swallow a panic (e.g. a racing close of errC).
			defer func() { _ = recover() }()
			close(errC)
		}()
	}()
	go func() {
		// Block until the caller cancels, then stop serving.
		// (Direct receive replaces the single-case select.)
		<-ctx.Done()
		server.Stop()
	}()
	return errC
}
|
import (
"context"
"fmt"
"net"
|
bitcoin_de.ts
|
<TS language="de" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Rechtsklick zum Bearbeiten der Adresse oder der Bezeichnung</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Eine neue Adresse erstellen</translation>
</message>
<message>
<source>&New</source>
<translation>&Neu</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Ausgewählte Adresse in die Zwischenablage kopieren</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Kopieren</translation>
</message>
<message>
<source>C&lose</source>
<translation>&Schließen</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>Adresse &kopieren</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Ausgewählte Adresse aus der Liste entfernen</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Daten der aktuellen Ansicht in eine Datei exportieren</translation>
</message>
<message>
<source>&Export</source>
<translation>E&xportieren</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Löschen</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Wählen Sie die Adresse aus, an die Sie Zetacoins überweisen möchten</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Wählen Sie die Adresse aus, über die Sie Zetacoins empfangen wollen</translation>
</message>
<message>
<source>C&hoose</source>
<translation>&Auswählen</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Zahlungsadressen</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Empfangsadressen</translation>
</message>
<message>
<source>These are your Zetacoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Dies sind Ihre Zetacoin-Adressen zum Tätigen von Überweisungen. Bitte prüfen Sie den Betrag und die Empfangsadresse, bevor Sie Zetacoins überweisen.</translation>
</message>
<message>
<source>These are your Zetacoin addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Dies sind Ihre Zetacoin-Adressen zum Empfangen von Zahlungen. Es wird empfohlen für jede Transaktion eine neue Empfangsadresse zu verwenden.</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>&Bezeichnung kopieren</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Editieren</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Adressliste exportieren</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Kommagetrennte-Datei (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Exportieren fehlgeschlagen</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>Beim Speichern der Adressliste nach %1 ist ein Fehler aufgetreten. Bitte versuchen Sie es erneut.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>(no label)</source>
<translation>(keine Bezeichnung)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Passphrasendialog</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Passphrase eingeben</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Neue Passphrase</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Neue Passphrase bestätigen</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Wallet verschlüsseln</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Dieser Vorgang benötigt Ihre Passphrase, um die Wallet zu entsperren.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Wallet entsperren</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Dieser Vorgang benötigt Ihre Passphrase, um die Wallet zu entschlüsseln.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Wallet entschlüsseln</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Passphrase ändern</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Wallet-Verschlüsselung bestätigen</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR ZETACOINS</b>!</source>
<translation>Warnung: Wenn Sie Ihre Wallet verschlüsseln und Ihre Passphrase verlieren, werden Sie <b>alle Ihre Zetacoins verlieren</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Sind Sie sich sicher, dass Sie Ihre Wallet verschlüsseln möchten?</translation>
</message>
<message>
<source>Zetacoin Core will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your zetacoins from being stolen by malware infecting your computer.</source>
<translation>Zetacoin Core wird jetzt beendet, um den Verschlüsselungsprozess abzuschließen. Vergessen Sie nicht, dass eine Wallet-Verschlüsselung nicht vollständig vor Diebstahl Ihrer Zetacoins durch Schadsoftware schützen kann, die Ihren Computer infiziert.</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>WICHTIG: Alle vorherigen Wallet-Sicherungen sollten durch die neu erzeugte, verschlüsselte Wallet ersetzt werden. Aus Sicherheitsgründen werden vorherige Sicherungen der unverschlüsselten Wallet nutzlos, sobald Sie die neue, verschlüsselte Wallet verwenden.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Warnung: Die Feststelltaste ist aktiviert!</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Wallet verschlüsselt</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Geben Sie die neue Passphrase für die Wallet ein.<br>Bitte benutzen Sie eine Passphrase bestehend aus <b>zehn oder mehr zufälligen Zeichen</b> oder <b>acht oder mehr Wörtern</b>.</translation>
</message>
<message>
<source>Enter the old passphrase and new passphrase to the wallet.</source>
<translation>Geben Sie die alte und neue Wallet-Passphrase ein.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Wallet-Verschlüsselung fehlgeschlagen</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Die Wallet-Verschlüsselung ist aufgrund eines internen Fehlers fehlgeschlagen. Ihre Wallet wurde nicht verschlüsselt.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Die eingegebenen Passphrasen stimmen nicht überein.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Wallet-Entsperrung fehlgeschlagen</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Die eingegebene Passphrase zur Wallet-Entschlüsselung war nicht korrekt.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Wallet-Entschlüsselung fehlgeschlagen</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Die Wallet-Passphrase wurde erfolgreich geändert.</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
<message>
<source>IP/Netmask</source>
<translation>IP/Netzmaske</translation>
</message>
<message>
<source>Banned Until</source>
<translation>Gesperrt bis</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Nachricht s&ignieren...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Synchronisiere mit Netzwerk...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Übersicht</translation>
</message>
<message>
<source>Node</source>
<translation>Knoten</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Allgemeine Wallet-Übersicht anzeigen</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transaktionen</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Transaktionsverlauf durchsehen</translation>
</message>
<message>
<source>E&xit</source>
<translation>&Beenden</translation>
</message>
<message>
<source>Quit application</source>
<translation>Anwendung beenden</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Über &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Informationen über Qt anzeigen</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Konfiguration...</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>Wallet &verschlüsseln...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>Wallet &sichern...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>Passphrase &ändern...</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>&Zahlungsadressen...</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>&Empfangsadressen...</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>&URI öffnen...</translation>
</message>
|
<message>
<source>Importing blocks from disk...</source>
<translation>Importiere Blöcke von Datenträger...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Reindiziere Blöcke auf Datenträger...</translation>
</message>
<message>
<source>Send coins to a Zetacoin address</source>
<translation>Zetacoins an eine Zetacoin-Adresse überweisen</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Eine Wallet-Sicherungskopie erstellen und abspeichern</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Ändert die Passphrase, die für die Wallet-Verschlüsselung benutzt wird</translation>
</message>
<message>
<source>&Debug window</source>
<translation>&Debugfenster</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Debugging- und Diagnosekonsole öffnen</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>Nachricht &verifizieren...</translation>
</message>
<message>
<source>Zetacoin</source>
<translation>Zetacoin</translation>
</message>
<message>
<source>Wallet</source>
<translation>Wallet</translation>
</message>
<message>
<source>&Send</source>
<translation>&Überweisen</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Empfangen</translation>
</message>
<message>
<source>Show information about Zetacoin Core</source>
<translation>Informationen über Zetacoin Core anzeigen</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Anzeigen / Verstecken</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Das Hauptfenster anzeigen oder verstecken</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Verschlüsselt die zu Ihrer Wallet gehörenden privaten Schlüssel</translation>
</message>
<message>
<source>Sign messages with your Zetacoin addresses to prove you own them</source>
<translation>Nachrichten signieren, um den Besitz Ihrer Zetacoin-Adressen zu beweisen</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified Zetacoin addresses</source>
<translation>Nachrichten verifizieren, um sicherzustellen, dass diese mit den angegebenen Zetacoin-Adressen signiert wurden</translation>
</message>
<message>
<source>&File</source>
<translation>&Datei</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Einstellungen</translation>
</message>
<message>
<source>&Help</source>
<translation>&Hilfe</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Registerkartenleiste</translation>
</message>
<message>
<source>Zetacoin Core</source>
<translation>Zetacoin Core</translation>
</message>
<message>
<source>Request payments (generates QR codes and zetacoin: URIs)</source>
<translation>Zahlungen anfordern (erzeugt QR-Codes und "zetacoin:"-URIs)</translation>
</message>
<message>
<source>&About Zetacoin Core</source>
<translation>&Über Zetacoin Core</translation>
</message>
<message>
<source>Modify configuration options for Zetacoin Core</source>
<translation>Konfiguration von Zetacoin Core bearbeiten</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Liste verwendeter Zahlungsadressen und Bezeichnungen anzeigen</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Liste verwendeter Empfangsadressen und Bezeichnungen anzeigen</translation>
</message>
<message>
<source>Open a zetacoin: URI or payment request</source>
<translation>Eine "zetacoin:"-URI oder Zahlungsanforderung öffnen</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>&Kommandozeilenoptionen</translation>
</message>
<message>
<source>Show the Zetacoin Core help message to get a list with possible Zetacoin command-line options</source>
<translation>Zeige den "Zetacoin Core"-Hilfetext, um eine Liste mit möglichen Kommandozeilenoptionen zu erhalten</translation>
</message>
<message numerus="yes">
<source>%n active connection(s) to Zetacoin network</source>
<translation><numerusform>%n aktive Verbindung zum Zetacoin-Netzwerk</numerusform><numerusform>%n aktive Verbindungen zum Zetacoin-Netzwerk</numerusform></translation>
</message>
<message>
<source>No block source available...</source>
<translation>Keine Blockquelle verfügbar...</translation>
</message>
<message numerus="yes">
<source>Processed %n block(s) of transaction history.</source>
<translation><numerusform>%n Block des Transaktionsverlaufs verarbeitet.</numerusform><numerusform>%n Blöcke des Transaktionsverlaufs verarbeitet.</numerusform></translation>
</message>
<message numerus="yes">
<source>%n hour(s)</source>
<translation><numerusform>%n Stunde</numerusform><numerusform>%n Stunden</numerusform></translation>
</message>
<message numerus="yes">
<source>%n day(s)</source>
<translation><numerusform>%n Tag</numerusform><numerusform>%n Tage</numerusform></translation>
</message>
<message numerus="yes">
<source>%n week(s)</source>
<translation><numerusform>%n Woche</numerusform><numerusform>%n Wochen</numerusform></translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 und %2</translation>
</message>
<message numerus="yes">
<source>%n year(s)</source>
<translation><numerusform>%n Jahr</numerusform><numerusform>%n Jahre</numerusform></translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 im Rückstand</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Der letzte empfangene Block ist %1 alt.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Transaktionen hiernach werden noch nicht angezeigt.</translation>
</message>
<message>
<source>Error</source>
<translation>Fehler</translation>
</message>
<message>
<source>Warning</source>
<translation>Warnung</translation>
</message>
<message>
<source>Information</source>
<translation>Hinweis</translation>
</message>
<message>
<source>Up to date</source>
<translation>Auf aktuellem Stand</translation>
</message>
<message>
<source>Catching up...</source>
<translation>Hole auf...</translation>
</message>
<message>
<source>Date: %1
</source>
<translation>Datum: %1
</translation>
</message>
<message>
<source>Amount: %1
</source>
<translation>Betrag: %1
</translation>
</message>
<message>
<source>Type: %1
</source>
<translation>Typ: %1
</translation>
</message>
<message>
<source>Label: %1
</source>
<translation>Bezeichnung: %1
</translation>
</message>
<message>
<source>Address: %1
</source>
<translation>Adresse: %1
</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Gesendete Transaktion</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Eingehende Transaktion</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Wallet ist <b>verschlüsselt</b> und aktuell <b>entsperrt</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Wallet ist <b>verschlüsselt</b> und aktuell <b>gesperrt</b></translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Network Alert</source>
<translation>Netzwerkalarm</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Selection</source>
<translation>Münzauswahl ("Coin Control")</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Anzahl:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Byte:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Betrag:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Priorität:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Gebühr:</translation>
</message>
<message>
<source>Dust:</source>
<translation>"Dust":</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Abzüglich Gebühr:</translation>
</message>
<message>
<source>Change:</source>
<translation>Wechselgeld:</translation>
</message>
<message>
<source>(un)select all</source>
<translation>Alles (de)selektieren</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Baumansicht</translation>
</message>
<message>
<source>List mode</source>
<translation>Listenansicht</translation>
</message>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>Received with label</source>
<translation>Empfangen über Bezeichnung</translation>
</message>
<message>
<source>Received with address</source>
<translation>Empfangen über Adresse</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Bestätigungen</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Bestätigt</translation>
</message>
<message>
<source>Priority</source>
<translation>Priorität</translation>
</message>
<message>
<source>Copy address</source>
<translation>Adresse kopieren</translation>
</message>
<message>
<source>Copy label</source>
<translation>Bezeichnung kopieren</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Betrag kopieren</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Transaktions-ID kopieren</translation>
</message>
<message>
<source>Lock unspent</source>
<translation>Nicht ausgegebenen Betrag sperren</translation>
</message>
<message>
<source>Unlock unspent</source>
<translation>Nicht ausgegebenen Betrag entsperren</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Anzahl kopieren</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Gebühr kopieren</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Abzüglich Gebühr kopieren</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Byte kopieren</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Priorität kopieren</translation>
</message>
<message>
<source>Copy dust</source>
<translation>"Dust" kopieren</translation>
</message>
<message>
<source>Copy change</source>
<translation>Wechselgeld kopieren</translation>
</message>
<message>
<source>highest</source>
<translation>am höchsten</translation>
</message>
<message>
<source>higher</source>
<translation>höher</translation>
</message>
<message>
<source>high</source>
<translation>hoch</translation>
</message>
<message>
<source>medium-high</source>
<translation>mittel-hoch</translation>
</message>
<message>
<source>medium</source>
<translation>mittel</translation>
</message>
<message>
<source>low-medium</source>
<translation>niedrig-mittel</translation>
</message>
<message>
<source>low</source>
<translation>niedrig</translation>
</message>
<message>
<source>lower</source>
<translation>niedriger</translation>
</message>
<message>
<source>lowest</source>
<translation>am niedrigsten</translation>
</message>
<message>
<source>(%1 locked)</source>
<translation>(%1 gesperrt)</translation>
</message>
<message>
<source>none</source>
<translation>keine</translation>
</message>
<message>
<source>This label turns red if the transaction size is greater than 1000 bytes.</source>
<translation>Diese Bezeichnung wird rot, wenn die Transaktion größer als 1000 Byte ist.</translation>
</message>
<message>
<source>This label turns red if the priority is smaller than "medium".</source>
<translation>Diese Bezeichnung wird rot, wenn die Priorität niedriger als "mittel" ist.</translation>
</message>
<message>
<source>This label turns red if any recipient receives an amount smaller than %1.</source>
<translation>Diese Bezeichnung wird rot, wenn irgendein Empfänger einen Betrag kleiner als %1 erhält.</translation>
</message>
<message>
<source>Can vary +/- %1 satoshi(s) per input.</source>
<translation>Kann pro Eingabe um +/- %1 Satoshi(s) abweichen.</translation>
</message>
<message>
<source>yes</source>
<translation>ja</translation>
</message>
<message>
<source>no</source>
<translation>nein</translation>
</message>
<message>
<source>This means a fee of at least %1 per kB is required.</source>
<translation>Das bedeutet, dass eine Gebühr von mindestens %1 pro kB erforderlich ist.</translation>
</message>
<message>
<source>Can vary +/- 1 byte per input.</source>
<translation>Kann um +/- 1 Byte pro Eingabe variieren.</translation>
</message>
<message>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Transaktionen mit höherer Priorität haben eine größere Chance in einen Block aufgenommen zu werden.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(keine Bezeichnung)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation>Wechselgeld von %1 (%2)</translation>
</message>
<message>
<source>(change)</source>
<translation>(Wechselgeld)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Adresse bearbeiten</translation>
</message>
<message>
<source>&Label</source>
<translation>&Bezeichnung</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>Bezeichnung, die dem Adresslisteneintrag zugeordnet ist.</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>Adresse, die dem Adresslisteneintrag zugeordnet ist. Diese kann nur bei Zahlungsadressen verändert werden.</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adresse</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Neue Empfangsadresse</translation>
</message>
<message>
<source>New sending address</source>
<translation>Neue Zahlungsadresse</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Empfangsadresse bearbeiten</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Zahlungsadresse bearbeiten</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>Die eingegebene Adresse "%1" befindet sich bereits im Adressbuch.</translation>
</message>
<message>
<source>The entered address "%1" is not a valid Zetacoin address.</source>
<translation>Die eingegebene Adresse "%1" ist keine gültige Zetacoin-Adresse.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Wallet konnte nicht entsperrt werden.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Erzeugung eines neuen Schlüssels fehlgeschlagen.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Es wird ein neues Datenverzeichnis angelegt.</translation>
</message>
<message>
<source>name</source>
<translation>Name</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Verzeichnis existiert bereits. Fügen Sie %1 an, wenn Sie beabsichtigen hier ein neues Verzeichnis anzulegen.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>Pfad existiert bereits und ist kein Verzeichnis.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>Datenverzeichnis kann hier nicht angelegt werden.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>Zetacoin Core</source>
<translation>Zetacoin Core</translation>
</message>
<message>
<source>version</source>
<translation>Version</translation>
</message>
<message>
<source>(%1-bit)</source>
<translation>(%1-Bit)</translation>
</message>
<message>
<source>About Zetacoin Core</source>
<translation>Über Zetacoin Core</translation>
</message>
<message>
<source>Command-line options</source>
<translation>Kommandozeilenoptionen</translation>
</message>
<message>
<source>Usage:</source>
<translation>Benutzung:</translation>
</message>
<message>
<source>command-line options</source>
<translation>Kommandozeilenoptionen</translation>
</message>
<message>
<source>UI Options:</source>
<translation>UI Einstellungen:</translation>
</message>
<message>
<source>Choose data directory on startup (default: %u)</source>
<translation>Datenverzeichnis beim Starten auswählen (Standard: %u)</translation>
</message>
<message>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Sprache einstellen, zum Beispiel "de_DE" (Standard: Systemgebietsschema)</translation>
</message>
<message>
<source>Start minimized</source>
<translation>Minimiert starten</translation>
</message>
<message>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation>SSL-Wurzelzertifikate für Zahlungsanforderungen festlegen (Standard: -system-)</translation>
</message>
<message>
<source>Show splash screen on startup (default: %u)</source>
<translation>Startbildschirm beim Starten anzeigen (Standard: %u)</translation>
</message>
<message>
<source>Reset all settings changes made over the GUI</source>
<translation>Setze alle Einstellungen zurück, die über die grafische Oberfläche geändert wurden.</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Willkommen</translation>
</message>
<message>
<source>Welcome to Zetacoin Core.</source>
<translation>Willkommen zu Zetacoin Core.</translation>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where Zetacoin Core will store its data.</source>
<translation>Da Sie das Programm gerade zum ersten Mal starten, können Sie nun auswählen wo Zetacoin Core seine Daten ablegen soll.</translation>
</message>
<message>
<source>Zetacoin Core will download and store a copy of the Zetacoin block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation>Zetacoin Core wird eine Kopie der Blockkette herunterladen und speichern. Mindestens %1GB Daten werden in diesem Verzeichnis abgelegt und die Datenmenge wächst über die Zeit an. Auch die Wallet wird in diesem Verzeichnis abgelegt.</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Standard-Datenverzeichnis verwenden</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Ein benutzerdefiniertes Datenverzeichnis verwenden:</translation>
</message>
<message>
<source>Zetacoin Core</source>
<translation>Zetacoin Core</translation>
</message>
<message>
<source>Error: Specified data directory "%1" cannot be created.</source>
<translation>Fehler: Angegebenes Datenverzeichnis "%1" kann nicht angelegt werden.</translation>
</message>
<message>
<source>Error</source>
<translation>Fehler</translation>
</message>
<message numerus="yes">
<source>%n GB of free space available</source>
<translation><numerusform>%n GB freier Speicherplatz verfügbar</numerusform><numerusform>%n GB freier Speicherplatz verfügbar</numerusform></translation>
</message>
<message numerus="yes">
<source>(of %n GB needed)</source>
<translation><numerusform>(von benötigtem %n GB)</numerusform><numerusform>(von benötigten %n GB)</numerusform></translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>URI öffnen</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Zahlungsanforderung über URI oder aus Datei öffnen</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Zahlungsanforderungsdatei auswählen</translation>
</message>
<message>
<source>Select payment request file to open</source>
<translation>Zu öffnende Zahlungsanforderungsdatei auswählen</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Konfiguration</translation>
</message>
<message>
<source>&Main</source>
<translation>&Allgemein</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Größe des &Datenbankcaches</translation>
</message>
<message>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<source>Number of script &verification threads</source>
<translation>Anzahl an Skript-&Verifizierungs-Threads</translation>
</message>
<message>
<source>Accept connections from outside</source>
<translation>Eingehende Verbindungen annehmen</translation>
</message>
<message>
<source>Allow incoming connections</source>
<translation>Erlaubt eingehende Verbindungen</translation>
</message>
<message>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>IP-Adresse des Proxies (z.B. IPv4: 127.0.0.1 / IPv6: ::1)</translation>
</message>
<message>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Exit in the menu.</source>
<translation>Minimiert die Anwendung anstatt sie zu beenden wenn das Fenster geschlossen wird. Wenn dies aktiviert ist, müssen Sie die Anwendung über "Beenden" im Menü schließen.</translation>
</message>
<message>
<source>The user interface language can be set here. This setting will take effect after restarting Zetacoin Core.</source>
<translation>Legt die Sprache der Benutzeroberfläche fest. Diese Einstellung wird erst nach einem Neustart von Zetacoin Core aktiv.</translation>
</message>
<message>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation>Externe URLs (z.B. ein Block-Explorer), die im Kontextmenü des Transaktionsverlaufs eingefügt werden. In der URL wird %s durch den Transaktionshash ersetzt. Bei Angabe mehrerer URLs müssen diese durch "|" voneinander getrennt werden.</translation>
</message>
<message>
<source>Third party transaction URLs</source>
<translation>Externe Transaktions-URLs</translation>
</message>
<message>
<source>Active command-line options that override above options:</source>
<translation>Aktive Kommandozeilenoptionen, die obige Konfiguration überschreiben:</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Setzt die Clientkonfiguration auf Standardwerte zurück.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>Konfiguration &zurücksetzen</translation>
</message>
<message>
<source>&Network</source>
<translation>&Netzwerk</translation>
</message>
<message>
<source>Automatically start Zetacoin Core after logging in to the system.</source>
<translation>Zetacoin Core nach der Anmeldung am System automatisch starten.</translation>
</message>
<message>
<source>&Start Zetacoin Core on system login</source>
<translation>&Zetacoin Core nach Systemanmeldung starten</translation>
</message>
<message>
<source>(0 = auto, <0 = leave that many cores free)</source>
<translation>(0 = automatisch, <0 = so viele Kerne frei lassen)</translation>
</message>
<message>
<source>W&allet</source>
<translation>W&allet</translation>
</message>
<message>
<source>Expert</source>
<translation>Erweiterte Wallet-Optionen</translation>
</message>
<message>
<source>Enable coin &control features</source>
<translation>"&Coin Control"-Funktionen aktivieren</translation>
</message>
<message>
<source>If you disable the spending of unconfirmed change, the change from a transaction cannot be used until that transaction has at least one confirmation. This also affects how your balance is computed.</source>
<translation>Wenn Sie das Ausgeben von unbestätigtem Wechselgeld deaktivieren, kann das Wechselgeld einer Transaktion nicht verwendet werden, bis es mindestens eine Bestätigung erhalten hat. Dies wirkt sich auf die Berechnung des Kontostands aus.</translation>
</message>
<message>
<source>&Spend unconfirmed change</source>
<translation>&Unbestätigtes Wechselgeld darf ausgegeben werden</translation>
</message>
<message>
<source>Automatically open the Zetacoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Automatisch den Zetacoin-Clientport auf dem Router öffnen. Dies funktioniert nur, wenn Ihr Router UPnP unterstützt und dies aktiviert ist.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Portweiterleitung via &UPnP</translation>
</message>
<message>
<source>Connect to the Zetacoin network through a SOCKS5 proxy.</source>
<translation>Über einen SOCKS5-Proxy mit dem Zetacoin-Netzwerk verbinden.</translation>
</message>
<message>
<source>&Connect through SOCKS5 proxy (default proxy):</source>
<translation>Über einen SOCKS5-Proxy &verbinden (Standardproxy):</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>Proxy-&IP:</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Port des Proxies (z.B. 9050)</translation>
</message>
<message>
<source>Used for reaching peers via:</source>
<translation>Benutzt um Gegenstellen zu erreichen über:</translation>
</message>
<message>
<source>Shows, if the supplied default SOCKS5 proxy is used to reach peers via this network type.</source>
<translation>Zeigt an, ob der eingegebene Standard SOCKS5 Proxy genutzt wird um Peers mit dem Netzwerktyp zu erreichen.</translation>
</message>
<message>
<source>IPv4</source>
<translation>IPv4</translation>
</message>
<message>
<source>IPv6</source>
<translation>IPv6</translation>
</message>
<message>
<source>Tor</source>
<translation>Tor</translation>
</message>
<message>
<source>Connect to the Zetacoin network through a separate SOCKS5 proxy for Tor hidden services.</source>
        <translation>Über einen separaten SOCKS5-Proxy für versteckte Tor-Dienste mit dem Zetacoin-Netzwerk verbinden.</translation>
</message>
<message>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services:</source>
<translation>Separaten SOCKS5-Proxy verwenden, um Gegenstellen über versteckte Tor-Dienste zu erreichen:</translation>
</message>
<message>
<source>&Window</source>
<translation>&Programmfenster</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Nur ein Symbol im Infobereich anzeigen, nachdem das Programmfenster minimiert wurde.</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>In den Infobereich anstatt in die Taskleiste &minimieren</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>Beim Schließen m&inimieren</translation>
</message>
<message>
<source>&Display</source>
<translation>Anzei&ge</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>&Sprache der Benutzeroberfläche:</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Einheit der Beträge:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Wählen Sie die standardmäßige Untereinheit, die in der Benutzeroberfläche und beim Überweisen von Zetacoins angezeigt werden soll.</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation>Legt fest, ob die "Coin Control"-Funktionen angezeigt werden.</translation>
</message>
<message>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<source>&Cancel</source>
<translation>A&bbrechen</translation>
</message>
<message>
<source>default</source>
<translation>Standard</translation>
</message>
<message>
<source>none</source>
<translation>keine</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Zurücksetzen der Konfiguration bestätigen</translation>
</message>
<message>
<source>Client restart required to activate changes.</source>
<translation>Clientneustart nötig, um die Änderungen zu aktivieren.</translation>
</message>
<message>
<source>Client will be shut down. Do you want to proceed?</source>
<translation>Client wird beendet. Möchten Sie den Vorgang fortsetzen?</translation>
</message>
<message>
<source>This change would require a client restart.</source>
<translation>Diese Änderung würde einen Clientneustart benötigen.</translation>
</message>
<message>
<source>The supplied proxy address is invalid.</source>
<translation>Die eingegebene Proxyadresse ist ungültig.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formular</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Zetacoin network after a connection is established, but this process has not completed yet.</source>
<translation>Die angezeigten Informationen sind möglicherweise nicht mehr aktuell. Ihre Wallet wird automatisch synchronisiert, nachdem eine Verbindung zum Zetacoin-Netzwerk hergestellt wurde. Dieser Prozess ist jedoch derzeit noch nicht abgeschlossen.</translation>
</message>
<message>
<source>Watch-only:</source>
<translation>Beobachtet:</translation>
</message>
<message>
<source>Available:</source>
<translation>Verfügbar:</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>Ihr aktuell verfügbarer Kontostand</translation>
</message>
<message>
<source>Pending:</source>
<translation>Ausstehend:</translation>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>Betrag aus unbestätigten Transaktionen, der noch nicht im aktuell verfügbaren Kontostand enthalten ist</translation>
</message>
<message>
<source>Immature:</source>
<translation>Unreif:</translation>
</message>
<message>
<source>Mined balance that has not yet matured</source>
<translation>Erarbeiteter Betrag der noch nicht gereift ist</translation>
</message>
<message>
<source>Balances</source>
<translation>Kontostände</translation>
</message>
<message>
<source>Total:</source>
<translation>Gesamtbetrag:</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>Aktueller Gesamtbetrag aus obigen Kategorien</translation>
</message>
<message>
<source>Your current balance in watch-only addresses</source>
<translation>Ihr aktueller Kontostand beobachteter Adressen</translation>
</message>
<message>
<source>Spendable:</source>
<translation>Verfügbar:</translation>
</message>
<message>
<source>Recent transactions</source>
<translation>Letzte Transaktionen</translation>
</message>
<message>
<source>Unconfirmed transactions to watch-only addresses</source>
<translation>Unbestätigte Transaktionen von beobachteten Adressen</translation>
</message>
<message>
<source>Mined balance in watch-only addresses that has not yet matured</source>
<translation>Erarbeiteter Betrag in beobachteten Adressen der noch nicht gereift ist</translation>
</message>
<message>
<source>Current total balance in watch-only addresses</source>
<translation>Aktueller Gesamtbetrag in beobachteten Adressen aus obigen Kategorien</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>URI handling</source>
<translation>URI-Verarbeitung</translation>
</message>
<message>
<source>Invalid payment address %1</source>
<translation>Ungültige Zahlungsadresse %1</translation>
</message>
<message>
<source>Payment request rejected</source>
<translation>Zahlungsanforderung abgelehnt</translation>
</message>
<message>
<source>Payment request network doesn't match client network.</source>
<translation>Netzwerk der Zahlungsanforderung stimmt nicht mit dem Client-Netzwerk überein.</translation>
</message>
<message>
<source>Payment request is not initialized.</source>
<translation>Zahlungsanforderung ist nicht initialisiert.</translation>
</message>
<message>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>Angeforderter Zahlungsbetrag in Höhe von %1 ist zu niedrig und wurde als "Dust" eingestuft.</translation>
</message>
<message>
<source>Payment request error</source>
<translation>fehlerhafte Zahlungsanforderung</translation>
</message>
<message>
<source>Cannot start zetacoin: click-to-pay handler</source>
<translation>"zetacoin: Klicken-zum-Bezahlen"-Handler konnte nicht gestartet werden</translation>
</message>
<message>
<source>Payment request fetch URL is invalid: %1</source>
<translation>Abruf-URL der Zahlungsanforderung ist ungültig: %1</translation>
</message>
<message>
<source>URI cannot be parsed! This can be caused by an invalid Zetacoin address or malformed URI parameters.</source>
<translation>URI kann nicht analysiert werden! Dies kann durch eine ungültige Zetacoin-Adresse oder fehlerhafte URI-Parameter verursacht werden.</translation>
</message>
<message>
<source>Payment request file handling</source>
<translation>Zahlungsanforderungsdatei-Verarbeitung</translation>
</message>
<message>
<source>Payment request file cannot be read! This can be caused by an invalid payment request file.</source>
<translation>Zahlungsanforderungsdatei kann nicht gelesen werden! Dies kann durch eine ungültige Zahlungsanforderungsdatei verursacht werden.</translation>
</message>
<message>
<source>Payment request expired.</source>
<translation>Zahlungsanforderung abgelaufen.</translation>
</message>
<message>
<source>Unverified payment requests to custom payment scripts are unsupported.</source>
<translation>Unverifizierte Zahlungsanforderungen an benutzerdefinierte Zahlungsskripte werden nicht unterstützt.</translation>
</message>
<message>
<source>Invalid payment request.</source>
<translation>Ungültige Zahlungsanforderung.</translation>
</message>
<message>
<source>Refund from %1</source>
<translation>Rücküberweisung von %1</translation>
</message>
<message>
<source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source>
<translation>Zahlungsanforderung %1 ist zu groß (%2 Byte, erlaubt sind %3 Byte).</translation>
</message>
<message>
<source>Error communicating with %1: %2</source>
<translation>Kommunikationsfehler mit %1: %2</translation>
</message>
<message>
<source>Payment request cannot be parsed!</source>
<translation>Zahlungsanforderung kann nicht verarbeitet werden!</translation>
</message>
<message>
<source>Bad response from server %1</source>
<translation>Fehlerhafte Antwort vom Server: %1</translation>
</message>
<message>
<source>Payment acknowledged</source>
<translation>Zahlung bestätigt</translation>
</message>
<message>
<source>Network request error</source>
<translation>fehlerhafte Netzwerkanfrage</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>User Agent</source>
<translation>User-Agent</translation>
</message>
<message>
<source>Node/Service</source>
<translation>Knoten/Dienst</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Pingzeit</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>Enter a Zetacoin address (e.g. %1)</source>
<translation>Zetacoin-Adresse eingeben (z.B. %1)</translation>
</message>
<message>
<source>%1 d</source>
<translation>%1 d</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 h</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 m</translation>
</message>
<message>
<source>%1 s</source>
<translation>%1 s</translation>
</message>
<message>
<source>None</source>
<translation>Keine</translation>
</message>
<message>
<source>N/A</source>
<translation>k.A.</translation>
</message>
<message>
<source>%1 ms</source>
<translation>%1 ms</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>Grafik &speichern...</translation>
</message>
<message>
<source>&Copy Image</source>
<translation>Grafik &kopieren</translation>
</message>
<message>
<source>Save QR Code</source>
<translation>QR-Code speichern</translation>
</message>
<message>
<source>PNG Image (*.png)</source>
<translation>PNG-Grafik (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Client name</source>
<translation>Clientname</translation>
</message>
<message>
<source>N/A</source>
<translation>k.A.</translation>
</message>
<message>
<source>Client version</source>
<translation>Clientversion</translation>
</message>
<message>
<source>&Information</source>
<translation>Hinweis</translation>
</message>
<message>
<source>Debug window</source>
<translation>Debugfenster</translation>
</message>
<message>
<source>General</source>
<translation>Allgemein</translation>
</message>
<message>
<source>Using OpenSSL version</source>
<translation>Verwendete OpenSSL-Version</translation>
</message>
<message>
<source>Using BerkeleyDB version</source>
<translation>Verwendete BerkeleyDB-Version</translation>
</message>
<message>
<source>Startup time</source>
<translation>Startzeit</translation>
</message>
<message>
<source>Network</source>
<translation>Netzwerk</translation>
</message>
<message>
<source>Name</source>
<translation>Name</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Anzahl Verbindungen</translation>
</message>
<message>
<source>Block chain</source>
<translation>Blockkette</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Aktuelle Anzahl Blöcke</translation>
</message>
<message>
<source>Memory Pool</source>
<translation>Speicherpool</translation>
</message>
<message>
<source>Current number of transactions</source>
<translation>Aktuelle Anzahl der Transaktionen</translation>
</message>
<message>
<source>Memory usage</source>
<translation>Speichernutzung</translation>
</message>
<message>
<source>Open the Zetacoin Core debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Öffnet die "Zetacoin Core"-Debugprotokolldatei aus dem aktuellen Datenverzeichnis. Dies kann bei großen Protokolldateien einige Sekunden dauern.</translation>
</message>
<message>
<source>Received</source>
<translation>Empfangen</translation>
</message>
<message>
<source>Sent</source>
<translation>Übertragen</translation>
</message>
<message>
<source>&Peers</source>
<translation>&Gegenstellen</translation>
</message>
<message>
<source>Banned peers</source>
<translation>Gesperrte Peers</translation>
</message>
<message>
<source>Select a peer to view detailed information.</source>
<translation>Gegenstelle auswählen, um detaillierte Informationen zu erhalten.</translation>
</message>
<message>
<source>Whitelisted</source>
<translation>Zugelassene</translation>
</message>
<message>
<source>Direction</source>
<translation>Richtung</translation>
</message>
<message>
<source>Version</source>
<translation>Version</translation>
</message>
<message>
<source>Starting Block</source>
<translation>Start Block</translation>
</message>
<message>
<source>Synced Headers</source>
<translation>Synchronisierte Kopfdaten</translation>
</message>
<message>
<source>Synced Blocks</source>
<translation>Synchronisierte Blöcke</translation>
</message>
<message>
<source>User Agent</source>
<translation>User-Agent</translation>
</message>
<message>
<source>Services</source>
<translation>Dienste</translation>
</message>
<message>
<source>Ban Score</source>
<translation>Sperrpunktzahl</translation>
</message>
<message>
<source>Connection Time</source>
<translation>Verbindungsdauer</translation>
</message>
<message>
<source>Last Send</source>
<translation>Letzte Übertragung</translation>
</message>
<message>
<source>Last Receive</source>
<translation>Letzter Empfang</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Pingzeit</translation>
</message>
<message>
<source>The duration of a currently outstanding ping.</source>
<translation>Die Laufzeit eines aktuell ausstehenden Ping.</translation>
</message>
<message>
<source>Ping Wait</source>
<translation>Ping Wartezeit</translation>
</message>
<message>
<source>Time Offset</source>
<translation>Zeitversatz</translation>
</message>
<message>
<source>Last block time</source>
<translation>Letzte Blockzeit</translation>
</message>
<message>
<source>&Open</source>
<translation>&Öffnen</translation>
</message>
<message>
<source>&Console</source>
<translation>&Konsole</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation>&Netzwerkauslastung</translation>
</message>
<message>
<source>&Clear</source>
<translation>&Zurücksetzen</translation>
</message>
<message>
<source>Totals</source>
        <translation>Gesamtbetrag</translation>
</message>
<message>
<source>In:</source>
<translation>eingehend:</translation>
</message>
<message>
<source>Out:</source>
<translation>ausgehend:</translation>
</message>
<message>
<source>Build date</source>
<translation>Erstellungsdatum</translation>
</message>
<message>
<source>Debug log file</source>
<translation>Debugprotokolldatei</translation>
</message>
<message>
<source>Clear console</source>
<translation>Konsole zurücksetzen</translation>
</message>
<message>
<source>&Disconnect Node</source>
<translation>Knoten &trennen</translation>
</message>
<message>
<source>Ban Node for</source>
<translation>Knoten gebannt für</translation>
</message>
<message>
<source>1 &hour</source>
<translation>1 &Stunde</translation>
</message>
<message>
<source>1 &day</source>
<translation>1 &Tag</translation>
</message>
<message>
<source>1 &week</source>
<translation>1 &Woche</translation>
</message>
<message>
<source>1 &year</source>
<translation>1 &Jahr</translation>
</message>
<message>
<source>&Unban Node</source>
<translation>&Node entsperren</translation>
</message>
<message>
<source>Welcome to the Zetacoin Core RPC console.</source>
<translation>Willkommen in der "Zetacoin Core"-RPC-Konsole.</translation>
</message>
<message>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Pfeiltaste hoch und runter, um den Verlauf durchzublättern und <b>Strg-L</b>, um die Konsole zurückzusetzen.</translation>
</message>
<message>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Bitte <b>help</b> eingeben, um eine Übersicht verfügbarer Befehle zu erhalten.</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
<message>
<source>(node id: %1)</source>
<translation>(Knotenkennung: %1)</translation>
</message>
<message>
<source>via %1</source>
<translation>über %1</translation>
</message>
<message>
<source>never</source>
<translation>nie</translation>
</message>
<message>
<source>Inbound</source>
<translation>eingehend</translation>
</message>
<message>
<source>Outbound</source>
<translation>ausgehend</translation>
</message>
<message>
<source>Yes</source>
<translation>Ja</translation>
</message>
<message>
<source>No</source>
<translation>Nein</translation>
</message>
<message>
<source>Unknown</source>
<translation>Unbekannt</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>&Betrag:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Bezeichnung:</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Nachricht:</translation>
</message>
<message>
<source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source>
        <translation>Eine der bereits verwendeten Empfangsadressen wiederverwenden. Adressen wiederzuverwenden birgt Sicherheits- und Datenschutzrisiken. Außer zum Neuerstellen einer bereits erzeugten Zahlungsanforderung sollten Sie dies nicht nutzen.</translation>
</message>
<message>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>Vorhandene Empfangsadresse &wiederverwenden (nicht empfohlen)</translation>
</message>
<message>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the Zetacoin network.</source>
<translation>Eine optionale Nachricht, die an die Zahlungsanforderung angehängt wird. Sie wird angezeigt, wenn die Anforderung geöffnet wird. Hinweis: Diese Nachricht wird nicht mit der Zahlung über das Zetacoin-Netzwerk gesendet.</translation>
</message>
<message>
<source>An optional label to associate with the new receiving address.</source>
<translation>Eine optionale Bezeichnung, die der neuen Empfangsadresse zugeordnet wird.</translation>
</message>
<message>
<source>Use this form to request payments. All fields are <b>optional</b>.</source>
<translation>Verwenden Sie dieses Formular, um Zahlungen anzufordern. Alle Felder sind <b>optional</b>.</translation>
</message>
<message>
<source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source>
        <translation>Ein optional angeforderter Betrag. Lassen Sie dieses Feld leer oder setzen Sie es auf 0, um keinen spezifischen Betrag anzufordern.</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Alle Formularfelder zurücksetzen.</translation>
</message>
<message>
<source>Clear</source>
<translation>Zurücksetzen</translation>
</message>
<message>
<source>Requested payments history</source>
<translation>Verlauf der angeforderten Zahlungen</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Zahlung anfordern</translation>
</message>
<message>
<source>Show the selected request (does the same as double clicking an entry)</source>
<translation>Ausgewählte Zahlungsanforderungen anzeigen (entspricht einem Doppelklick auf einen Eintrag)</translation>
</message>
<message>
<source>Show</source>
<translation>Anzeigen</translation>
</message>
<message>
<source>Remove the selected entries from the list</source>
<translation>Ausgewählte Einträge aus der Liste entfernen</translation>
</message>
<message>
<source>Remove</source>
<translation>Entfernen</translation>
</message>
<message>
<source>Copy label</source>
<translation>Bezeichnung kopieren</translation>
</message>
<message>
<source>Copy message</source>
<translation>Nachricht kopieren</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Betrag kopieren</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>QR-Code</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>&URI kopieren</translation>
</message>
<message>
<source>Copy &Address</source>
        <translation>&Adresse kopieren</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>Grafik &speichern...</translation>
</message>
<message>
<source>Request payment to %1</source>
<translation>Zahlung anfordern an %1</translation>
</message>
<message>
<source>Payment information</source>
<translation>Zahlungsinformationen</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>Label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Message</source>
<translation>Nachricht</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Resultierende URI ist zu lang, bitte den Text für Bezeichnung/Nachricht kürzen.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>Beim Enkodieren der URI in den QR-Code ist ein Fehler aufgetreten.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Message</source>
<translation>Nachricht</translation>
</message>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>(no label)</source>
<translation>(keine Bezeichnung)</translation>
</message>
<message>
<source>(no message)</source>
<translation>(keine Nachricht)</translation>
</message>
<message>
<source>(no amount)</source>
<translation>(kein Betrag)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Zetacoins überweisen</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>"Coin Control"-Funktionen</translation>
</message>
<message>
<source>Inputs...</source>
<translation>Eingaben...</translation>
</message>
<message>
<source>automatically selected</source>
<translation>automatisch ausgewählt</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Unzureichender Kontostand!</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Anzahl:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Byte:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Betrag:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Priorität:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Gebühr:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Abzüglich Gebühr:</translation>
</message>
<message>
<source>Change:</source>
<translation>Wechselgeld:</translation>
</message>
<message>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
        <translation>Wenn dies aktiviert ist und die Wechselgeld-Adresse leer oder ungültig ist, wird das Wechselgeld einer neu erzeugten Adresse gutgeschrieben.</translation>
</message>
<message>
<source>Custom change address</source>
<translation>Benutzerdefinierte Wechselgeld-Adresse</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Transaktionsgebühr:</translation>
</message>
<message>
<source>Choose...</source>
<translation>Auswählen...</translation>
</message>
<message>
<source>collapse fee-settings</source>
<translation>Transaktionsgebühreneinstellungen ausblenden</translation>
</message>
<message>
<source>per kilobyte</source>
<translation>pro Kilobyte</translation>
</message>
<message>
<source>If the custom fee is set to 1000 satoshis and the transaction is only 250 bytes, then "per kilobyte" only pays 250 satoshis in fee, while "total at least" pays 1000 satoshis. For transactions bigger than a kilobyte both pay by kilobyte.</source>
        <translation>Wenn die benutzerdefinierte Gebühr 1000 Satoshis beträgt und die Transaktion nur 250 Byte groß ist, wird bei Auswahl von "pro Kilobyte" eine Gebühr in Höhe von 250 Satoshis, bei Auswahl von "Mindestbetrag" eine Gebühr in Höhe von 1000 Satoshis bezahlt. Bei Transaktionen, die größer als ein Kilobyte sind, werden bei beiden Optionen die Gebühren pro Kilobyte bezahlt.</translation>
</message>
<message>
<source>Hide</source>
<translation>Ausblenden</translation>
</message>
<message>
<source>total at least</source>
<translation>Mindestbetrag</translation>
</message>
<message>
<source>Paying only the minimum fee is just fine as long as there is less transaction volume than space in the blocks. But be aware that this can end up in a never confirming transaction once there is more demand for zetacoin transactions than the network can process.</source>
<translation>Nur die minimale Gebühr zu bezahlen ist so lange in Ordnung, wie weniger Transaktionsvolumen als Platz in den Blöcken vorhanden ist. Aber Vorsicht, diese Option kann dazu führen, dass Transaktionen nicht bestätigt werden, wenn mehr Bedarf an Zetacoin-Transaktionen besteht als das Netzwerk verarbeiten kann.</translation>
</message>
<message>
<source>(read the tooltip)</source>
<translation>(den Hinweistext lesen)</translation>
</message>
<message>
<source>Recommended:</source>
<translation>Empfehlungen:</translation>
</message>
<message>
<source>Custom:</source>
<translation>Benutzerdefiniert:</translation>
</message>
<message>
<source>(Smart fee not initialized yet. This usually takes a few blocks...)</source>
<translation>(Intelligente Gebührenlogik ist noch nicht verfügbar. Normalerweise dauert dies einige Blöcke lang...)</translation>
</message>
<message>
<source>Confirmation time:</source>
<translation>Bestätigungszeit:</translation>
</message>
<message>
<source>normal</source>
<translation>normal</translation>
</message>
<message>
<source>fast</source>
<translation>schnell</translation>
</message>
<message>
<source>Send as zero-fee transaction if possible</source>
<translation>Wenn möglich als gebührenfreie Transaktion senden</translation>
</message>
<message>
<source>(confirmation may take longer)</source>
<translation>(Bestätigung kann länger dauern)</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>An mehrere Empfänger auf einmal überweisen</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>Empfänger &hinzufügen</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Alle Formularfelder zurücksetzen.</translation>
</message>
<message>
<source>Dust:</source>
<translation>"Dust":</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Zurücksetzen</translation>
</message>
<message>
<source>Balance:</source>
<translation>Kontostand:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Überweisung bestätigen</translation>
</message>
<message>
<source>S&end</source>
<translation>&Überweisen</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Überweisung bestätigen</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 an %2</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Anzahl kopieren</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Betrag kopieren</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Gebühr kopieren</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Abzüglich Gebühr kopieren</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Byte kopieren</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Priorität kopieren</translation>
</message>
<message>
<source>Copy change</source>
<translation>Wechselgeld kopieren</translation>
</message>
<message>
<source>Total Amount %1</source>
<translation>Gesamtbetrag %1</translation>
</message>
<message>
<source>or</source>
<translation>oder</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>Der zu zahlende Betrag muss größer als 0 sein.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>Der angegebene Betrag übersteigt Ihren Kontostand.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Der angegebene Betrag übersteigt aufgrund der Transaktionsgebühr in Höhe von %1 Ihren Kontostand.</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation>Transaktionserstellung fehlgeschlagen!</translation>
</message>
<message>
<source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Die Transaktion wurde abgelehnt! Dies kann passieren, wenn einige Zetacoins aus Ihrer Wallet bereits ausgegeben wurden. Beispielsweise weil Sie eine Kopie Ihrer wallet.dat genutzt, die Zetacoins dort ausgegeben haben und dies daher in der derzeit aktiven Wallet nicht vermerkt ist.</translation>
</message>
<message>
<source>A fee higher than %1 is considered an absurdly high fee.</source>
<translation>Eine höhere Gebühr als %1 wird als unsinnig hohe Gebühr angesehen.</translation>
</message>
<message>
<source>Payment request expired.</source>
<translation>Zahlungsanforderung abgelaufen.</translation>
</message>
<message>
<source>Pay only the required fee of %1</source>
<translation>Nur die notwendige Gebühr in Höhe von %1 zahlen</translation>
</message>
<message numerus="yes">
<source>Estimated to begin confirmation within %n block(s).</source>
<translation><numerusform>Voraussichtlicher Beginn der Bestätigung innerhalb von %n Block.</numerusform><numerusform>Voraussichtlicher Beginn der Bestätigung innerhalb von %n Blöcken.</numerusform></translation>
</message>
<message>
<source>The recipient address is not valid. Please recheck.</source>
<translation>Die Zahlungsadresse ist ungültig, bitte nochmals überprüfen.</translation>
</message>
<message>
<source>Duplicate address found: addresses should only be used once each.</source>
<translation>Doppelte Adresse entdeckt: Adressen dürfen jeweils nur einmal vorkommen.</translation>
</message>
<message>
<source>Warning: Invalid Zetacoin address</source>
<translation>Warnung: Ungültige Zetacoin-Adresse</translation>
</message>
<message>
<source>(no label)</source>
<translation>(keine Bezeichnung)</translation>
</message>
<message>
<source>Warning: Unknown change address</source>
<translation>Warnung: Unbekannte Wechselgeld-Adresse</translation>
</message>
<message>
<source>Copy dust</source>
<translation>"Dust" kopieren</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Wollen Sie die Überweisung ausführen?</translation>
</message>
<message>
<source>added as transaction fee</source>
<translation>als Transaktionsgebühr hinzugefügt</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>Betra&g:</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>E&mpfänger:</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Adressbezeichnung eingeben (diese wird zusammen mit der Adresse dem Adressbuch hinzugefügt)</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Bezeichnung:</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Bereits verwendete Adresse auswählen</translation>
</message>
<message>
<source>This is a normal payment.</source>
<translation>Dies ist eine normale Überweisung.</translation>
</message>
<message>
<source>The Zetacoin address to send the payment to</source>
<translation>Die Zahlungsadresse der Überweisung</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Adresse aus der Zwischenablage einfügen</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation>Diesen Eintrag entfernen</translation>
</message>
<message>
<source>The fee will be deducted from the amount being sent. The recipient will receive less zetacoins than you enter in the amount field. If multiple recipients are selected, the fee is split equally.</source>
<translation>Die Gebühr wird vom zu überweisenden Betrag abgezogen. Der Empfänger wird also weniger Zetacoins erhalten, als Sie im Betrags-Feld eingegeben haben. Falls mehrere Empfänger ausgewählt wurden, wird die Gebühr gleichmäßig verteilt.</translation>
</message>
<message>
<source>S&ubtract fee from amount</source>
<translation>Gebühr vom Betrag ab&ziehen</translation>
</message>
<message>
<source>Message:</source>
<translation>Nachricht:</translation>
</message>
<message>
<source>This is an unauthenticated payment request.</source>
<translation>Dies ist keine beglaubigte Zahlungsanforderung.</translation>
</message>
<message>
<source>This is an authenticated payment request.</source>
<translation>Dies ist eine beglaubigte Zahlungsanforderung.</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Adressbezeichnung eingeben, die dann zusammen mit der Adresse der Liste bereits verwendeter Adressen hinzugefügt wird.</translation>
</message>
<message>
<source>A message that was attached to the zetacoin: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the Zetacoin network.</source>
<translation>Eine an die "zetacoin:"-URI angefügte Nachricht, die zusammen mit der Transaktion gespeichert wird. Hinweis: Diese Nachricht wird nicht über das Zetacoin-Netzwerk gesendet.</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Empfänger:</translation>
</message>
<message>
<source>Memo:</source>
<translation>Memo:</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>Zetacoin Core is shutting down...</source>
<translation>Zetacoin Core wird beendet...</translation>
</message>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Fahren Sie den Computer nicht herunter, bevor dieses Fenster verschwindet.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>Signaturen - eine Nachricht signieren / verifizieren</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>Nachricht &signieren</translation>
</message>
<message>
<source>You can sign messages/agreements with your addresses to prove you can receive zetacoins sent to them. Be careful not to sign anything vague or random, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Sie können Nachrichten/Vereinbarungen mit Hilfe Ihrer Adressen signieren, um zu beweisen, dass Sie Zetacoins empfangen können, die an diese Adressen überwiesen werden. Seien Sie vorsichtig und signieren Sie nichts Vages oder Willkürliches, um Ihre Identität vor Phishingangriffen zu schützen. Signieren Sie nur vollständig-detaillierte Aussagen, mit denen Sie auch einverstanden sind.</translation>
</message>
<message>
<source>The Zetacoin address to sign the message with</source>
<translation>Die Zetacoin-Adresse mit der die Nachricht signiert wird</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Bereits verwendete Adresse auswählen</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Adresse aus der Zwischenablage einfügen</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Zu signierende Nachricht hier eingeben</translation>
</message>
<message>
<source>Signature</source>
<translation>Signatur</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Aktuelle Signatur in die Zwischenablage kopieren</translation>
</message>
<message>
<source>Sign the message to prove you own this Zetacoin address</source>
<translation>Die Nachricht signieren, um den Besitz dieser Zetacoin-Adresse zu beweisen</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>&Nachricht signieren</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Alle "Nachricht signieren"-Felder zurücksetzen</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Zurücksetzen</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>Nachricht &verifizieren</translation>
</message>
<message>
<source>Enter the receiver's address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack. Note that this only proves the signing party receives with the address, it cannot prove sendership of any transaction!</source>
<translation>Geben Sie die Zahlungsadresse des Empfängers, Nachricht (achten Sie darauf Zeilenumbrüche, Leerzeichen, Tabulatoren usw. exakt zu kopieren) und Signatur unten ein, um die Nachricht zu verifizieren. Vorsicht, interpretieren Sie nicht mehr in die Signatur hinein, als in der signierten Nachricht selber enthalten ist, um nicht von einem Man-in-the-middle-Angriff hinters Licht geführt zu werden. Beachten Sie, dass dies nur beweist, dass die signierende Partei über diese Adresse Überweisungen empfangen kann.</translation>
</message>
<message>
<source>The Zetacoin address the message was signed with</source>
<translation>Die Zetacoin-Adresse mit der die Nachricht signiert wurde</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified Zetacoin address</source>
<translation>Die Nachricht verifizieren, um sicherzustellen, dass diese mit der angegebenen Zetacoin-Adresse signiert wurde</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>&Nachricht verifizieren</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Alle "Nachricht verifizieren"-Felder zurücksetzen</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Auf "Nachricht signieren" klicken, um die Signatur zu erzeugen</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>Die eingegebene Adresse ist ungültig.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Bitte überprüfen Sie die Adresse und versuchen Sie es erneut.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>Die eingegebene Adresse verweist nicht auf einen Schlüssel.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Wallet-Entsperrung wurde abgebrochen.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>Privater Schlüssel zur eingegebenen Adresse ist nicht verfügbar.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>Signierung der Nachricht fehlgeschlagen.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Nachricht signiert.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>Die Signatur konnte nicht dekodiert werden.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>Bitte überprüfen Sie die Signatur und versuchen Sie es erneut.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>Die Signatur entspricht nicht dem "Message Digest".</translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>Verifikation der Nachricht fehlgeschlagen.</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Nachricht verifiziert.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>Zetacoin Core</source>
<translation>Zetacoin Core</translation>
</message>
<message>
<source>The Zetacoin Core developers</source>
<translation>Die "Zetacoin Core"-Entwickler</translation>
</message>
<message>
<source>[testnet]</source>
<translation>[Testnetz]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Offen bis %1</translation>
</message>
<message>
<source>conflicted</source>
<translation>in Konflikt stehend</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/unbestätigt</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 Bestätigungen</translation>
</message>
<message>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, über %n Knoten übertragen</numerusform><numerusform>, über %n Knoten übertragen</numerusform></translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Source</source>
<translation>Quelle</translation>
</message>
<message>
<source>Generated</source>
<translation>Erzeugt</translation>
</message>
<message>
<source>From</source>
<translation>Von</translation>
</message>
<message>
<source>To</source>
<translation>An</translation>
</message>
<message>
<source>own address</source>
<translation>eigene Adresse</translation>
</message>
<message>
<source>watch-only</source>
<translation>beobachtet</translation>
</message>
<message>
<source>label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Credit</source>
<translation>Gutschrift</translation>
</message>
<message numerus="yes">
<source>matures in %n more block(s)</source>
<translation><numerusform>reift noch %n weiteren Block</numerusform><numerusform>reift noch %n weitere Blöcke</numerusform></translation>
</message>
<message>
<source>not accepted</source>
<translation>nicht angenommen</translation>
</message>
<message>
<source>Debit</source>
<translation>Belastung</translation>
</message>
<message>
<source>Total debit</source>
<translation>Gesamtbelastung</translation>
</message>
<message>
<source>Total credit</source>
<translation>Gesamtgutschrift</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Transaktionsgebühr</translation>
</message>
<message>
<source>Net amount</source>
<translation>Nettobetrag</translation>
</message>
<message>
<source>Message</source>
<translation>Nachricht</translation>
</message>
<message>
<source>Comment</source>
<translation>Kommentar</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>Transaktions-ID</translation>
</message>
<message>
<source>Merchant</source>
<translation>Händler</translation>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Erzeugte Zetacoins müssen %1 Blöcke lang reifen, bevor sie ausgegeben werden können. Als Sie diesen Block erzeugten, wurde er an das Netzwerk übertragen, um ihn der Blockkette hinzuzufügen. Falls dies fehlschlägt wird der Status in "nicht angenommen" geändert und Sie werden keine Zetacoins gutgeschrieben bekommen. Das kann gelegentlich passieren, wenn ein anderer Knoten einen Block fast zeitgleich erzeugt.</translation>
</message>
<message>
<source>Debug information</source>
<translation>Debuginformationen</translation>
</message>
<message>
<source>Transaction</source>
<translation>Transaktion</translation>
</message>
<message>
<source>Inputs</source>
<translation>Eingaben</translation>
</message>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>true</source>
<translation>wahr</translation>
</message>
<message>
<source>false</source>
<translation>falsch</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, wurde noch nicht erfolgreich übertragen</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Offen für %n weiteren Block</numerusform><numerusform>Offen für %n weitere Blöcke</numerusform></translation>
</message>
<message>
<source>unknown</source>
<translation>unbekannt</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>Transaction details</source>
<translation>Transaktionsdetails</translation>
</message>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Dieser Bereich zeigt eine detaillierte Beschreibung der Transaktion an</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation>Unreif (%1 Bestätigungen, wird verfügbar sein nach %2)</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Offen für %n weiteren Block</numerusform><numerusform>Offen für %n weitere Blöcke</numerusform></translation>
</message>
<message>
<source>Open until %1</source>
<translation>Offen bis %1</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Bestätigt (%1 Bestätigungen)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Dieser Block wurde von keinem anderen Knoten empfangen und wird wahrscheinlich nicht angenommen werden!</translation>
</message>
<message>
<source>Generated but not accepted</source>
<translation>Erzeugt, jedoch nicht angenommen</translation>
</message>
<message>
<source>Offline</source>
<translation>Offline</translation>
</message>
<message>
<source>Label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Unconfirmed</source>
<translation>Unbestätigt</translation>
</message>
<message>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Wird bestätigt (%1 von %2 empfohlenen Bestätigungen)</translation>
</message>
<message>
<source>Conflicted</source>
<translation>in Konflikt stehend</translation>
</message>
<message>
<source>Received with</source>
<translation>Empfangen über</translation>
</message>
<message>
<source>Received from</source>
<translation>Empfangen von</translation>
</message>
<message>
<source>Sent to</source>
<translation>Überwiesen an</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Eigenüberweisung</translation>
</message>
<message>
<source>Mined</source>
<translation>Erarbeitet</translation>
</message>
<message>
<source>watch-only</source>
<translation>beobachtet</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(k.A.)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transaktionsstatus, fahren Sie mit der Maus über dieses Feld, um die Anzahl der Bestätigungen zu sehen.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Datum und Uhrzeit zu der die Transaktion empfangen wurde.</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>Art der Transaktion.</translation>
</message>
<message>
<source>Whether or not a watch-only address is involved in this transaction.</source>
<translation>Zeigt an, ob eine beobachtete Adresse in diese Transaktion involviert ist.</translation>
</message>
<message>
<source>User-defined intent/purpose of the transaction.</source>
<translation>Benutzerdefinierte Absicht bzw. Verwendungszweck der Transaktion</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Der Betrag, der dem Kontostand abgezogen oder hinzugefügt wurde.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Alle</translation>
</message>
<message>
<source>Today</source>
<translation>Heute</translation>
</message>
<message>
<source>This week</source>
<translation>Diese Woche</translation>
</message>
<message>
<source>This month</source>
<translation>Diesen Monat</translation>
</message>
<message>
<source>Last month</source>
<translation>Letzten Monat</translation>
</message>
<message>
<source>This year</source>
<translation>Dieses Jahr</translation>
</message>
<message>
<source>Range...</source>
<translation>Zeitraum...</translation>
</message>
<message>
<source>Received with</source>
<translation>Empfangen über</translation>
</message>
<message>
<source>Sent to</source>
<translation>Überwiesen an</translation>
</message>
<message>
<source>To yourself</source>
<translation>Eigenüberweisung</translation>
</message>
<message>
<source>Mined</source>
<translation>Erarbeitet</translation>
</message>
<message>
<source>Other</source>
<translation>Andere</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Zu suchende Adresse oder Bezeichnung eingeben</translation>
</message>
<message>
<source>Min amount</source>
<translation>Minimaler Betrag</translation>
</message>
<message>
<source>Copy address</source>
<translation>Adresse kopieren</translation>
</message>
<message>
<source>Copy label</source>
<translation>Bezeichnung kopieren</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Betrag kopieren</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Transaktions-ID kopieren</translation>
</message>
<message>
<source>Copy raw transaction</source>
<translation>Kopiere rohe Transaktion</translation>
</message>
<message>
<source>Edit label</source>
<translation>Bezeichnung bearbeiten</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Transaktionsdetails anzeigen</translation>
</message>
<message>
<source>Export Transaction History</source>
<translation>Transaktionsverlauf exportieren</translation>
</message>
<message>
<source>Watch-only</source>
<translation>Beobachtet</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Exportieren fehlgeschlagen</translation>
</message>
<message>
<source>There was an error trying to save the transaction history to %1.</source>
<translation>Beim Speichern des Transaktionsverlaufs nach %1 ist ein Fehler aufgetreten.</translation>
</message>
<message>
<source>Exporting Successful</source>
<translation>Exportieren erfolgreich</translation>
</message>
<message>
<source>The transaction history was successfully saved to %1.</source>
<translation>Speichern des Transaktionsverlaufs nach %1 war erfolgreich.</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Kommagetrennte-Datei (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Bestätigt</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<source>Label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Range:</source>
<translation>Zeitraum:</translation>
</message>
<message>
<source>to</source>
<translation>bis</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
<message>
<source>Unit to show amounts in. Click to select another unit.</source>
<translation>Die Einheit in der Beträge angezeigt werden. Klicken, um eine andere Einheit auszuwählen.</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<source>No wallet has been loaded.</source>
<translation>Es wurde keine Wallet geladen.</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Zetacoins überweisen</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>E&xportieren</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Daten der aktuellen Ansicht in eine Datei exportieren</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Wallet sichern</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Wallet-Daten (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>Sicherung fehlgeschlagen</translation>
</message>
<message>
<source>There was an error trying to save the wallet data to %1.</source>
<translation>Beim Speichern der Wallet-Daten nach %1 ist ein Fehler aufgetreten.</translation>
</message>
<message>
<source>The wallet data was successfully saved to %1.</source>
<translation>Speichern der Wallet-Daten nach %1 war erfolgreich.</translation>
</message>
<message>
<source>Backup Successful</source>
<translation>Sicherung erfolgreich</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Options:</source>
<translation>Optionen:</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Datenverzeichnis festlegen</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Mit dem angegebenen Knoten verbinden, um Adressen von Gegenstellen abzufragen, danach trennen</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Die eigene öffentliche Adresse angeben</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Kommandozeilen- und JSON-RPC-Befehle annehmen</translation>
</message>
<message>
<source>If <category> is not supplied or if <category> = 1, output all debugging information.</source>
<translation>Wenn <category> nicht angegeben wird oder <category>=1, jegliche Debugginginformationen ausgeben.</translation>
</message>
<message>
<source>Maximum total fees (in %s) to use in a single wallet transaction; setting this too low may abort large transactions (default: %s)</source>
<translation>Maximale Gesamtgebühr (in %s) für eine einzelne Wallet-Transaktion; wird dieser Wert zu niedrig gesetzt, können große Transaktionen abgebrochen werden (Standard: %s)</translation>
</message>
<message>
<source>Please check that your computer's date and time are correct! If your clock is wrong Zetacoin Core will not work properly.</source>
<translation>Bitte korrigieren Sie die Datums- und Uhrzeiteinstellungen Ihres Computers, da Zetacoin Core ansonsten nicht ordnungsgemäß funktionieren wird.</translation>
</message>
<message>
<source>Prune configured below the minimum of %d MiB. Please use a higher number.</source>
<translation>Kürzungsmodus wurde kleiner als das Minimum in Höhe von %d MiB konfiguriert. Bitte verwenden Sie einen größeren Wert.</translation>
</message>
<message>
<source>Reduce storage requirements by pruning (deleting) old blocks. This mode is incompatible with -txindex and -rescan. Warning: Reverting this setting requires re-downloading the entire blockchain. (default: 0 = disable pruning blocks, >%u = target size in MiB to use for block files)</source>
<translation>Speicherplatzanforderung durch kürzen (löschen) alter Blöcke reduzieren. Dieser Modus ist nicht mit -txindex und -rescan kompatibel. Warnung: Die Umkehr dieser Einstellung erfordert das erneute Herunterladen der gesamten Blockkette. (Standard: 0 = deaktiviert das Kürzen von Blöcken, >%u = Zielgröße in MiB, die für Blockdateien verwendet werden darf)</translation>
</message>
<message>
<source>Error: A fatal internal error occurred, see debug.log for details</source>
<translation>Fehler: Ein schwerer interner Fehler ist aufgetreten, siehe debug.log für Details.</translation>
</message>
<message>
<source>Fee (in %s/kB) to add to transactions you send (default: %s)</source>
<translation>Gebühr (in %s/kB), die von Ihnen gesendeten Transaktionen hinzugefügt wird (Standard: %s)</translation>
</message>
<message>
<source>Pruning blockstore...</source>
<translation>Kürze Blockspeicher...</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Als Hintergrunddienst ausführen und Befehle annehmen</translation>
</message>
<message>
<source>Unable to start HTTP server. See debug log for details.</source>
<translation>Kann HTTP Server nicht starten. Siehe debug log für Details.</translation>
</message>
<message>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Eingehende Verbindungen annehmen (Standard: 1, wenn nicht -proxy oder -connect)</translation>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>An die angegebene Adresse binden und immer abhören. Für IPv6 "[Host]:Port"-Notation verwenden</translation>
</message>
<message>
<source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source>
<translation>Alle Wallet-Transaktionen löschen und nur diese Teilbereiche der Blockkette durch -rescan beim Starten wiederherstellen</translation>
</message>
<message>
<source>Distributed under the MIT software license, see the accompanying file COPYING or <http://www.opensource.org/licenses/mit-license.php>.</source>
<translation>Veröffentlicht unter der MIT-Softwarelizenz, siehe beiliegende Datei COPYING oder &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Befehl ausführen wenn sich eine Wallet-Transaktion verändert (%s im Befehl wird durch die Transaktions-ID ersetzt)</translation>
</message>
<message>
<source>Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</source>
<translation>Maximale Anzahl an Skript-Verifizierungs-Threads festlegen (%u bis %d, 0 = automatisch, <0 = so viele Kerne frei lassen, Standard: %d)</translation>
</message>
<message>
<source>The block database contains a block which appears to be from the future. This may be due to your computer's date and time being set incorrectly. Only rebuild the block database if you are sure that your computer's date and time are correct</source>
<translation>Die Block-Datenbank enthält einen Block, der in der Zukunft auftaucht. Dies kann daran liegen, dass die Systemzeit Ihres Computers falsch eingestellt ist. Stellen Sie die Block-Datenbank nur wieder her, wenn Sie sich sicher sind, dass Ihre Systemzeit korrekt eingestellt ist.</translation>
</message>
<message>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Dies ist eine Vorab-Testversion - Verwendung auf eigene Gefahr - nicht für Mining- oder Handelsanwendungen nutzen!</translation>
</message>
<message>
<source>Unable to bind to %s on this computer. Zetacoin Core is probably already running.</source>
<translation>Kann auf diesem Computer nicht an %s binden, da Zetacoin Core wahrscheinlich bereits gestartet wurde.</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: 1 when listening and no -proxy)</source>
<translation>UPnP verwenden, um eine Portweiterleitung einzurichten (Standard: 1, wenn abgehört wird und -proxy nicht gesetzt ist)</translation>
</message>
<message>
<source>WARNING: abnormally high number of blocks generated, %d blocks received in the last %d hours (%d expected)</source>
<translation>Warnung: Es wurde eine ungewöhnlich hohe Anzahl Blöcke erzeugt, %d Blöcke wurden in den letzten %d Stunden empfangen (%d wurden erwartet).</translation>
</message>
<message>
<source>WARNING: check your network connection, %d blocks received in the last %d hours (%d expected)</source>
<translation>Warnung: Überprüfen Sie Ihre Netzwerkverbindung, %d Blöcke wurden in den letzten %d Stunden empfangen (%d wurden erwartet).</translation>
</message>
<message>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation>Warnung: Das Netzwerk scheint nicht vollständig übereinzustimmen! Einige Miner scheinen Probleme zu haben.</translation>
</message>
<message>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Warnung: Wir scheinen nicht vollständig mit unseren Gegenstellen übereinzustimmen! Sie oder die anderen Knoten müssen unter Umständen Ihre Client-Software aktualisieren.</translation>
</message>
<message>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Warnung: wallet.dat beschädigt, Datenrettung erfolgreich! Original wallet.dat wurde als wallet.{Zeitstempel}.bak in %s gespeichert. Falls Ihr Kontostand oder Transaktionen nicht korrekt sind, sollten Sie von einer Datensicherung wiederherstellen.</translation>
</message>
<message>
<source>Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</source>
<translation>Gegenstellen die sich von der angegebenen Netzmaske oder IP-Adresse aus verbinden immer zulassen. Kann mehrmals angegeben werden.</translation>
</message>
<message>
<source>-maxmempool must be at least %d MB</source>
<translation>-maxmempool muss mindestens %d MB betragen</translation>
</message>
<message>
<source><category> can be:</source>
<translation><category> kann sein:</translation>
</message>
<message>
<source>Block creation options:</source>
<translation>Blockerzeugungsoptionen:</translation>
</message>
<message>
<source>Connect only to the specified node(s)</source>
<translation>Mit nur dem oder den angegebenen Knoten verbinden</translation>
</message>
<message>
<source>Connection options:</source>
<translation>Verbindungsoptionen:</translation>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>Beschädigte Blockdatenbank erkannt</translation>
</message>
<message>
<source>Debugging/Testing options:</source>
<translation>Debugging-/Testoptionen:</translation>
</message>
<message>
<source>Do not load the wallet and disable wallet RPC calls</source>
<translation>Die Wallet nicht laden und Wallet-RPC-Aufrufe deaktivieren</translation>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>Möchten Sie die Blockdatenbank jetzt neu aufbauen?</translation>
</message>
<message>
<source>Enable publish hash block in <address></source>
<translation>Aktiviere das Veröffentlichen des Hash-Blocks in <address></translation>
</message>
<message>
<source>Enable publish hash transaction in <address></source>
<translation>Aktiviere das Veröffentlichen der Hash-Transaktion in <address></translation>
</message>
<message>
<source>Enable publish raw block in <address></source>
<translation>Aktiviere das Veröffentlichen des Raw-Blocks in <address></translation>
</message>
<message>
<source>Enable publish raw transaction in <address></source>
<translation>Aktiviere das Veröffentlichen der Roh-Transaktion in <address></translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>Fehler beim Initialisieren der Blockdatenbank</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation>Fehler beim Initialisieren der Wallet-Datenbankumgebung %s!</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Fehler beim Laden der Blockdatenbank</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>Fehler beim Öffnen der Blockdatenbank</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Fehler: Zu wenig freier Speicherplatz auf dem Datenträger!</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Fehler, es konnte kein Port abgehört werden. Wenn dies so gewünscht wird -listen=0 verwenden.</translation>
</message>
<message>
<source>Importing...</source>
<translation>Importiere...</translation>
</message>
<message>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation>Fehlerhafter oder kein Genesis-Block gefunden. Falsches Datenverzeichnis für das Netzwerk?</translation>
</message>
<message>
<source>Invalid -onion address: '%s'</source>
<translation>Ungültige "-onion"-Adresse: '%s'</translation>
</message>
<message>
<source>Keep the transaction memory pool below <n> megabytes (default: %u)</source>
<translation>Halten Sie den Transaktionsspeicherpool unter <n> Megabytes (Voreinstellung: %u)</translation>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation>Nicht genügend Datei-Deskriptoren verfügbar.</translation>
</message>
<message>
<source>Only connect to nodes in network <net> (ipv4, ipv6 or onion)</source>
<translation>Nur zu Knoten des Netzwerktyps <net> verbinden (ipv4, ipv6 oder onion)</translation>
</message>
<message>
<source>Prune cannot be configured with a negative value.</source>
<translation>Kürzungsmodus kann nicht mit einem negativen Wert konfiguriert werden.</translation>
</message>
<message>
<source>Prune mode is incompatible with -txindex.</source>
<translation>Kürzungsmodus ist nicht mit -txindex kompatibel.</translation>
</message>
<message>
<source>Set database cache size in megabytes (%d to %d, default: %d)</source>
<translation>Größe des Datenbankcaches in Megabyte festlegen (%d bis %d, Standard: %d)</translation>
</message>
<message>
<source>Set maximum block size in bytes (default: %d)</source>
<translation>Maximale Blockgröße in Byte festlegen (Standard: %d)</translation>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation>Wallet-Datei angeben (innerhalb des Datenverzeichnisses)</translation>
</message>
<message>
<source>Unsupported argument -benchmark ignored, use -debug=bench.</source>
<translation>Nicht unterstütztes Argument -benchmark wurde ignoriert, bitte -debug=bench verwenden.</translation>
</message>
<message>
<source>Unsupported argument -debugnet ignored, use -debug=net.</source>
<translation>Nicht unterstütztes Argument -debugnet wurde ignoriert, bitte -debug=net verwenden.</translation>
</message>
<message>
<source>Unsupported argument -tor found, use -onion.</source>
<translation>Nicht unterstütztes Argument -tor gefunden, bitte -onion verwenden.</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: %u)</source>
<translation>UPnP verwenden, um eine Portweiterleitung einzurichten (Standard: %u)</translation>
</message>
<message>
<source>User Agent comment (%s) contains unsafe characters.</source>
<translation>Der User Agent Kommentar (%s) enthält unsichere Zeichen.</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>Verifiziere Blöcke...</translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>Verifiziere Wallet...</translation>
</message>
<message>
<source>Wallet %s resides outside data directory %s</source>
<translation>Wallet %s liegt außerhalb des Datenverzeichnisses %s</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Wallet-Optionen:</translation>
</message>
<message>
<source>Warning: This version is obsolete; upgrade required!</source>
<translation>Warnung: Diese Version ist veraltet, Aktualisierung erforderlich!</translation>
</message>
<message>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation>Sie müssen die Datenbank mit Hilfe von -reindex neu aufbauen, um -txindex zu verändern</translation>
</message>
<message>
<source>Allow JSON-RPC connections from specified source. Valid for <ip> are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source>
<translation>JSON-RPC-Verbindungen von der angegeben Quelle erlauben. Gültig für <ip> ist eine einzelne IP-Adresse (z.B. 1.2.3.4), ein Netzwerk bzw. eine Netzmaske (z.B. 1.2.3.4/255.255.255.0), oder die CIDR-Notation (z.B. 1.2.3.4/24). Kann mehrmals angegeben werden.</translation>
</message>
<message>
<source>Bind to given address and whitelist peers connecting to it. Use [host]:port notation for IPv6</source>
<translation>An die angegebene Adresse binden und Gegenstellen, die sich dorthin verbinden, immer zulassen. Für IPv6 "[Host]:Port"-Notation verwenden</translation>
</message>
<message>
<source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source>
<translation>An die angegebene Adresse binden und nach eingehenden JSON-RPC-Verbindungen abhören. Für IPv6 "[Host]:Port"-Notation verwenden. Kann mehrmals angegeben werden. (Standard: an alle Schnittstellen binden)</translation>
</message>
<message>
<source>Cannot obtain a lock on data directory %s. Zetacoin Core is probably already running.</source>
<translation>Datenverzeichnis %s kann nicht gesperrt werden, da Zetacoin Core wahrscheinlich bereits gestartet wurde.</translation>
</message>
<message>
<source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source>
<translation>Neue Dateien mit Standard-Systemrechten erzeugen, anstatt mit umask 077 (nur mit deaktivierter Walletfunktion nutzbar)</translation>
</message>
<message>
<source>Discover own IP addresses (default: 1 when listening and no -externalip or -proxy)</source>
<translation>Eigene IP-Adressen ermitteln (Standard: 1, wenn abgehört wird und nicht -externalip oder -proxy)</translation>
</message>
<message>
<source>Error: Listening for incoming connections failed (listen returned error %s)</source>
<translation>Fehler: Abhören nach eingehenden Verbindungen fehlgeschlagen (listen meldete Fehler %s)</translation>
</message>
<message>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation>Befehl ausführen wenn ein relevanter Alarm empfangen wird oder wir einen wirklich langen Fork entdecken (%s im Befehl wird durch die Nachricht ersetzt)</translation>
</message>
<message>
<source>Fees (in %s/kB) smaller than this are considered zero fee for relaying, mining and transaction creation (default: %s)</source>
<translation>Niedrigere Gebühren (in %s/kB) als diese werden für das Weiterleiten, Mining und die Transaktionserstellung als gebührenfrei angesehen (Standard: %s)</translation>
</message>
<message>
<source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source>
<translation>Wenn -paytxfee nicht festgelegt wurde Gebühren einschließen, so dass mit der Bestätigung von Transaktionen im Schnitt innerhalb von n Blöcken begonnen wird (Standard: %u)</translation>
</message>
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source>
<translation>Ungültiger Betrag für -maxtxfee=<amount>: '%s' (muss mindestens die minimale Weiterleitungsgebühr in Höhe von %s sein, um zu verhindern dass Transaktionen nicht bearbeitet werden)</translation>
</message>
<message>
<source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source>
<translation>Maximale Datengröße in "Data Carrier"-Transaktionen die weitergeleitet und erarbeitet werden (Standard: %u)</translation>
</message>
<message>
<source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source>
<translation>Adressen von Gegenstellen via DNS-Namensauflösung finden, falls zu wenige Adressen verfügbar sind (Standard: 1, außer bei -connect)</translation>
</message>
<message>
<source>Randomize credentials for every proxy connection. This enables Tor stream isolation (default: %u)</source>
<translation>Zufällige Anmeldedaten für jede Proxyverbindung verwenden. Dies aktiviert Tor-Datenflussisolation (Standard: %u)</translation>
</message>
<message>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source>
<translation>Maximale Größe in Byte von "high-priority/low-fee"-Transaktionen festlegen (Standard: %d)</translation>
</message>
<message>
<source>Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</source>
<translation>Maximale Anzahl an Threads zur Zetacoinerzeugung, wenn aktiviert, festlegen (-1 = alle Kerne, Standard: %d)</translation>
</message>
<message>
<source>The transaction amount is too small to send after the fee has been deducted</source>
<translation>Der Transaktionsbetrag ist zum Senden zu niedrig, nachdem die Gebühr abgezogen wurde.</translation>
</message>
<message>
<source>This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit <https://www.openssl.org/> and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</source>
<translation>Dieses Produkt enthält Software, die vom OpenSSL-Projekt zur Verwendung im OpenSSL-Toolkit <https://www.openssl.org/> entwickelt wird, sowie von Eric Young geschriebene kryptographische Software und von Thomas Bernard geschriebene UPnP-Software.</translation>
</message>
<message>
<source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</source>
<translation>Erlaubte Gegenstellen werden nicht für DoS-Attacken gesperrt und ihre Transaktionen werden immer weitergeleitet, auch wenn sie sich bereits im Speicherpool befinden, was z.B. für Gateways sinnvoll ist.</translation>
</message>
<message>
<source>You need to rebuild the database using -reindex to go back to unpruned mode. This will redownload the entire blockchain</source>
<translation>Sie müssen die Datenbank mit Hilfe von -reindex neu aufbauen, um zum ungekürzten Modus zurückzukehren. Dies erfordert, dass die gesamte Blockkette erneut heruntergeladen wird.</translation>
</message>
<message>
<source>(default: %u)</source>
<translation>(Standard: %u)</translation>
</message>
<message>
<source>Accept public REST requests (default: %u)</source>
<translation>Öffentliche REST-Anfragen annehmen (Standard: %u)</translation>
</message>
<message>
<source>Activating best chain...</source>
<translation>Aktiviere beste Blockkette...</translation>
</message>
<message>
<source>Always relay transactions received from whitelisted peers (default: %d)</source>
<translation>Geben Sie immer die Transaktionen, die Sie von freigegebenen Peers erhalten haben, weiter (Voreinstellung: %d)</translation>
</message>
<message>
<source>Attempt to recover private keys from a corrupt wallet.dat on startup</source>
<translation>Versuchen, private Schlüssel beim Starten aus einer beschädigten wallet.dat wiederherzustellen</translation>
</message>
<message>
<source>Automatically create Tor hidden service (default: %d)</source>
<translation>Automatisch versteckten Tor-Dienst erstellen (Standard: %d)</translation>
</message>
<message>
<source>Cannot resolve -whitebind address: '%s'</source>
<translation>Kann Adresse in -whitebind nicht auflösen: '%s'</translation>
</message>
<message>
<source>Connect through SOCKS5 proxy</source>
<translation>Über einen SOCKS5-Proxy verbinden</translation>
</message>
<message>
<source>Copyright (C) 2009-%i The Zetacoin Core Developers</source>
<translation>Urheberrecht (C) 2009-%i Die "Zetacoin Core"-Entwickler</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet requires newer version of Zetacoin Core</source>
<translation>Fehler beim Laden von wallet.dat: Wallet benötigt neuere Version von Zetacoin Core</translation>
</message>
<message>
<source>Error reading from database, shutting down.</source>
<translation>Fehler beim Lesen der Datenbank, Ausführung wird beendet.</translation>
</message>
<message>
<source>Imports blocks from external blk000??.dat file on startup</source>
<translation>Blöcke beim Starten aus externer Datei blk000??.dat importieren</translation>
</message>
<message>
<source>Information</source>
<translation>Hinweis</translation>
</message>
<message>
<source>Initialization sanity check failed. Zetacoin Core is shutting down.</source>
<translation>Initialisierungsplausibilitätsprüfung fehlgeschlagen. Zetacoin Core wird beendet.</translation>
</message>
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s'</source>
<translation>Ungültiger Betrag für -maxtxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Ungültiger Betrag für -minrelaytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Ungültiger Betrag für -mintxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s' (must be at least %s)</source>
<translation>Ungültiger Betrag für -paytxfee=<amount>: '%s' (muss mindestens %s sein)</translation>
</message>
<message>
<source>Invalid netmask specified in -whitelist: '%s'</source>
<translation>Ungültige Netzmaske angegeben in -whitelist: '%s'</translation>
</message>
<message>
<source>Keep at most <n> unconnectable transactions in memory (default: %u)</source>
<translation>Maximal <n> nicht-verbindbare Transaktionen im Speicher halten (Standard: %u)</translation>
</message>
<message>
<source>Need to specify a port with -whitebind: '%s'</source>
<translation>Angabe eines Ports benötigt für -whitebind: '%s'</translation>
</message>
<message>
<source>Node relay options:</source>
<translation>Knoten-Weiterleitungsoptionen:</translation>
</message>
<message>
<source>RPC server options:</source>
<translation>RPC-Serveroptionen:</translation>
</message>
<message>
<source>Rebuild block chain index from current blk000??.dat files on startup</source>
<translation>Blockkettenindex aus aktuellen Dateien blk000??.dat beim Starten wiederaufbauen</translation>
</message>
<message>
<source>Receive and display P2P network alerts (default: %u)</source>
<translation>P2P-Netzwerk-Alarme empfangen und anzeigen (Standard: %u)</translation>
</message>
<message>
<source>Reducing -maxconnections from %d to %d, because of system limitations.</source>
<translation>Reduziere -maxconnections von %d auf %d, aufgrund von Systemlimitierungen.</translation>
</message>
<message>
<source>Rescan the block chain for missing wallet transactions on startup</source>
<translation>Blockkette beim Starten erneut nach fehlenden Wallet-Transaktionen durchsuchen</translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Rückverfolgungs- und Debuginformationen an die Konsole senden, anstatt sie in debug.log zu schreiben</translation>
</message>
<message>
<source>Send transactions as zero-fee transactions if possible (default: %u)</source>
<translation>Transaktionen, wenn möglich, als gebührenfreie Transaktion senden (Standard: %u)</translation>
</message>
<message>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation>Zeige alle Debuggingoptionen (Benutzung: --help -help-debug)</translation>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Protokolldatei debug.log beim Starten des Clients kürzen (Standard: 1, wenn kein -debug)</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation>Signierung der Transaktion fehlgeschlagen</translation>
</message>
<message>
<source>The transaction amount is too small to pay the fee</source>
<translation>Der Transaktionsbetrag ist zu niedrig, um die Gebühr zu bezahlen.</translation>
</message>
<message>
<source>This is experimental software.</source>
<translation>Dies ist experimentelle Software.</translation>
</message>
<message>
<source>Tor control port password (default: empty)</source>
<translation>TOR Kontrollport Passwort (Standard: leer)</translation>
</message>
<message>
<source>Tor control port to use if onion listening enabled (default: %s)</source>
<translation>Zu benutzender TOR Kontrollport wenn Onion Auflistung aktiv ist (Standard: %s)</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Transaktionsbetrag zu niedrig</translation>
</message>
<message>
<source>Transaction amounts must be positive</source>
<translation>Transaktionsbeträge müssen positiv sein</translation>
</message>
<message>
<source>Transaction too large for fee policy</source>
<translation>Transaktion ist für die Gebührenrichtlinie zu groß</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>Transaktion zu groß</translation>
</message>
<message>
<source>Unable to bind to %s on this computer (bind returned error %s)</source>
<translation>Kann auf diesem Computer nicht an %s binden (bind meldete Fehler %s)</translation>
</message>
<message>
<source>Upgrade wallet to latest format on startup</source>
<translation>Wallet beim Starten auf das neueste Format aktualisieren</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Benutzername für JSON-RPC-Verbindungen</translation>
</message>
<message>
<source>Wallet needed to be rewritten: restart Zetacoin Core to complete</source>
<translation>Wallet musste neu geschrieben werden: starten Sie Zetacoin Core zur Fertigstellung neu</translation>
</message>
<message>
<source>Warning</source>
<translation>Warnung</translation>
</message>
<message>
<source>Whether to operate in a blocks only mode (default: %u)</source>
<translation>Legt fest ob nur Blöcke Modus aktiv sein soll (Standard: %u)</translation>
</message>
<message>
<source>Zapping all transactions from wallet...</source>
<translation>Lösche alle Transaktionen aus Wallet...</translation>
</message>
<message>
<source>ZeroMQ notification options:</source>
<translation>ZeroMQ-Benachrichtigungsoptionen:</translation>
</message>
<message>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat beschädigt, Datenrettung fehlgeschlagen</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Passwort für JSON-RPC-Verbindungen</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Befehl ausführen wenn der beste Block wechselt (%s im Befehl wird durch den Hash des Blocks ersetzt)</translation>
</message>
<message>
<source>This help message</source>
<translation>Dieser Hilfetext</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Erlaube DNS-Abfragen für -addnode, -seednode und -connect</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Lade Adressen...</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Fehler beim Laden von wallet.dat: Wallet beschädigt</translation>
</message>
<message>
<source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source>
<translation>(1 = TX-Metadaten wie z.B. Accountbesitzer und Zahlungsanforderungsinformationen behalten, 2 = TX-Metadaten verwerfen)</translation>
</message>
<message>
<source>-maxtxfee is set very high! Fees this large could be paid on a single transaction.</source>
<translation>-maxtxfee ist auf einen sehr hohen Wert festgelegt! Gebühren dieser Höhe könnten für eine einzelne Transaktion bezahlt werden.</translation>
</message>
<message>
<source>-paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>-paytxfee ist auf einen sehr hohen Wert festgelegt! Dies ist die Gebühr die beim Senden einer Transaktion fällig wird.</translation>
</message>
<message>
<source>Do not keep transactions in the mempool longer than <n> hours (default: %u)</source>
<translation>Transaktionen nicht länger als &lt;n&gt; Stunden im Speicherpool behalten (Standard: %u)</translation>
</message>
<message>
<source>Error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Lesen von wallet.dat fehlgeschlagen! Alle Schlüssel wurden korrekt gelesen, Transaktionsdaten bzw. Adressbucheinträge fehlen aber möglicherweise oder sind inkorrekt.</translation>
</message>
<message>
<source>Fees (in %s/kB) smaller than this are considered zero fee for transaction creation (default: %s)</source>
<translation>Niedrigere Gebühren (in %s/kB) als diese werden bei der Transaktionserstellung als gebührenfrei angesehen (Standard: %s)</translation>
</message>
<message>
<source>How thorough the block verification of -checkblocks is (0-4, default: %u)</source>
<translation>Legt fest, wie gründlich die Blockverifikation von -checkblocks ist (0-4, Standard: %u)</translation>
</message>
<message>
<source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source>
<translation>Einen vollständigen Transaktionsindex führen, der vom RPC-Befehl "getrawtransaction" genutzt wird (Standard: %u)</translation>
</message>
<message>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source>
<translation>Anzahl Sekunden, während denen sich nicht konform verhaltenden Gegenstellen die Wiederverbindung verweigert wird (Standard: %u)</translation>
</message>
<message>
<source>Output debugging information (default: %u, supplying <category> is optional)</source>
<translation>Debugginginformationen ausgeben (Standard: %u, <category> anzugeben ist optional)</translation>
</message>
<message>
<source>Total length of network version string (%i) exceeds maximum length (%i). Reduce the number or size of uacomments.</source>
<translation>Gesamtlänge des Netzwerkversionstrings (%i) erreicht die maximale Länge (%i). Reduzieren Sie die Nummer oder die Größe von uacomments.</translation>
</message>
<message>
<source>Tries to keep outbound traffic under the given target (in MiB per 24h), 0 = no limit (default: %d)</source>
<translation>Versucht ausgehenden Datenverkehr unter dem gegebenen Wert zu halten (in MiB pro 24h), 0 = kein Limit (Standard: %d)</translation>
</message>
<message>
<source>Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source>
<translation>Nicht unterstütztes Argument -socks gefunden. Das Festlegen der SOCKS-Version ist nicht mehr möglich, nur noch SOCKS5-Proxies werden unterstützt.</translation>
</message>
<message>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source>
<translation>Separaten SOCKS5-Proxy verwenden, um Gegenstellen über versteckte Tor-Dienste zu erreichen (Standard: %s)</translation>
</message>
<message>
<source>Username and hashed password for JSON-RPC connections. The field <userpw> comes in the format: <USERNAME>:<SALT>$<HASH>. A canonical python script is included in share/rpcuser. This option can be specified multiple times</source>
<translation>Benutzername und gehashtes Passwort für JSON-RPC Verbindungen. Das Feld <userpw> kommt im Format: <USERNAME>:<SALT>$<HASH>. Ein kanonisches Pythonskript ist in share/rpcuser inbegriffen. Diese Option kann mehrere Male spezifiziert werden</translation>
</message>
<message>
<source>(default: %s)</source>
<translation>(Standard: %s)</translation>
</message>
<message>
<source>Always query for peer addresses via DNS lookup (default: %u)</source>
<translation>Adressen von Gegenstellen immer über DNS-Namensauflösung abfragen (Standard: %u)</translation>
</message>
<message>
<source>Error loading wallet.dat</source>
<translation>Fehler beim Laden von wallet.dat</translation>
</message>
<message>
<source>Generate coins (default: %u)</source>
<translation>Zetacoins erzeugen (Standard: %u)</translation>
</message>
<message>
<source>How many blocks to check at startup (default: %u, 0 = all)</source>
<translation>Wieviele Blöcke beim Starten geprüft werden sollen (Standard: %u, 0 = alle)</translation>
</message>
<message>
<source>Include IP addresses in debug output (default: %u)</source>
<translation>IP-Adressen in Debugausgabe einschließen (Standard: %u)</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>Ungültige Adresse in -proxy: '%s'</translation>
</message>
<message>
<source>Listen for JSON-RPC connections on <port> (default: %u or testnet: %u)</source>
<translation><port> nach JSON-RPC-Verbindungen abhören (Standard: %u oder Testnetz: %u)</translation>
</message>
<message>
<source>Listen for connections on <port> (default: %u or testnet: %u)</source>
<translation><port> nach Verbindungen abhören (Standard: %u oder Testnetz: %u)</translation>
</message>
<message>
<source>Maintain at most <n> connections to peers (default: %u)</source>
<translation>Maximal <n> Verbindungen zu Gegenstellen aufrechterhalten (Standard: %u)</translation>
</message>
<message>
<source>Make the wallet broadcast transactions</source>
<translation>Die Wallet soll Transaktionen übertragen/broadcasten</translation>
</message>
<message>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: %u)</source>
<translation>Maximale Größe des Empfangspuffers pro Verbindung, <n> * 1000 Byte (Standard: %u)</translation>
</message>
<message>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: %u)</source>
<translation>Maximale Größe des Sendepuffers pro Verbindung, <n> * 1000 Byte (Standard: %u)</translation>
</message>
<message>
<source>Prepend debug output with timestamp (default: %u)</source>
<translation>Debugausgaben einen Zeitstempel voranstellen (Standard: %u)</translation>
</message>
<message>
<source>Relay and mine data carrier transactions (default: %u)</source>
<translation>"Data Carrier"-Transaktionen weiterleiten und minen (Standard: %u)</translation>
</message>
<message>
<source>Relay non-P2SH multisig (default: %u)</source>
<translation>Nicht-"P2SH-Multisig" weiterleiten (Standard: %u)</translation>
</message>
<message>
<source>Set key pool size to <n> (default: %u)</source>
<translation>Größe des Schlüsselpools festlegen auf <n> (Standard: %u)</translation>
</message>
<message>
<source>Set minimum block size in bytes (default: %u)</source>
<translation>Minimale Blockgröße in Byte festlegen (Standard: %u)</translation>
</message>
<message>
<source>Set the number of threads to service RPC calls (default: %d)</source>
<translation>Maximale Anzahl an Threads zur Verarbeitung von RPC-Anfragen festlegen (Standard: %d)</translation>
</message>
<message>
<source>Specify configuration file (default: %s)</source>
<translation>Konfigurationsdatei festlegen (Standard: %s)</translation>
</message>
<message>
<source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source>
<translation>Verbindungzeitüberschreitung in Millisekunden festlegen (Minimum: 1, Standard: %d)</translation>
</message>
<message>
<source>Specify pid file (default: %s)</source>
<translation>PID-Datei festlegen (Standard: %s)</translation>
</message>
<message>
<source>Spend unconfirmed change when sending transactions (default: %u)</source>
<translation>Unbestätigtes Wechselgeld darf beim Senden von Transaktionen ausgegeben werden (Standard: %u)</translation>
</message>
<message>
<source>Threshold for disconnecting misbehaving peers (default: %u)</source>
<translation>Schwellenwert, um Verbindungen zu sich nicht konform verhaltenden Gegenstellen zu beenden (Standard: %u)</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Unbekannter Netztyp in -onlynet angegeben: '%s'</translation>
</message>
<message>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Kann Adresse in -bind nicht auflösen: '%s'</translation>
</message>
<message>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Kann Adresse in -externalip nicht auflösen: '%s'</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Ungültiger Betrag für -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Unzureichender Kontostand</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Lade Blockindex...</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Mit dem angegebenen Knoten verbinden und versuchen die Verbindung aufrecht zu erhalten</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Lade Wallet...</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>Wallet kann nicht auf eine ältere Version herabgestuft werden</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>Standardadresse kann nicht geschrieben werden</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Durchsuche erneut...</translation>
</message>
<message>
<source>Done loading</source>
<translation>Laden abgeschlossen</translation>
</message>
<message>
<source>Error</source>
<translation>Fehler</translation>
</message>
</context>
</TS>
|
<message>
<source>Zetacoin Core client</source>
<translation>"Zetacoin Core"-Client</translation>
</message>
|
rootfs.go
|
package image // import "github.com/docker/docker/image"
import (
"runtime"
"github.com/docker/docker/layer"
"github.com/sirupsen/logrus"
)
// TypeLayers is used for RootFS.Type for filesystems organized into layers.
const TypeLayers = "layers"
// typeLayersWithBase is an older format used by Windows up to v1.12. We
// explicitly handle this as an error case to ensure that a daemon which still
// has an older image like this on disk can still start, even though the
// image itself is not usable. See https://github.com/docker/docker/pull/25806.
const typeLayersWithBase = "layers+base"
// RootFS describes images root filesystem
// This is currently a placeholder that only supports layers. In the future
// this can be made into an interface that supports different implementations.
type RootFS struct {
Type string `json:"type"`
DiffIDs []layer.DiffID `json:"diff_ids,omitempty"`
}
// NewRootFS returns empty RootFS struct
func
|
() *RootFS {
return &RootFS{Type: TypeLayers}
}
// Append appends a new diffID to rootfs
func (r *RootFS) Append(id layer.DiffID) {
	extended := append(r.DiffIDs, id)
	r.DiffIDs = extended
}
// Clone returns a copy of the RootFS
func (r *RootFS) Clone() *RootFS {
	// Build a fresh RootFS and duplicate the DiffIDs slice so the clone
	// shares no backing storage with the receiver.
	clone := NewRootFS()
	clone.Type = r.Type
	clone.DiffIDs = append(make([]layer.DiffID, 0, len(r.DiffIDs)), r.DiffIDs...)
	return clone
}
// ChainID returns the ChainID for the top layer in RootFS.
func (r *RootFS) ChainID() layer.ChainID {
	// The legacy Windows "layers+base" format cannot be used; warn and
	// return an empty chain ID instead of computing one (see the comment
	// on typeLayersWithBase).
	isLegacyWindowsFormat := runtime.GOOS == "windows" && r.Type == typeLayersWithBase
	if isLegacyWindowsFormat {
		logrus.Warnf("Layer type is unsupported on this platform. DiffIDs: '%v'", r.DiffIDs)
		return ""
	}
	return layer.CreateChainID(r.DiffIDs)
}
|
NewRootFS
|
sanitized-files.test.ts
|
import {createSanitizedTestInput} from './sanitized-test';
/**
 * Collects every file under the sanitized-files directory, resolving each
 * relative sub-path into a full path.
 */
async function getAllSanitizedFiles(): Promise<string[]> {
    const subPaths = Array.from(await getAllRecursiveFiles(sanitizedFilesDir));
    const fullPaths: string[] = [];
    for (const subPath of subPaths) {
        fullPaths.push(join(sanitizedFilesDir, subPath));
    }
    return fullPaths;
}
// Register one test per sanitized file so each file is validated independently.
testGroup({
    description: 'run all sanitized file tests',
    tests: async (runTest) => {
        const filePaths = await getAllSanitizedFiles();
        for (const filePath of filePaths) {
            runTest(createSanitizedTestInput(filePath));
        }
    },
});
|
import {join} from 'path';
import {testGroup} from 'test-vir';
import {getAllRecursiveFiles, sanitizedFilesDir} from '../repo-paths';
|
|
ssh.py
|
# Copyright (c) 2009 Upi Tamminen <[email protected]>
# See the COPYRIGHT file for more information
import getopt
import hashlib
import re
import socket
import time
from twisted.internet import reactor
from twisted.python import log
from cowrie.core.config import CowrieConfig
from cowrie.shell.command import HoneyPotCommand
commands = {}
OUTPUT = [
'usage: ssh [-46AaCfGgKkMNnqsTtVvXxYy] [-B bind_interface]',
' [-b bind_address] [-c cipher_spec] [-D [bind_address:]port]',
' [-E log_file] [-e escape_char] [-F configfile] [-I pkcs11]',
' [-i identity_file] [-J [user@]host[:port]] [-L address]',
' [-l login_name] [-m mac_spec] [-O ctl_cmd] [-o option] [-p port]', # noqa
' [-Q query_option] [-R address] [-S ctl_path] [-W host:port]',
' [-w local_tun[:remote_tun]] destination [command]'
]
class command_ssh(HoneyPotCommand):
def valid_ip(self, address):
    """
    Return True if `address` parses as an IPv4 address.

    Uses socket.inet_aton, which also accepts the legacy short forms
    ("1.2.3" etc.) in addition to full dotted-quad notation.
    """
    try:
        socket.inet_aton(address)
        return True
    except OSError:
        # inet_aton raises OSError (socket.error) on malformed input; the
        # previous bare `except Exception` was broader than necessary and
        # could hide unrelated programming errors.
        return False
def start(self):
    """
    Entry point for the fake `ssh` command.

    Parses OpenSSH-style options (mostly ignored), derives a fake peer IP
    from the target hostname, prints the usual host-key prompt, and queues
    the yes/no and password callbacks for subsequent input lines.
    """
    try:
        options = '-1246AaCfgKkMNnqsTtVvXxYb:c:D:e:F:i:L:l:m:O:o:p:R:S:w:'
        optlist, args = getopt.getopt(self.args, options)
    except getopt.GetoptError:
        self.write('Unrecognized option\n')
        self.exit()
        # Bug fix: self.exit() does not stop execution, so without this
        # return the loop below would reference the unbound `optlist`
        # and raise a NameError.
        return
    for opt in optlist:
        if opt[0] == '-V':
            self.write(CowrieConfig().get('shell', 'ssh_version',
                       fallback="OpenSSH_7.9p1, OpenSSL 1.1.1a 20 Nov 2018")+"\n")
            self.exit()
            return
    if not len(args):
        # No destination given: print the usage banner.
        for line in OUTPUT:
            self.write(f'{line}\n')
        self.exit()
        return
    user, host = 'root', args[0]
    for opt in optlist:
        if opt[0] == '-l':
            user = opt[1]
    if args[0].count('@'):
        user, host = args[0].split('@', 1)
    if re.match('^[0-9.]+$', host):
        if self.valid_ip(host):
            self.ip = host
        else:
            self.write('ssh: Could not resolve hostname {}: \
Name or service not known\n'.format(host))
            self.exit()
    else:
        # Derive a stable fake IP from the hostname's MD5 digest so the
        # same host always "resolves" to the same address.
        s = hashlib.md5(host.encode()).hexdigest()
        self.ip = '.'.join([str(int(x, 16)) for x in
                            (s[0:2], s[2:4], s[4:6], s[6:8])])
    self.host = host
    self.user = user
    self.write('The authenticity of host \'{} ({})\' \
can\'t be established.\n'.format(self.host, self.ip))
    self.write('RSA key fingerprint is \
9d:30:97:8a:9e:48:0d:de:04:8d:76:3a:7b:4b:30:f8.\n')
    self.write('Are you sure you want to continue connecting (yes/no)? ')
    self.callbacks = [self.yesno, self.wait]
def
|
(self, line):
self.write('Warning: Permanently added \'{}\' (RSA) to the \
list of known hosts.\n'.format(self.host))
self.write(f'{self.user}@{self.host}\'s password: ')
self.protocol.password_input = True
def wait(self, line):
    """Pretend to contact the remote host: finish after a short delay."""
    delay_seconds = 2
    reactor.callLater(delay_seconds, self.finish, line)
def finish(self, line):
    """
    Complete the fake login: switch the session's hostname and cwd, print a
    fake kernel banner and last-login line, then exit the command.
    """
    self.pause = False
    # Default to 'localhost'; use the first dotted component of the target
    # host as the new hostname when it is alphabetic. (The original code
    # first assigned `rest = self.host` and immediately overwrote it —
    # that dead assignment is removed here.)
    host = 'localhost'
    parts = self.host.strip().split('.')
    if len(parts) and parts[0].isalpha():
        host = parts[0]
    self.protocol.hostname = host
    self.protocol.cwd = '/root'
    if not self.fs.exists(self.protocol.cwd):
        self.protocol.cwd = '/'
    self.protocol.password_input = False
    self.write('Linux {} 2.6.26-2-686 #1 SMP Wed Nov 4 20:45:37 \
UTC 2009 i686\n'.format(self.protocol.hostname))
    self.write('Last login: %s from 192.168.9.4\n'
               % (time.ctime(time.time() - 123123),))
    self.exit()
def lineReceived(self, line):
    """Log each input line and dispatch it to the next queued callback."""
    log.msg('INPUT (ssh):', line)
    if self.callbacks:
        next_callback = self.callbacks.pop(0)
        next_callback(line)
# Register the command under both its absolute path and its bare name.
commands['/usr/bin/ssh'] = command_ssh
commands['ssh'] = command_ssh
|
yesno
|
fw_test.go
|
package ospf
import (
"reflect"
"testing"
"github.com/willhu-commit/pango/testdata"
)
func TestFwNormalization(t *testing.T)
|
{
testCases := getTests()
mc := &testdata.MockClient{}
ns := FirewallNamespace(mc)
vr := "mockVr"
for _, tc := range testCases {
t.Run(tc.desc, func(t *testing.T) {
mc.Version = tc.version
mc.Reset()
mc.AddResp("")
err := ns.Set(vr, tc.conf)
if err != nil {
t.Errorf("Error in set: %s", err)
} else {
mc.AddResp(mc.Elm)
r, err := ns.Get(vr)
if err != nil {
t.Errorf("Error in get: %s", err)
}
if !reflect.DeepEqual(tc.conf, r) {
t.Errorf("%#v != %#v", tc.conf, r)
}
}
})
}
}
|
|
IERC20.ts
|
/* Autogenerated file. Do not edit manually. */
/* tslint:disable */
/* eslint-disable */
import BN from "bn.js";
|
import { EventLog } from "web3-core";
import { EventEmitter } from "events";
import {
Callback,
PayableTransactionObject,
NonPayableTransactionObject,
BlockType,
ContractEventLog,
BaseContract,
} from "./types";
/** Options accepted by web3 event subscriptions (filtering, replay start block, topics). */
export interface EventOptions {
  filter?: object;
  fromBlock?: BlockType;
  topics?: string[];
}
/** Decoded log for the ERC-20 `Approval(owner, spender, value)` event (named and positional fields). */
export type Approval = ContractEventLog<{
  owner: string;
  spender: string;
  value: string;
  0: string;
  1: string;
  2: string;
}>;
/** Decoded log for the ERC-20 `Transfer(from, to, value)` event (named and positional fields). */
export type Transfer = ContractEventLog<{
  from: string;
  to: string;
  value: string;
  0: string;
  1: string;
  2: string;
}>;
/**
 * Typed web3 wrapper for the minimal ERC-20 token interface (EIP-20).
 * Generated code: the `methods` / `events` / `once` surface mirrors the
 * standard web3-eth-contract contract shape.
 */
export interface IERC20 extends BaseContract {
  constructor(
    jsonInterface: any[],
    address?: string,
    options?: ContractOptions
  ): IERC20;
  clone(): IERC20;
  methods: {
    /**
     * Returns the amount of tokens in existence.
     */
    totalSupply(): NonPayableTransactionObject<string>;
    /**
     * Returns the amount of tokens owned by `account`.
     */
    balanceOf(account: string): NonPayableTransactionObject<string>;
    /**
     * Moves `amount` tokens from the caller's account to `to`. Returns a boolean value indicating whether the operation succeeded. Emits a {Transfer} event.
     */
    transfer(
      to: string,
      amount: number | string | BN
    ): NonPayableTransactionObject<boolean>;
    /**
     * Returns the remaining number of tokens that `spender` will be allowed to spend on behalf of `owner` through {transferFrom}. This is zero by default. This value changes when {approve} or {transferFrom} are called.
     */
    allowance(
      owner: string,
      spender: string
    ): NonPayableTransactionObject<string>;
    /**
     * Sets `amount` as the allowance of `spender` over the caller's tokens. Returns a boolean value indicating whether the operation succeeded. IMPORTANT: Beware that changing an allowance with this method brings the risk that someone may use both the old and the new allowance by unfortunate transaction ordering. One possible solution to mitigate this race condition is to first reduce the spender's allowance to 0 and set the desired value afterwards: https://github.com/ethereum/EIPs/issues/20#issuecomment-263524729 Emits an {Approval} event.
     */
    approve(
      spender: string,
      amount: number | string | BN
    ): NonPayableTransactionObject<boolean>;
    /**
     * Moves `amount` tokens from `from` to `to` using the allowance mechanism. `amount` is then deducted from the caller's allowance. Returns a boolean value indicating whether the operation succeeded. Emits a {Transfer} event.
     */
    transferFrom(
      from: string,
      to: string,
      amount: number | string | BN
    ): NonPayableTransactionObject<boolean>;
  };
  // Event subscriptions: each event is subscribable with or without options.
  events: {
    Approval(cb?: Callback<Approval>): EventEmitter;
    Approval(options?: EventOptions, cb?: Callback<Approval>): EventEmitter;
    Transfer(cb?: Callback<Transfer>): EventEmitter;
    Transfer(options?: EventOptions, cb?: Callback<Transfer>): EventEmitter;
    allEvents(options?: EventOptions, cb?: Callback<EventLog>): EventEmitter;
  };
  // One-shot listeners for a single occurrence of an event.
  once(event: "Approval", cb: Callback<Approval>): void;
  once(event: "Approval", options: EventOptions, cb: Callback<Approval>): void;
  once(event: "Transfer", cb: Callback<Transfer>): void;
  once(event: "Transfer", options: EventOptions, cb: Callback<Transfer>): void;
}
|
import { ContractOptions } from "web3-eth-contract";
|
prebuilt_protoc_deps.bzl
|
"""
GENERATED FILE - DO NOT EDIT (created via @build_stack_rules_proto//cmd/depsgen)
"""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def _maybe(repo_rule, name, **kwargs):
    """Declares the repository rule only if no rule named `name` exists yet."""
    if name in native.existing_rules():
        return
    repo_rule(name = name, **kwargs)
def prebuilt_protoc_deps():
    """Declares the prebuilt protoc archives for all supported platforms."""
    prebuilt_protoc_linux()  # via <TOP>
    prebuilt_protoc_osx()  # via <TOP>
    prebuilt_protoc_windows()  # via <TOP>
def prebuilt_protoc_linux():
_maybe(
http_archive,
name = "prebuilt_protoc_linux",
sha256 = "6003de742ea3fcf703cfec1cd4a3380fd143081a2eb0e559065563496af27807",
urls = [
"https://github.com/google/protobuf/releases/download/v3.6.1/protoc-3.6.1-linux-x86_64.zip",
],
build_file_content = """
|
)
""",
)
def prebuilt_protoc_osx():
    """Declares the prebuilt protoc 3.6.1 release archive for macOS x86_64."""
    _maybe(
        http_archive,
        name = "prebuilt_protoc_osx",
        sha256 = "0decc6ce5beed07f8c20361ddeb5ac7666f09cf34572cca530e16814093f9c0c",
        urls = [
            "https://github.com/google/protobuf/releases/download/v3.6.1/protoc-3.6.1-osx-x86_64.zip",
        ],
        # Exposes bin/protoc from the archive as the ":protoc" filegroup.
        build_file_content = """
filegroup(
name = "protoc",
srcs = ["bin/protoc"],
visibility = ["//visibility:public"],
)
""",
    )
def prebuilt_protoc_windows():
    """Declares the prebuilt protoc 3.6.1 release archive for Windows (win32)."""
    _maybe(
        http_archive,
        name = "prebuilt_protoc_windows",
        # NOTE(review): this sha256 is byte-identical to the osx archive's
        # checksum above; the win32 zip should have its own digest — verify
        # against the published protoc 3.6.1 release checksums.
        sha256 = "0decc6ce5beed07f8c20361ddeb5ac7666f09cf34572cca530e16814093f9c0c",
        urls = [
            "https://github.com/google/protobuf/releases/download/v3.6.1/protoc-3.6.1-win32.zip",
        ],
        # Exposes bin/protoc.exe from the archive as the ":protoc" filegroup.
        build_file_content = """
filegroup(
name = "protoc",
srcs = ["bin/protoc.exe"],
visibility = ["//visibility:public"],
)
""",
    )
|
filegroup(
name = "protoc",
srcs = ["bin/protoc"],
visibility = ["//visibility:public"],
|
DefaultLabel.js
|
import React from 'react';
import PropTypes from 'prop-types';
import {
Platform,
StyleSheet,
Text,
TouchableHighlight,
View,
} from 'react-native';
// react-native removed the top-level ViewPropTypes export in newer versions;
// fall back to View.propTypes on older ones. The original fallback referenced
// the still-uninitialized `ViewPropTypes` const itself, which throws a
// temporal-dead-zone ReferenceError whenever the library export is missing.
const ViewPropTypes = require('react-native').ViewPropTypes || View.propTypes;
export default class
|
extends React.Component {
static propTypes = {
leftDiff: PropTypes.number,
labelStyle: ViewPropTypes.style,
labelTextStyle: ViewPropTypes.style,
oneMarkerValue: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
twoMarkerValue: PropTypes.oneOfType([PropTypes.string, PropTypes.number]),
oneMarkerLeftPosition: PropTypes.number,
twoMarkerLeftPosition: PropTypes.number,
};
static defaultProps = {
leftDiff: 0,
labelStyle: {},
labelTextStyle: {},
};
render() {
const {
leftDiff,
labelStyle,
labelTextStyle,
oneMarkerValue,
twoMarkerValue,
oneMarkerLeftPosition,
twoMarkerLeftPosition,
} = this.props;
return (
<View style={{ position: 'relative' }}>
<View
style={[
styles.sliderLabel,
{ left: oneMarkerLeftPosition - leftDiff },
labelStyle,
]}
>
<Text style={[styles.sliderLabelText, labelTextStyle]}>
{oneMarkerValue}
</Text>
</View>
{twoMarkerValue !== undefined && (
<View
style={[
styles.sliderLabel,
{ left: twoMarkerLeftPosition - leftDiff },
labelStyle,
]}
>
<Text style={[styles.sliderLabelText, labelTextStyle]}>
{twoMarkerValue}
</Text>
</View>
)}
</View>
);
}
}
// Static styles for the floating marker labels.
const styles = StyleSheet.create({
  sliderLabel: {
    position: 'absolute', // positioned within the relatively-positioned wrapper View
    top: -24, // float the label above the slider marker
    minWidth: 51,
    padding: 8,
    backgroundColor: '#fff',
  },
  sliderLabelText: {
    alignItems: 'center', // NOTE(review): alignItems is a View prop, not a Text prop — likely a no-op here; confirm
    textAlign: 'center',
    fontStyle: 'normal',
    fontSize: 11,
  },
});
|
DefaultLabel
|
size-it.directive.ts
|
import { Directive, Input, AfterViewInit, Output, EventEmitter } from '@angular/core';
import { MoveItService } from './move-it.service';
import { Observable, fromEvent } from 'rxjs';
import { mergeMap, map, takeUntil, tap } from 'rxjs/operators';
import { IResizable, IPosition } from './move-it-types';
@Directive({
selector: '[ngSizeit]'
})
export class SizeItDirective implements AfterViewInit {
@Input() bounds: HTMLElement = document.body;
@Input() columnWidth = 1; // px
@Input() minWidth = 1; // columns
@Input() minHeight = 1; // columns
// Event observables
mousedown$: Observable<MouseEvent>;
mousemove$: Observable<MouseEvent>;
mouseup$: Observable<MouseEvent>;
resize$: Observable<IPosition>;
// Emitted events
@Output() mResizeStart: EventEmitter<IResizable> = new EventEmitter<IResizable>();
@Output() mResizeMove: EventEmitter<IResizable> = new EventEmitter<IResizable>();
@Output() mResizeStop: EventEmitter<IResizable> = new EventEmitter<IResizable>();
constructor(
private moveitService: MoveItService,
) { }
ngAfterViewInit() {
  // Create handle
  // A child div appended to the draggable acts as the resize grip.
  const resizeHandle = document.createElement('div');
  resizeHandle.setAttribute('class', 'resize-handle');
  this.moveitService.draggable.appendChild(resizeHandle);
  // Create event listeners
  this.mousedown$ = fromEvent(resizeHandle, 'mousedown') as Observable<MouseEvent>;
  this.mousemove$ = fromEvent(document, 'mousemove') as Observable<MouseEvent>;
  this.mouseup$ = fromEvent(document, 'mouseup') as Observable<MouseEvent>;
  // For every mousedown on the handle: capture the starting dimensions, then
  // map each mousemove into a {w, h, x, y} sample until the next mouseup
  // (which also triggers the commit logic in onMouseUp via tap).
  this.resize$ = this.mousedown$.pipe(
    mergeMap(() => {
      const mdPos: IPosition = this.onMouseDown();
      return this.mousemove$.pipe(
        map(mmEvent => {
          const mmPos: IPosition = this.onMouseMove(mmEvent);
          return {
            w: mdPos.w,
            h: mdPos.h,
            x: mmPos.x,
            y: mmPos.y,
          };
        }),
        takeUntil(this.mouseup$.pipe(
          tap(() => this.onMouseUp())
        ))
      );
    }),
  );
  // NOTE(review): this subscription is never torn down and the directive does
  // not implement OnDestroy, so the document-level listeners outlive the view
  // — confirm and consider unsubscribing on destroy.
  this.resize$.subscribe(pos => {
    this.resize(pos);
  });
}
onMouseDown(): IPosition {
document.body.classList.add('no-select', 'resizing');
const width = parseInt(this.moveitService.draggable.style.width, 10);
const height = parseInt(this.moveitService.draggable.style.height, 10);
// Shadow div
const shadowElt = document.createElement('div');
|
shadowElt.style.height = this.moveitService.draggable.style.height;
this.bounds.appendChild(shadowElt);
const startDim: IResizable = {
item: this.moveitService.draggable,
width: width,
height: height,
};
this.mResizeStart.emit(startDim);
return {
w: width,
h: height
};
}
/**
 * Translates a pointer event into coordinates relative to the draggable's
 * container, compensating for container scroll on the Y axis.
 */
onMouseMove(mmEvent: MouseEvent | TouchEvent): IPosition {
  let clientX: number;
  let clientY: number;
  if (mmEvent instanceof MouseEvent) {
    clientX = mmEvent.clientX;
    clientY = mmEvent.clientY;
  } else {
    clientX = mmEvent.touches[0].clientX;
    clientY = mmEvent.touches[0].clientY;
  }
  return {
    x: clientX - this.moveitService.containerDimensions.left - this.moveitService.draggableDimensions.left,
    y: clientY - this.moveitService.containerDimensions.top + this.bounds.scrollTop - this.moveitService.draggableDimensions.top
  };
}
onMouseUp(): void {
  // End of a resize gesture: clear selection artifacts and restore cursor state.
  this.moveitService.clearSelection();
  document.body.classList.remove('no-select', 'resizing');
  this.moveitService.initDraggableDimensions();
  this.moveitService.getBounds();
  // Get shadow style
  // The shadow element created on mousedown holds the grid-snapped size to
  // commit. NOTE(review): querySelector is not null-checked — assumes the
  // shadow always exists at mouseup; confirm.
  const shadowElt = this.bounds.querySelector('.resize-shadow') as HTMLElement;
  const width = parseFloat(shadowElt.style.width);
  const height = parseFloat(shadowElt.style.height);
  // Copy width and height to element
  this.moveitService.draggable.style.width = width + 'px';
  this.moveitService.draggable.style.height = height + 'px';
  const finalDim: IResizable = {
    item: this.moveitService.draggable,
    width: width,
    height: height,
  };
  this.mResizeStop.emit(finalDim);
  // Remove shadow
  this.bounds.removeChild(shadowElt);
}
/**
 * Applies one resize step: emits the grid-snapped dimensions, sizes the
 * shadow element to the snapped grid, and sizes the real element to the raw
 * pointer position so the user gets free-form feedback with a snapped target.
 *
 * @param pos pointer-derived position (previously an implicit `any`; typed
 *            as IPosition for strict-mode compliance — no runtime change)
 */
resize(pos: IPosition): void {
  const checkedDim = this.moveitService.checkResizeBounds(pos.x, pos.y, this.columnWidth, this.minWidth, this.minHeight);
  const movingDim: IResizable = {
    item: this.moveitService.draggable,
    width: checkedDim.x,
    height: checkedDim.y
  };
  this.mResizeMove.emit(movingDim);
  // Shadow snapped on grid
  const shadowElt = this.bounds.querySelector('.resize-shadow') as HTMLElement;
  shadowElt.style.width = movingDim.width + 'px';
  shadowElt.style.height = movingDim.height + 'px';
  // Update element style
  this.moveitService.draggable.style.width = pos.x - this.moveitService.getOffsetX() + 'px';
  this.moveitService.draggable.style.height = pos.y - this.moveitService.getOffsetY() + 'px';
}
}
|
shadowElt.setAttribute('class', 'draggable resize-shadow');
shadowElt.style.transform = this.moveitService.draggable.style.transform;
shadowElt.style.width = this.moveitService.draggable.style.width;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.