prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>roger.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
from __future__ import print_function
import os
import sys
import subprocess
import re
import importlib
from cli.utils import Utils
def print_help_opt(opt, desc):
print(" {} {}".format(opt.ljust(13), desc))
def roger_help(root, commands):
print("usage: roger [-h] [-v] command [arg...]\n")
print("a command line interface to work with roger mesos.")
print("\npositional arguments:")
print_help_opt("command", "command to run.")
print_help_opt("arg", "arguments to pass to the command.")
print("\noptional arguments:")
print_help_opt("-h, --help", "show this help message and exit.")
print_help_opt("-v, --version", "show version information and exit.")
print("\ncommands:")
sys.path.append("{}/cli".format(root))
for command in commands:
description = ""
module_name = "roger_" + command
cmd_module = importlib.import_module(module_name)
try:
description = cmd_module.describe()
except Exception as e:
pass
print_help_opt(command, description)
print("\nrun: 'roger < command > -h' for more information on a command.")
def getFiles(directory):
filenames = next(os.walk(directory))[2]
return filenames
def getCommands(files):
commands = set()
for filename in files:
if filename.startswith("roger_"):
commands.add(re.split("roger_|\.", filename)[1])
return sorted(commands)
def getScriptCall(root, command, command_args):
script_call = "roger_{}.py".format(command)
for command_arg in command_args:
script_call = script_call + " {}".format(command_arg)
return script_call
def main():
root = ''
utilsObj = Utils()
own_dir = os.path.dirname(os.path.realpath(__file__))
root = os.path.abspath(os.path.join(own_dir, os.pardir))
files = getFiles("{}/cli/".format(root))
commands = getCommands(files)
if len(sys.argv) > 1:
if sys.argv[1] == "-h" or sys.argv[1] == "--help":
roger_help(root, commands)
elif sys.argv[1] == "-v" or sys.argv[1] == "--version":
version = utilsObj.roger_version(root)
print(version)
else:
command = sys.argv[1]
command_args = sys.argv[2:]
if command in commands:
print("root: {} command: {} args: {}".format(
root, command, command_args
))
script_call = getScriptCall(root, command, command_args)
os.system(script_call)<|fim▁hole|>
if __name__ == "__main__":
main()<|fim▁end|> | else:
raise SystemExit("Command is not valid. Exiting.")
else:
raise SystemExit("No arguments found. Please refer to usage: roger -h") |
<|file_name|>cz.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Tests for the contrib/localflavor/ CZ Form Fields
tests = r"""
# CZPostalCodeField #########################################################
>>> from django.contrib.localflavor.cz.forms import CZPostalCodeField
>>> f = CZPostalCodeField()
>>> f.clean('84545x')
Traceback (most recent call last):
...
ValidationError: [u'Enter a postal code in the format XXXXX or XXX XX.']
>>> f.clean('91909')
u'91909'
>>> f.clean('917 01')
u'91701'
>>> f.clean('12345')
u'12345'
>>> f.clean('123456')
Traceback (most recent call last):
...
ValidationError: [u'Enter a postal code in the format XXXXX or XXX XX.']
>>> f.clean('1234')
Traceback (most recent call last):
...
ValidationError: [u'Enter a postal code in the format XXXXX or XXX XX.']
>>> f.clean('123 4')
Traceback (most recent call last):
...
ValidationError: [u'Enter a postal code in the format XXXXX or XXX XX.']
# CZRegionSelect ############################################################
>>> from django.contrib.localflavor.cz.forms import CZRegionSelect
>>> w = CZRegionSelect()
>>> w.render('regions', 'TT')
u'<select name="regions">\n<option value="PR">Prague</option>\n<option value="CE">Central Bohemian Region</option>\n<option value="SO">South Bohemian Region</option>\n<option value="PI">Pilsen Region</option>\n<option value="CA">Carlsbad Region</option>\n<option value="US">Usti Region</option>\n<option value="LB">Liberec Region</option>\n<option value="HK">Hradec Region</option>\n<option value="PA">Pardubice Region</option>\n<option value="VY">Vysocina Region</option>\n<option value="SM">South Moravian Region</option>\n<option value="OL">Olomouc Region</option>\n<option value="ZL">Zlin Region</option>\n<option value="MS">Moravian-Silesian Region</option>\n</select>'
# CZBirthNumberField ########################################################
>>> from django.contrib.localflavor.cz.forms import CZBirthNumberField
>>> f = CZBirthNumberField()
>>> f.clean('880523/1237')
u'880523/1237'
>>> f.clean('8805231237')
u'8805231237'
>>> f.clean('880523/000')
u'880523/000'
>>> f.clean('880523000')
u'880523000'
>>> f.clean('882101/0011')
u'882101/0011'
>>> f.clean('880523/1237', 'm')<|fim▁hole|>u'880523/1237'
>>> f.clean('885523/1231', 'f')
u'885523/1231'
>>> f.clean('123456/12')
Traceback (most recent call last):
...
ValidationError: [u'Enter a birth number in the format XXXXXX/XXXX or XXXXXXXXXX.']
>>> f.clean('123456/12345')
Traceback (most recent call last):
...
ValidationError: [u'Enter a birth number in the format XXXXXX/XXXX or XXXXXXXXXX.']
>>> f.clean('12345612')
Traceback (most recent call last):
...
ValidationError: [u'Enter a birth number in the format XXXXXX/XXXX or XXXXXXXXXX.']
>>> f.clean('12345612345')
Traceback (most recent call last):
...
ValidationError: [u'Enter a birth number in the format XXXXXX/XXXX or XXXXXXXXXX.']
>>> f.clean('881523/0000', 'm')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('885223/0000', 'm')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('881223/0000', 'f')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('886523/0000', 'f')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('880523/1239')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('8805231239')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
>>> f.clean('990101/0011')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid birth number.']
# CZICNumberField ########################################################
>>> from django.contrib.localflavor.cz.forms import CZICNumberField
>>> f = CZICNumberField()
>>> f.clean('12345679')
u'12345679'
>>> f.clean('12345601')
u'12345601'
>>> f.clean('12345661')
u'12345661'
>>> f.clean('12345610')
u'12345610'
>>> f.clean('1234567')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid IC number.']
>>> f.clean('12345660')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid IC number.']
>>> f.clean('12345600')
Traceback (most recent call last):
...
ValidationError: [u'Enter a valid IC number.']
"""<|fim▁end|> | |
<|file_name|>dirs.go<|end_file_name|><|fim▁begin|>// -*- Mode: Go; indent-tabs-mode: t -*-
/*
* Copyright (C) 2014-2015 Canonical Ltd
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package dirs
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/snapcore/snapd/osutil"
"github.com/snapcore/snapd/release"
)
// the various file paths
var (
GlobalRootDir string
SnapMountDir string
DistroLibExecDir string
SnapBlobDir string
SnapDataDir string
SnapDataHomeGlob string
SnapDownloadCacheDir string
SnapAppArmorDir string
AppArmorCacheDir string
SnapAppArmorAdditionalDir string
SnapConfineAppArmorDir string
SnapSeccompDir string
SnapMountPolicyDir string
SnapUdevRulesDir string
SnapKModModulesDir string
LocaleDir string
SnapMetaDir string
SnapdSocket string
SnapSocket string
SnapRunDir string
SnapRunNsDir string
SnapRunLockDir string
SnapSeedDir string
SnapDeviceDir string
SnapAssertsDBDir string
SnapCookieDir string
SnapTrustedAccountKey string
SnapAssertsSpoolDir string
SnapSeqDir string
SnapStateFile string
SnapSystemKeyFile string
SnapRepairDir string
SnapRepairStateFile string
SnapRepairRunDir string
SnapRepairAssertsDir string
SnapRunRepairDir string
SnapRollbackDir string
SnapCacheDir string
SnapNamesFile string
SnapSectionsFile string
SnapCommandsDB string
SnapAuxStoreInfoDir string
SnapBinariesDir string
SnapServicesDir string
SnapUserServicesDir string
SnapSystemdConfDir string
SnapDesktopFilesDir string
SnapDesktopIconsDir string
SnapBusPolicyDir string
SnapModeenvFile string
SystemApparmorDir string
SystemApparmorCacheDir string
CloudMetaDataFile string
CloudInstanceDataFile string
ClassicDir string
XdgRuntimeDirBase string
XdgRuntimeDirGlob string
CompletionHelperInCore string
CompletersDir string
SystemFontsDir string
SystemLocalFontsDir string
SystemFontconfigCacheDirs []string
FreezerCgroupDir string
PidsCgroupDir string
SnapshotsDir string
ErrtrackerDbDir string
SysfsDir string
FeaturesDir string
RunMnt string
)
const (
defaultSnapMountDir = "/snap"
// These are directories which are static inside the core snap and
// can never be prefixed as they will be always absolute once we
// are in the snap confinement environment.
CoreLibExecDir = "/usr/lib/snapd"
CoreSnapMountDir = "/snap"
// Directory with snap data inside user's home
UserHomeSnapDir = "snap"
// LocalInstallBlobTempPrefix is used by local install code:
// * in daemon to spool the snap file to <SnapBlobDir>/<LocalInstallBlobTempPrefix>*
// * in snapstate to auto-cleans them up using the same prefix
LocalInstallBlobTempPrefix = ".local-install-"
)
var (
// not exported because it does not honor the global rootdir
snappyDir = filepath.Join("var", "lib", "snapd")
)
func init() {
// init the global directories at startup
root := os.Getenv("SNAPPY_GLOBAL_ROOT")
SetRootDir(root)
}
// StripRootDir strips the custom global root directory from the specified argument.
func StripRootDir(dir string) string {
if !filepath.IsAbs(dir) {
panic(fmt.Sprintf("supplied path is not absolute %q", dir))
}
if !strings.HasPrefix(dir, GlobalRootDir) {
panic(fmt.Sprintf("supplied path is not related to global root %q", dir))
}
result, err := filepath.Rel(GlobalRootDir, dir)
if err != nil {
panic(err)
}
return "/" + result
}
// SupportsClassicConfinement returns true if the current directory layout supports classic confinement.
func SupportsClassicConfinement() bool {
// Core systems don't support classic confinement as a policy decision.
if !release.OnClassic {
return false
}
// Classic systems support classic confinement if using the primary mount
// location for snaps, that is /snap or if using the alternate mount
// location, /var/lib/snapd/snap along with the /snap ->
// /var/lib/snapd/snap symlink in place.
smd := filepath.Join(GlobalRootDir, defaultSnapMountDir)
if SnapMountDir == smd {
return true
}
fi, err := os.Lstat(smd)
if err == nil && fi.Mode()&os.ModeSymlink != 0 {
if target, err := filepath.EvalSymlinks(smd); err == nil {
if target == SnapMountDir {
return true
}
}
}
return false
}
var metaSnapPath = "/meta/snap.yaml"
// isInsideBaseSnap returns true if the process is inside a base snap environment.
//
// The things that count as a base snap are:
// - any base snap mounted at /
// - any os snap mounted at /
func isInsideBaseSnap() (bool, error) {
_, err := os.Stat(metaSnapPath)
if err != nil && os.IsNotExist(err) {
return false, nil
}
return err == nil, err
}
// SnapBlobDirUnder returns the path to the snap blob dir under rootdir.
func SnapBlobDirUnder(rootdir string) string {
return filepath.Join(rootdir, snappyDir, "snaps")
}
// SnapSeedDirUnder returns the path to the snap seed dir under rootdir.
func SnapSeedDirUnder(rootdir string) string {
return filepath.Join(rootdir, snappyDir, "seed")
}
// SnapStateFileUnder returns the path to snapd state file under rootdir.
func SnapStateFileUnder(rootdir string) string {
return filepath.Join(rootdir, snappyDir, "state.json")
}
// SnapModeenvFileUnder returns the path to the modeenv file under rootdir.
func SnapModeenvFileUnder(rootdir string) string {
return filepath.Join(rootdir, snappyDir, "modeenv")
}
// SetRootDir allows settings a new global root directory, this is useful
// for e.g. chroot operations
func SetRootDir(rootdir string) {
if rootdir == "" {
rootdir = "/"
}
GlobalRootDir = rootdir
altDirDistros := []string{
"antergos",
"arch",
"archlinux",
"fedora",
"manjaro",
"manjaro-arm",
}
isInsideBase, _ := isInsideBaseSnap()
if !isInsideBase && release.DistroLike(altDirDistros...) {
SnapMountDir = filepath.Join(rootdir, "/var/lib/snapd/snap")
} else {
SnapMountDir = filepath.Join(rootdir, defaultSnapMountDir)
}
SnapDataDir = filepath.Join(rootdir, "/var/snap")
SnapDataHomeGlob = filepath.Join(rootdir, "/home/*/", UserHomeSnapDir)
SnapAppArmorDir = filepath.Join(rootdir, snappyDir, "apparmor", "profiles")
SnapConfineAppArmorDir = filepath.Join(rootdir, snappyDir, "apparmor", "snap-confine")
AppArmorCacheDir = filepath.Join(rootdir, "/var/cache/apparmor")
SnapAppArmorAdditionalDir = filepath.Join(rootdir, snappyDir, "apparmor", "additional")
SnapDownloadCacheDir = filepath.Join(rootdir, snappyDir, "cache")
SnapSeccompDir = filepath.Join(rootdir, snappyDir, "seccomp", "bpf")
SnapMountPolicyDir = filepath.Join(rootdir, snappyDir, "mount")
SnapMetaDir = filepath.Join(rootdir, snappyDir, "meta")
SnapBlobDir = SnapBlobDirUnder(rootdir)
// ${snappyDir}/desktop is added to $XDG_DATA_DIRS.
// Subdirectories are interpreted according to the relevant
// freedesktop.org specifications
SnapDesktopFilesDir = filepath.Join(rootdir, snappyDir, "desktop", "applications")
SnapDesktopIconsDir = filepath.Join(rootdir, snappyDir, "desktop", "icons")
SnapRunDir = filepath.Join(rootdir, "/run/snapd")
SnapRunNsDir = filepath.Join(SnapRunDir, "/ns")
SnapRunLockDir = filepath.Join(SnapRunDir, "/lock")
// keep in sync with the debian/snapd.socket file:
SnapdSocket = filepath.Join(rootdir, "/run/snapd.socket")
SnapSocket = filepath.Join(rootdir, "/run/snapd-snap.socket")
SnapAssertsDBDir = filepath.Join(rootdir, snappyDir, "assertions")
SnapCookieDir = filepath.Join(rootdir, snappyDir, "cookie")
SnapAssertsSpoolDir = filepath.Join(rootdir, "run/snapd/auto-import")
SnapSeqDir = filepath.Join(rootdir, snappyDir, "sequence")
SnapStateFile = SnapStateFileUnder(rootdir)
SnapSystemKeyFile = filepath.Join(rootdir, snappyDir, "system-key")
SnapCacheDir = filepath.Join(rootdir, "/var/cache/snapd")
SnapNamesFile = filepath.Join(SnapCacheDir, "names")
SnapSectionsFile = filepath.Join(SnapCacheDir, "sections")
SnapCommandsDB = filepath.Join(SnapCacheDir, "commands.db")
SnapAuxStoreInfoDir = filepath.Join(SnapCacheDir, "aux")
SnapSeedDir = SnapSeedDirUnder(rootdir)
SnapDeviceDir = filepath.Join(rootdir, snappyDir, "device")
SnapModeenvFile = SnapModeenvFileUnder(rootdir)
SnapRepairDir = filepath.Join(rootdir, snappyDir, "repair")
SnapRepairStateFile = filepath.Join(SnapRepairDir, "repair.json")
SnapRepairRunDir = filepath.Join(SnapRepairDir, "run")
SnapRepairAssertsDir = filepath.Join(SnapRepairDir, "assertions")
SnapRunRepairDir = filepath.Join(SnapRunDir, "repair")
SnapRollbackDir = filepath.Join(rootdir, snappyDir, "rollback")
SnapBinariesDir = filepath.Join(SnapMountDir, "bin")
SnapServicesDir = filepath.Join(rootdir, "/etc/systemd/system")
SnapUserServicesDir = filepath.Join(rootdir, "/etc/systemd/user")
SnapSystemdConfDir = filepath.Join(rootdir, "/etc/systemd/system.conf.d")
SnapBusPolicyDir = filepath.Join(rootdir, "/etc/dbus-1/system.d")
SystemApparmorDir = filepath.Join(rootdir, "/etc/apparmor.d")
SystemApparmorCacheDir = filepath.Join(rootdir, "/etc/apparmor.d/cache")
exists, isDir, _ := osutil.DirExists(SystemApparmorCacheDir)
if !exists || !isDir {
// some systems use a single cache dir instead of splitting
// out the system cache
SystemApparmorCacheDir = AppArmorCacheDir
}
CloudMetaDataFile = filepath.Join(rootdir, "/var/lib/cloud/seed/nocloud-net/meta-data")
CloudInstanceDataFile = filepath.Join(rootdir, "/run/cloud-init/instance-data.json")
SnapUdevRulesDir = filepath.Join(rootdir, "/etc/udev/rules.d")
SnapKModModulesDir = filepath.Join(rootdir, "/etc/modules-load.d/")
LocaleDir = filepath.Join(rootdir, "/usr/share/locale")
ClassicDir = filepath.Join(rootdir, "/writable/classic")
if release.DistroLike("fedora") {
// rhel, centos, fedora and derivatives
// both rhel and centos list "fedora" in ID_LIKE
DistroLibExecDir = filepath.Join(rootdir, "/usr/libexec/snapd")
} else {
DistroLibExecDir = filepath.Join(rootdir, "/usr/lib/snapd")
}
XdgRuntimeDirBase = filepath.Join(rootdir, "/run/user")
XdgRuntimeDirGlob = filepath.Join(XdgRuntimeDirBase, "*/")
CompletionHelperInCore = filepath.Join(CoreLibExecDir, "etelpmoc.sh")
CompletersDir = filepath.Join(rootdir, "/usr/share/bash-completion/completions/")
// These paths agree across all supported distros
SystemFontsDir = filepath.Join(rootdir, "/usr/share/fonts")
SystemLocalFontsDir = filepath.Join(rootdir, "/usr/local/share/fonts")
// The cache path is true for Ubuntu, Debian, openSUSE, Arch
SystemFontconfigCacheDirs = []string{filepath.Join(rootdir, "/var/cache/fontconfig")}
if release.DistroLike("fedora") && !release.DistroLike("amzn") {
// Applies to Fedora and CentOS, Amazon Linux 2 is behind with
// updates to fontconfig and uses /var/cache/fontconfig instead,<|fim▁hole|> // https://fedoraproject.org/wiki/Changes/FontconfigCacheDirChange
// https://bugzilla.redhat.com/show_bug.cgi?id=1416380
// https://bugzilla.redhat.com/show_bug.cgi?id=1377367
//
// However, snaps may still use older libfontconfig, which fails
// to parse the new config and defaults to
// /var/cache/fontconfig. In this case we need to make both
// locations available
SystemFontconfigCacheDirs = append(SystemFontconfigCacheDirs, filepath.Join(rootdir, "/usr/lib/fontconfig/cache"))
}
FreezerCgroupDir = filepath.Join(rootdir, "/sys/fs/cgroup/freezer/")
PidsCgroupDir = filepath.Join(rootdir, "/sys/fs/cgroup/pids/")
SnapshotsDir = filepath.Join(rootdir, snappyDir, "snapshots")
ErrtrackerDbDir = filepath.Join(rootdir, snappyDir, "errtracker.db")
SysfsDir = filepath.Join(rootdir, "/sys")
FeaturesDir = filepath.Join(rootdir, snappyDir, "features")
RunMnt = filepath.Join(rootdir, "/run/mnt")
}
// what inside a (non-classic) snap is /usr/lib/snapd, outside can come from different places
func libExecOutside(base string) string {
if base == "" {
// no explicit base; core is it
return filepath.Join(SnapMountDir, "core/current/usr/lib/snapd")
}
// if a base is set, libexec comes from the snapd snap if it's
// installed, and otherwise from the distro.
p := filepath.Join(SnapMountDir, "snapd/current/usr/lib/snapd")
if st, err := os.Stat(p); err == nil && st.IsDir() {
return p
}
return DistroLibExecDir
}
func CompleteShPath(base string) string {
return filepath.Join(libExecOutside(base), "complete.sh")
}
func IsCompleteShSymlink(compPath string) bool {
target, err := os.Readlink(compPath)
return err == nil && filepath.Base(target) == "complete.sh"
}<|fim▁end|> | // see: |
<|file_name|>routers.py<|end_file_name|><|fim▁begin|>class TestRouter(object):
def allow_migrate(self, db, app_label, model_name=None, **hints):
"""
The Tribble model should be the only one to appear in the 'other' db.
"""
if model_name == 'tribble':
return db == 'other'
elif db == 'other':<|fim▁hole|><|fim▁end|> | return False |
<|file_name|>pick-target.js<|end_file_name|><|fim▁begin|>'use strict';
var type = require('type-detect');
var path = require('path');
var removeTrailingSeparator = require('remove-trailing-path-separator');
var errors = require('common-errors');
var prettyFormat = require('pretty-format');
module.exports = function (input, cb) {
if (type(input) !== 'string') {
cb(new errors.TypeError('input requires string'), null);<|fim▁hole|> var split = removeTrailingSeparator(path.normalize(input)).split(path.sep);
var inputLast = split[split.length - 1];
if (['', '.', '..'].some(function (value) {
return inputLast === value;
})) {
cb(new errors.ArgumentError('input is not allowed: ' + prettyFormat(inputLast)), null);
return;
}
cb(null, inputLast);
};<|fim▁end|> | return;
} |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""
uritemplate.api
===============
This module contains the very simple API provided by uritemplate.
"""
from uritemplate.template import URITemplate
def expand(uri, var_dict=None, **kwargs):
"""Expand the template with the given parameters.
:param str uri: The templated URI to expand
:param dict var_dict: Optional dictionary with variables and values
:param kwargs: Alternative way to pass arguments
:returns: str
Example::
expand('https://api.github.com{/end}', {'end': 'users'})
expand('https://api.github.com{/end}', end='gists')
.. note:: Passing values by both parts, may override values in
``var_dict``. For example::
expand('https://{var}', {'var': 'val1'}, var='val2')
``val2`` will be used instead of ``val1``.
"""
return URITemplate(uri).expand(var_dict, **kwargs)
def partial(uri, var_dict=None, **kwargs):
"""Partially expand the template with the given parameters.
If all of the parameters for the template are not given, return a
partially expanded template.<|fim▁hole|>
Example::
t = URITemplate('https://api.github.com{/end}')
t.partial() # => URITemplate('https://api.github.com{/end}')
"""
return URITemplate(uri).partial(var_dict, **kwargs)
def variables(uri):
"""Parse the variables of the template.
This returns all of the variable names in the URI Template.
:returns: Set of variable names
:rtype: set
Example::
variables('https://api.github.com{/end})
# => {'end'}
variables('https://api.github.com/repos{/username}{/repository}')
# => {'username', 'repository'}
"""
return set(URITemplate(uri).variable_names)<|fim▁end|> |
:param dict var_dict: Optional dictionary with variables and values
:param kwargs: Alternative way to pass arguments
:returns: :class:`URITemplate` |
<|file_name|>htmliframeelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::attr::AttrValue;
use dom::attr::AttrHelpers;
use dom::bindings::codegen::Bindings::HTMLIFrameElementBinding;
use dom::bindings::codegen::Bindings::HTMLIFrameElementBinding::HTMLIFrameElementMethods;
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::codegen::InheritTypes::{NodeCast, ElementCast, EventCast};
use dom::bindings::codegen::InheritTypes::{EventTargetCast, HTMLElementCast, HTMLIFrameElementDerived};
use dom::bindings::conversions::ToJSValConvertible;
use dom::bindings::error::{ErrorResult, Fallible};
use dom::bindings::error::Error::NotSupported;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JSRef, Temporary, OptionalRootable};
use dom::customevent::CustomEvent;
use dom::document::Document;
use dom::element::Element;
use dom::element::AttributeHandlers;
use dom::event::{Event, EventBubbles, EventCancelable, EventHelpers};
use dom::eventtarget::{EventTarget, EventTargetTypeId};
use dom::element::ElementTypeId;
use dom::htmlelement::{HTMLElement, HTMLElementTypeId};
use dom::node::{Node, NodeHelpers, NodeTypeId, window_from_node};
use dom::urlhelper::UrlHelper;
use dom::virtualmethods::VirtualMethods;
use dom::window::{Window, WindowHelpers};
use page::IterablePage;
use msg::constellation_msg::{PipelineId, SubpageId, ConstellationChan, NavigationDirection};
use msg::constellation_msg::IFrameSandboxState::{IFrameSandboxed, IFrameUnsandboxed};
use msg::constellation_msg::Msg as ConstellationMsg;
use util::opts;
use util::str::DOMString;
use string_cache::Atom;
use std::ascii::AsciiExt;
use std::borrow::ToOwned;
use std::cell::Cell;
use url::{Url, UrlParser};
enum SandboxAllowance {
AllowNothing = 0x00,
AllowSameOrigin = 0x01,
AllowTopNavigation = 0x02,
AllowForms = 0x04,
AllowScripts = 0x08,
AllowPointerLock = 0x10,
AllowPopups = 0x20
}
#[dom_struct]
pub struct HTMLIFrameElement {
htmlelement: HTMLElement,
subpage_id: Cell<Option<SubpageId>>,
containing_page_pipeline_id: Cell<Option<PipelineId>>,
sandbox: Cell<Option<u8>>,
}
impl HTMLIFrameElementDerived for EventTarget {
fn is_htmliframeelement(&self) -> bool {
*self.type_id() == EventTargetTypeId::Node(NodeTypeId::Element(ElementTypeId::HTMLElement(HTMLElementTypeId::HTMLIFrameElement)))
}
}
pub trait HTMLIFrameElementHelpers {
fn is_sandboxed(self) -> bool;
fn get_url(self) -> Option<Url>;
/// http://www.whatwg.org/html/#process-the-iframe-attributes
fn process_the_iframe_attributes(self);
fn generate_new_subpage_id(self) -> (SubpageId, Option<SubpageId>);
fn navigate_child_browsing_context(self, url: Url);
fn dispatch_mozbrowser_event(self, event_name: String, event_detail: Option<String>);
}
impl<'a> HTMLIFrameElementHelpers for JSRef<'a, HTMLIFrameElement> {
fn is_sandboxed(self) -> bool {
self.sandbox.get().is_some()
}
fn get_url(self) -> Option<Url> {
let element: JSRef<Element> = ElementCast::from_ref(self);
element.get_attribute(ns!(""), &atom!("src")).root().and_then(|src| {
let url = src.r().value();
if url.as_slice().is_empty() {
None
} else {
let window = window_from_node(self).root();
UrlParser::new().base_url(&window.r().get_url())
.parse(url.as_slice()).ok()
}
})
}
fn generate_new_subpage_id(self) -> (SubpageId, Option<SubpageId>) {
let old_subpage_id = self.subpage_id.get();
let win = window_from_node(self).root();
let subpage_id = win.r().get_next_subpage_id();
self.subpage_id.set(Some(subpage_id));
(subpage_id, old_subpage_id)
}
fn navigate_child_browsing_context(self, url: Url) {
let sandboxed = if self.is_sandboxed() {
IFrameSandboxed
} else {
IFrameUnsandboxed
};
let window = window_from_node(self).root();
let window = window.r();
let (new_subpage_id, old_subpage_id) = self.generate_new_subpage_id();
self.containing_page_pipeline_id.set(Some(window.pipeline()));
let ConstellationChan(ref chan) = window.constellation_chan();
chan.send(ConstellationMsg::ScriptLoadedURLInIFrame(url,
window.pipeline(),
new_subpage_id,
old_subpage_id,
sandboxed)).unwrap();
if opts::experimental_enabled() {
// https://developer.mozilla.org/en-US/docs/Web/Events/mozbrowserloadstart
self.dispatch_mozbrowser_event("mozbrowserloadstart".to_owned(), None);
}
}
fn process_the_iframe_attributes(self) {
let url = match self.get_url() {
Some(url) => url.clone(),
None => Url::parse("about:blank").unwrap(),
};
self.navigate_child_browsing_context(url);
}
fn dispatch_mozbrowser_event(self, event_name: String, event_detail: Option<String>) {
// TODO(gw): Support mozbrowser event types that have detail which is not a string.
// See https://developer.mozilla.org/en-US/docs/Web/API/Using_the_Browser_API
// for a list of mozbrowser events.
assert!(opts::experimental_enabled());
if self.Mozbrowser() {
let window = window_from_node(self).root();
let cx = window.r().get_cx();
let custom_event = CustomEvent::new(GlobalRef::Window(window.r()),
event_name.to_owned(),
true,
true,
event_detail.to_jsval(cx)).root();
let target: JSRef<EventTarget> = EventTargetCast::from_ref(self);
let event: JSRef<Event> = EventCast::from_ref(custom_event.r());
event.fire(target);
}
}
}
impl HTMLIFrameElement {
fn new_inherited(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> HTMLIFrameElement {
HTMLIFrameElement {
htmlelement: HTMLElement::new_inherited(HTMLElementTypeId::HTMLIFrameElement, localName, prefix, document),
subpage_id: Cell::new(None),
containing_page_pipeline_id: Cell::new(None),
sandbox: Cell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: DOMString, prefix: Option<DOMString>, document: JSRef<Document>) -> Temporary<HTMLIFrameElement> {
let element = HTMLIFrameElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLIFrameElementBinding::Wrap)
}
#[inline]
pub fn containing_page_pipeline_id(&self) -> Option<PipelineId> {
self.containing_page_pipeline_id.get()
}
#[inline]
pub fn subpage_id(&self) -> Option<SubpageId> {
self.subpage_id.get()
}
}
impl<'a> HTMLIFrameElementMethods for JSRef<'a, HTMLIFrameElement> {
fn Src(self) -> DOMString {
let element: JSRef<Element> = ElementCast::from_ref(self);
element.get_string_attribute(&atom!("src"))
}
fn SetSrc(self, src: DOMString) {
let element: JSRef<Element> = ElementCast::from_ref(self);
element.set_url_attribute(&atom!("src"), src)
}
fn Sandbox(self) -> DOMString {
let element: JSRef<Element> = ElementCast::from_ref(self);
element.get_string_attribute(&atom!("sandbox"))
}
fn SetSandbox(self, sandbox: DOMString) {
let element: JSRef<Element> = ElementCast::from_ref(self);
element.set_tokenlist_attribute(&atom!("sandbox"), sandbox);
}
fn GetContentWindow(self) -> Option<Temporary<Window>> {
self.subpage_id.get().and_then(|subpage_id| {
let window = window_from_node(self).root();
let window = window.r();
let children = window.page().children.borrow();
children.iter().find(|page| {
let window = page.window().root();
window.r().subpage() == Some(subpage_id)
}).map(|page| page.window())
})
}
fn GetContentDocument(self) -> Option<Temporary<Document>> {
self.GetContentWindow().root().and_then(|window| {
let self_url = match self.get_url() {
Some(self_url) => self_url,
None => return None,
};
let win_url = window_from_node(self).root().r().get_url();
if UrlHelper::SameOrigin(&self_url, &win_url) {
Some(window.r().Document())
} else {
None
}
})
}
// Experimental mozbrowser implementation is based on the webidl
// present in the gecko source tree, and the documentation here:
// https://developer.mozilla.org/en-US/docs/Web/API/Using_the_Browser_API
// TODO(gw): Use experimental codegen when it is available to avoid
// exposing these APIs. See https://github.com/servo/servo/issues/5264.
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/iframe#attr-mozbrowser
fn Mozbrowser(self) -> bool {
if opts::experimental_enabled() {
let element: JSRef<Element> = ElementCast::from_ref(self);
element.has_attribute(&Atom::from_slice("mozbrowser"))
} else {
false
}
}
fn SetMozbrowser(self, value: bool) -> ErrorResult {
if opts::experimental_enabled() {
let element: JSRef<Element> = ElementCast::from_ref(self);
element.set_bool_attribute(&Atom::from_slice("mozbrowser"), value);
}
Ok(())
}
// https://developer.mozilla.org/en-US/docs/Web/API/HTMLIFrameElement/goBack
fn GoBack(self) -> Fallible<()> {
if self.Mozbrowser() {
let node: JSRef<Node> = NodeCast::from_ref(self);
if node.is_in_doc() {
let window = window_from_node(self).root();
let window = window.r();
let pipeline_info = Some((self.containing_page_pipeline_id().unwrap(),
self.subpage_id().unwrap()));
let ConstellationChan(ref chan) = window.constellation_chan();
let msg = ConstellationMsg::Navigate(pipeline_info, NavigationDirection::Back);
chan.send(msg).unwrap();
}
Ok(())
} else {
debug!("this frame is not mozbrowser (or experimental_enabled is false)");
Err(NotSupported)
}
}
// https://developer.mozilla.org/en-US/docs/Web/API/HTMLIFrameElement/goForward
fn GoForward(self) -> Fallible<()> {
if self.Mozbrowser() {
let node: JSRef<Node> = NodeCast::from_ref(self);
if node.is_in_doc() {
let window = window_from_node(self).root();
let window = window.r();
let pipeline_info = Some((self.containing_page_pipeline_id().unwrap(),
self.subpage_id().unwrap()));
let ConstellationChan(ref chan) = window.constellation_chan();
let msg = ConstellationMsg::Navigate(pipeline_info, NavigationDirection::Forward);
chan.send(msg).unwrap();
}
Ok(())
} else {
debug!("this frame is not mozbrowser (or experimental_enabled is false)");
Err(NotSupported)
}
}
// https://developer.mozilla.org/en-US/docs/Web/API/HTMLIFrameElement/reload
    /// Not implemented yet; always reports `NotSupported`.
    fn Reload(self, _hardReload: bool) -> Fallible<()> {
        Err(NotSupported)
    }
// https://developer.mozilla.org/en-US/docs/Web/API/HTMLIFrameElement/stop
fn Stop(self) -> Fallible<()> {<|fim▁hole|> Err(NotSupported)
}
}
impl<'a> VirtualMethods for JSRef<'a, HTMLIFrameElement> {
    /// Delegates unhandled virtual calls to the HTMLElement base type.
    fn super_type<'b>(&'b self) -> Option<&'b VirtualMethods> {
        let htmlelement: &JSRef<HTMLElement> = HTMLElementCast::from_borrowed_ref(self);
        Some(htmlelement as &VirtualMethods)
    }

    /// Reacts to attribute changes: recomputes the cached sandbox flag set
    /// when `sandbox` changes, and (re)processes the iframe when `src`
    /// changes while the element is in a document.
    fn after_set_attr(&self, attr: JSRef<Attr>) {
        if let Some(ref s) = self.super_type() {
            s.after_set_attr(attr);
        }

        match attr.local_name() {
            &atom!("sandbox") => {
                // Fold every recognized sandbox keyword into a bitmask;
                // unknown tokens contribute AllowNothing (i.e. nothing).
                let mut modes = SandboxAllowance::AllowNothing as u8;
                if let Some(ref tokens) = attr.value().tokens() {
                    for token in tokens.iter() {
                        modes |= match token.as_slice().to_ascii_lowercase().as_slice() {
                            "allow-same-origin" => SandboxAllowance::AllowSameOrigin,
                            "allow-forms" => SandboxAllowance::AllowForms,
                            "allow-pointer-lock" => SandboxAllowance::AllowPointerLock,
                            "allow-popups" => SandboxAllowance::AllowPopups,
                            "allow-scripts" => SandboxAllowance::AllowScripts,
                            "allow-top-navigation" => SandboxAllowance::AllowTopNavigation,
                            _ => SandboxAllowance::AllowNothing
                        } as u8;
                    }
                }
                self.sandbox.set(Some(modes));
            },
            &atom!("src") => {
                let node: JSRef<Node> = NodeCast::from_ref(*self);
                if node.is_in_doc() {
                    self.process_the_iframe_attributes()
                }
            },
            _ => ()
        }
    }

    /// `sandbox` is parsed as a space-separated token list; every other
    /// attribute uses the base type's default parsing.
    fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {
        match name {
            &atom!("sandbox") => AttrValue::from_serialized_tokenlist(value),
            _ => self.super_type().unwrap().parse_plain_attribute(name, value),
        }
    }

    /// Clears the cached sandbox flags when the attribute is removed.
    fn before_remove_attr(&self, attr: JSRef<Attr>) {
        if let Some(ref s) = self.super_type() {
            s.before_remove_attr(attr);
        }

        match attr.local_name() {
            &atom!("sandbox") => self.sandbox.set(None),
            _ => ()
        }
    }

    /// Processes the iframe attributes once the element becomes part of a
    /// document.
    fn bind_to_tree(&self, tree_in_doc: bool) {
        if let Some(ref s) = self.super_type() {
            s.bind_to_tree(tree_in_doc);
        }

        if tree_in_doc {
            self.process_the_iframe_attributes();
        }
    }
}
<|file_name|>test_cassandra.py<|end_file_name|><|fim▁begin|># stdlib
import threading
import time
from types import ListType
import unittest
# 3p
from nose.plugins.attrib import attr
# project
from aggregator import MetricsAggregator<|fim▁hole|>
STATSD_PORT = 8121
class DummyReporter(threading.Thread):
    """Background thread that flushes a metrics aggregator every
    ``interval`` seconds and keeps the last non-empty flush result.

    Tests read ``metrics`` to observe what was flushed and set
    ``finished = True`` to stop the loop.
    """

    def __init__(self, metrics_aggregator):
        threading.Thread.__init__(self)
        self.metrics_aggregator = metrics_aggregator
        self.interval = 10  # seconds between flushes
        self.metrics = None  # last non-empty flush result
        # Plain bool stop flag. (The original also assigned a
        # threading.Event() here but immediately overwrote it with False;
        # callers only ever treat the attribute as a bool.)
        self.finished = False
        self.start()

    def run(self):
        while not self.finished:
            time.sleep(self.interval)
            self.flush()

    def flush(self):
        metrics = self.metrics_aggregator.flush()
        if metrics:
            # Keep the previous snapshot when the aggregator had nothing new.
            self.metrics = metrics
@attr(requires='cassandra')
class JMXTestCase(unittest.TestCase):
    """Integration test: starts a dogstatsd Server plus a JMXFetch daemon
    against a local Cassandra instance and checks that JMX metrics arrive.
    """

    def setUp(self):
        # The statsd server and the reporter thread share one aggregator.
        aggregator = MetricsAggregator("test_host")
        self.server = Server(aggregator, "localhost", STATSD_PORT)
        self.reporter = DummyReporter(aggregator)

        self.t1 = threading.Thread(target=self.server.start)
        self.t1.start()

        # JMXFetch reports into the statsd server started above.
        confd_path = Fixtures.directory()
        self.jmx_daemon = JMXFetch(confd_path, {'dogstatsd_port': STATSD_PORT})
        self.t2 = threading.Thread(target=self.jmx_daemon.run)
        self.t2.start()

    def tearDown(self):
        self.server.stop()
        self.reporter.finished = True
        self.jmx_daemon.terminate()

    def testCustomJMXMetric(self):
        # Wait (up to 25s) for the reporter thread to see a non-empty flush.
        count = 0
        while self.reporter.metrics is None:
            time.sleep(1)
            count += 1
            if count > 25:
                raise Exception("No metrics were received in 25 seconds")

        metrics = self.reporter.metrics
        self.assertTrue(isinstance(metrics, ListType))
        self.assertTrue(len(metrics) > 0)
        # Expect a substantial number of cassandra.db.* metrics tagged with
        # instance:cassandra_instance.
        self.assertTrue(len([t for t in metrics if "cassandra.db." in t['metric'] and "instance:cassandra_instance" in t['tags']]) > 40, metrics)
from jmxfetch import JMXFetch
from tests.checks.common import Fixtures |
<|file_name|>Create_pos&neg.py<|end_file_name|><|fim▁begin|>def create_pos_n_neg():<|fim▁hole|>
if file_type == 'pos':
line = file_type+'/'+img+' 1 0 0 50 50\n'
with open('info.dat','a') as f:
f.write(line)
elif file_type == 'neg':
line = file_type+'/'+img+'\n'
with open('bg.txt','a') as f:
f.write(line)<|fim▁end|> | for file_type in ['neg']:
for img in os.listdir(file_type): |
<|file_name|>command.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Gfx-rs Developers.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#![allow(missing_docs)]
use std::ptr;
use winapi::{FLOAT, INT, UINT, UINT8, DXGI_FORMAT,
DXGI_FORMAT_R16_UINT, DXGI_FORMAT_R32_UINT,
D3D11_CLEAR_FLAG, D3D11_PRIMITIVE_TOPOLOGY, D3D11_VIEWPORT, D3D11_RECT,
ID3D11RasterizerState, ID3D11DepthStencilState, ID3D11BlendState};
use core::{command, pso, shade, state, target, texture as tex};
use core::{IndexType, VertexCount};
use core::{MAX_VERTEX_ATTRIBUTES, MAX_CONSTANT_BUFFERS,
MAX_RESOURCE_VIEWS, MAX_UNORDERED_VIEWS,
MAX_SAMPLERS, MAX_COLOR_TARGETS};
use {native, Resources, InputLayout, Buffer, Texture, Pipeline, Program};
/// The place of some data in the data buffer.
#[derive(Clone, Copy, PartialEq, Debug)]
pub struct DataPointer {
offset: u32,
size: u32,
}
pub struct DataBuffer(Vec<u8>);
impl DataBuffer {
    /// Create a new, empty data buffer.
    pub fn new() -> DataBuffer {
        DataBuffer(Vec::new())
    }
    /// Drop all stored bytes, keeping the allocation.
    pub fn reset(&mut self) {
        self.0.clear();
    }
    /// Append a byte slice and return a pointer to its location.
    pub fn add(&mut self, data: &[u8]) -> DataPointer {
        let start = self.0.len();
        self.0.extend_from_slice(data);
        DataPointer {
            offset: start as u32,
            size: data.len() as u32,
        }
    }
    /// Resolve a previously returned pointer back into a byte slice.
    pub fn get(&self, ptr: DataPointer) -> &[u8] {
        let begin = ptr.offset as usize;
        let end = begin + ptr.size as usize;
        &self.0[begin .. end]
    }
}
///Serialized device command.
#[derive(Clone, Copy, Debug)]
pub enum Command {
    // states
    BindProgram(Program),
    BindInputLayout(InputLayout),
    /// Index buffer plus the DXGI format of its indices (R16/R32 UINT).
    BindIndex(Buffer, DXGI_FORMAT),
    /// Per-slot buffers, strides and offsets for every vertex attribute slot.
    BindVertexBuffers([native::Buffer; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES], [UINT; MAX_VERTEX_ATTRIBUTES]),
    BindConstantBuffers(shade::Stage, [native::Buffer; MAX_CONSTANT_BUFFERS]),
    BindShaderResources(shade::Stage, [native::Srv; MAX_RESOURCE_VIEWS]),
    BindSamplers(shade::Stage, [native::Sampler; MAX_SAMPLERS]),
    BindPixelTargets([native::Rtv; MAX_COLOR_TARGETS], native::Dsv),
    SetPrimitive(D3D11_PRIMITIVE_TOPOLOGY),
    SetViewport(D3D11_VIEWPORT),
    SetScissor(D3D11_RECT),
    SetRasterizer(*const ID3D11RasterizerState),
    /// Depth-stencil state plus the stencil reference value.
    SetDepthStencil(*const ID3D11DepthStencilState, UINT),
    /// Blend state, blend factor and sample mask.
    SetBlend(*const ID3D11BlendState, [FLOAT; 4], UINT),
    // resource updates
    /// Copy the pointed-to bytes into the buffer at the given offset.
    UpdateBuffer(Buffer, DataPointer, usize),
    UpdateTexture(Texture, tex::Kind, Option<tex::CubeFace>, DataPointer, tex::RawImageInfo),
    GenerateMips(native::Srv),
    // drawing
    ClearColor(native::Rtv, [f32; 4]),
    ClearDepthStencil(native::Dsv, D3D11_CLEAR_FLAG, FLOAT, UINT8),
    /// (vertex count, start vertex)
    Draw(UINT, UINT),
    /// (vertex count, instance count, start vertex, instance offset)
    DrawInstanced(UINT, UINT, UINT, UINT),
    /// (index count, start index, base vertex)
    DrawIndexed(UINT, UINT, INT),
    DrawIndexedInstanced(UINT, UINT, UINT, INT, UINT),
}
unsafe impl Send for Command {}
/// State remembered between commands so redundant state changes can be
/// skipped and pieces set by different calls (PSO binding vs. ref values)
/// can be combined at draw time.
struct Cache {
    /// Stride of each vertex attribute slot, as declared by the bound PSO;
    /// `None` for unused slots.
    attrib_strides: [Option<pso::ElemStride>; MAX_VERTEX_ATTRIBUTES],
    rasterizer: *const ID3D11RasterizerState,
    depth_stencil: *const ID3D11DepthStencilState,
    stencil_ref: UINT,
    blend: *const ID3D11BlendState,
    blend_ref: [FLOAT; 4],
}
// NOTE(review): Send is asserted manually because the raw D3D11 state
// pointers are not Send by default — presumably safe under this crate's
// threading model; confirm before reuse.
unsafe impl Send for Cache {}

impl Cache {
    /// A cache with no PSO bound: null state pointers, zeroed ref values.
    fn new() -> Cache {
        Cache {
            attrib_strides: [None; MAX_VERTEX_ATTRIBUTES],
            rasterizer: ptr::null(),
            depth_stencil: ptr::null(),
            stencil_ref: 0,
            blend: ptr::null(),
            blend_ref: [0.0; 4],
        }
    }
}
pub struct CommandBuffer<P> {
pub parser: P,
cache: Cache,
}
/// Consumer of serialized commands; implemented by the device-side executor.
pub trait Parser: Sized + Send {
    /// Drop all recorded commands and data.
    fn reset(&mut self);
    /// Record a single serialized command.
    fn parse(&mut self, Command);
    /// Record a buffer update with its raw bytes and destination offset.
    fn update_buffer(&mut self, Buffer, &[u8], usize);
    /// Record a texture update for the given kind/face/region.
    fn update_texture(&mut self, Texture, tex::Kind, Option<tex::CubeFace>, &[u8], tex::RawImageInfo);
}
impl<P: Parser> From<P> for CommandBuffer<P> {
fn from(parser: P) -> CommandBuffer<P> {
CommandBuffer {
parser: parser,
cache: Cache::new(),
}
}
}
impl<P: Parser> CommandBuffer<P> {
    /// Emits the cached depth-stencil and blend state right before a draw,
    /// since the ref values (set via `set_ref_values`) must be combined
    /// with the state pointers from the bound PSO.
    fn flush(&mut self) {
        let sample_mask = !0; //TODO
        self.parser.parse(Command::SetDepthStencil(self.cache.depth_stencil, self.cache.stencil_ref));
        self.parser.parse(Command::SetBlend(self.cache.blend, self.cache.blend_ref, sample_mask));
    }
}
impl<P: Parser> command::Buffer<Resources> for CommandBuffer<P> {
fn reset(&mut self) {
self.parser.reset();
self.cache = Cache::new();
}
    fn bind_pipeline_state(&mut self, pso: Pipeline) {
        self.parser.parse(Command::SetPrimitive(pso.topology));
        // Remember the per-slot vertex strides declared by the PSO so the
        // later bind_vertex_buffers call can fill them in.
        for (stride, ad_option) in self.cache.attrib_strides.iter_mut().zip(pso.attributes.iter()) {
            *stride = ad_option.map(|(buf_id, _)| match pso.vertex_buffers[buf_id as usize] {
                Some(ref bdesc) => bdesc.stride,
                None => {
                    error!("Unexpected use of buffer id {}", buf_id);
                    0
                },
            });
        }
        // Rasterizer state is emitted eagerly, but only when it changed;
        // depth-stencil and blend are deferred to flush() so they can be
        // merged with the ref values.
        if self.cache.rasterizer != pso.rasterizer {
            self.cache.rasterizer = pso.rasterizer;
            self.parser.parse(Command::SetRasterizer(pso.rasterizer));
        }
        self.cache.depth_stencil = pso.depth_stencil;
        self.cache.blend = pso.blend;
        self.parser.parse(Command::BindInputLayout(pso.layout));
        self.parser.parse(Command::BindProgram(pso.program));
    }
fn bind_vertex_buffers(&mut self, vbs: pso::VertexBufferSet<Resources>) {
//Note: assumes `bind_pipeline_state` is called prior
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_VERTEX_ATTRIBUTES];
let mut strides = [0; MAX_VERTEX_ATTRIBUTES];
let mut offsets = [0; MAX_VERTEX_ATTRIBUTES];
for i in 0 .. MAX_VERTEX_ATTRIBUTES {
match (vbs.0[i], self.cache.attrib_strides[i]) {
(None, Some(stride)) => {
error!("No vertex input provided for slot {} with stride {}", i, stride)
},
(Some((buffer, offset)), Some(stride)) => {
buffers[i] = buffer.0;
strides[i] = stride as UINT;
offsets[i] = offset as UINT;
},
(_, None) => (),
}
}
self.parser.parse(Command::BindVertexBuffers(buffers, strides, offsets));
}
fn bind_constant_buffers(&mut self, cbs: &[pso::ConstantBufferParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut buffers = [native::Buffer(ptr::null_mut()); MAX_CONSTANT_BUFFERS];
let mask = stage.into();
let mut count = 0;
for cbuf in cbs.iter() {
if cbuf.1.contains(mask) {
buffers[cbuf.2 as usize] = (cbuf.0).0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindConstantBuffers(stage, buffers));
}
}
}
fn bind_global_constant(&mut self, _: shade::Location, _: shade::UniformValue) {
error!("Global constants are not supported");
}
fn bind_resource_views(&mut self, rvs: &[pso::ResourceViewParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut views = [native::Srv(ptr::null_mut()); MAX_RESOURCE_VIEWS];
let mask = stage.into();
let mut count = 0;
for view in rvs.iter() {
if view.1.contains(mask) {
views[view.2 as usize] = view.0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindShaderResources(stage, views));
}
}
}
fn bind_unordered_views(&mut self, uvs: &[pso::UnorderedViewParam<Resources>]) {
let mut views = [(); MAX_UNORDERED_VIEWS];
let mut count = 0;
for view in uvs.iter() {
views[view.2 as usize] = view.0;
count += 1;
}
if count != 0 {
unimplemented!()
//self.parser.parse(Command::BindUnorderedAccess(stage, views));
}
}
fn bind_samplers(&mut self, ss: &[pso::SamplerParam<Resources>]) {
for &stage in shade::STAGES.iter() {
let mut samplers = [native::Sampler(ptr::null_mut()); MAX_SAMPLERS];
let mask = stage.into();
let mut count = 0;
for sm in ss.iter() {
if sm.1.contains(mask) {
samplers[sm.2 as usize] = sm.0;
count += 1;
}
}
if count != 0 {
self.parser.parse(Command::BindSamplers(stage, samplers));
}
}
}
fn bind_pixel_targets(&mut self, pts: pso::PixelTargetSet<Resources>) {
if let (Some(ref d), Some(ref s)) = (pts.depth, pts.stencil) {
if d != s {<|fim▁hole|> }
let view = pts.get_view();
let viewport = D3D11_VIEWPORT {
TopLeftX: 0.0,
TopLeftY: 0.0,
Width: view.0 as f32,
Height: view.1 as f32,
MinDepth: 0.0,
MaxDepth: 1.0,
};
let mut colors = [native::Rtv(ptr::null_mut()); MAX_COLOR_TARGETS];
for i in 0 .. MAX_COLOR_TARGETS {
if let Some(c) = pts.colors[i] {
colors[i] = c;
}
}
let ds = pts.depth.unwrap_or(native::Dsv(ptr::null_mut()));
self.parser.parse(Command::BindPixelTargets(colors, ds));
self.parser.parse(Command::SetViewport(viewport));
}
fn bind_index(&mut self, buf: Buffer, itype: IndexType) {
let format = match itype {
IndexType::U16 => DXGI_FORMAT_R16_UINT,
IndexType::U32 => DXGI_FORMAT_R32_UINT,
};
self.parser.parse(Command::BindIndex(buf, format));
}
fn set_scissor(&mut self, rect: target::Rect) {
self.parser.parse(Command::SetScissor(D3D11_RECT {
left: rect.x as INT,
top: rect.y as INT,
right: (rect.x + rect.w) as INT,
bottom: (rect.y + rect.h) as INT,
}));
}
fn set_ref_values(&mut self, rv: state::RefValues) {
if rv.stencil.0 != rv.stencil.1 {
error!("Unable to set different stencil ref values for front ({}) and back ({})",
rv.stencil.0, rv.stencil.1);
}
self.cache.stencil_ref = rv.stencil.0 as UINT;
self.cache.blend_ref = rv.blend;
}
fn update_buffer(&mut self, buf: Buffer, data: &[u8], offset: usize) {
self.parser.update_buffer(buf, data, offset);
}
fn update_texture(&mut self, tex: Texture, kind: tex::Kind, face: Option<tex::CubeFace>,
data: &[u8], image: tex::RawImageInfo) {
self.parser.update_texture(tex, kind, face, data, image);
}
fn generate_mipmap(&mut self, srv: native::Srv) {
self.parser.parse(Command::GenerateMips(srv));
}
fn clear_color(&mut self, target: native::Rtv, value: command::ClearColor) {
match value {
command::ClearColor::Float(data) => {
self.parser.parse(Command::ClearColor(target, data));
},
_ => {
error!("Unable to clear int/uint target");
},
}
}
    fn clear_depth_stencil(&mut self, target: native::Dsv, depth: Option<target::Depth>,
                           stencil: Option<target::Stencil>) {
        // Bit 0x1 / 0x2 match D3D11_CLEAR_DEPTH / D3D11_CLEAR_STENCIL,
        // hence the "magic constants" warning below.
        let flags = //warning: magic constants ahead
            D3D11_CLEAR_FLAG(if depth.is_some() {1} else {0}) |
            D3D11_CLEAR_FLAG(if stencil.is_some() {2} else {0});
        // Unset components pass their Default value, but the flags mask
        // them out on the device side.
        self.parser.parse(Command::ClearDepthStencil(target, flags,
            depth.unwrap_or_default() as FLOAT,
            stencil.unwrap_or_default() as UINT8
        ));
    }
fn call_draw(&mut self, start: VertexCount, count: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawInstanced(
count as UINT, ninst as UINT, start as UINT, offset as UINT),
None => Command::Draw(count as UINT, start as UINT),
});
}
fn call_draw_indexed(&mut self, start: VertexCount, count: VertexCount,
base: VertexCount, instances: Option<command::InstanceParams>) {
self.flush();
self.parser.parse(match instances {
Some((ninst, offset)) => Command::DrawIndexedInstanced(
count as UINT, ninst as UINT, start as UINT, base as INT, offset as UINT),
None => Command::DrawIndexed(count as UINT, start as UINT, base as INT),
});
}
}<|fim▁end|> | error!("Depth and stencil views have to be the same");
} |
<|file_name|>test_input_poisson.py<|end_file_name|><|fim▁begin|>input_name = '../examples/diffusion/poisson.py'<|fim▁hole|>class Test( TestInput ):
pass<|fim▁end|> | output_name = 'test_poisson.vtk'
from testsBasic import TestInput |
<|file_name|>manager.js<|end_file_name|><|fim▁begin|>var _ = require('lodash'),
Promise = require('bluebird'),
IndexMapGenerator = require('./index-generator'),
PagesMapGenerator = require('./page-generator'),
PostsMapGenerator = require('./post-generator'),
UsersMapGenerator = require('./user-generator'),
TagsMapGenerator = require('./tag-generator'),
SiteMapManager;
// Coordinates the per-type sitemap generators (pages, posts, authors,
// tags) plus the index that links them together. Pre-built generators may
// be injected through `opts`; otherwise fresh ones are created.
SiteMapManager = function (opts) {
    opts = opts || {};

    // No XML is served until init() has resolved for every generator.
    this.initialized = false;

    this.pages = opts.pages || this.createPagesGenerator(opts);
    this.posts = opts.posts || this.createPostsGenerator(opts);
    this.authors = opts.authors || this.createUsersGenerator(opts);
    this.tags = opts.tags || this.createTagsGenerator(opts);

    this.index = opts.index || this.createIndexGenerator(opts);
};
_.extend(SiteMapManager.prototype, {
createIndexGenerator: function () {
return new IndexMapGenerator(_.pick(this, 'pages', 'posts', 'authors', 'tags'));
},
createPagesGenerator: function (opts) {
return new PagesMapGenerator(opts);
},
createPostsGenerator: function (opts) {
return new PostsMapGenerator(opts);
},
createUsersGenerator: function (opts) {
return new UsersMapGenerator(opts);
},
createTagsGenerator: function (opts) {
return new TagsMapGenerator(opts);
},
init: function () {
var self = this,
initOps = [
this.pages.init(),
this.posts.init(),
this.authors.init(),
this.tags.init()
];
return Promise.all(initOps).then(function () {
self.initialized = true;
});
},
getIndexXml: function () {
if (!this.initialized) {
return '';
}
return this.index.getIndexXml();
},
getSiteMapXml: function (type) {
if (!this.initialized || !this[type]) {
return null;
}
return this[type].siteMapContent;
},
pageAdded: function (page) {
if (!this.initialized) {
return;
}
if (page.get('status') !== 'published') {
return;
}
this.pages.addUrl(page.toJSON());
},
pageEdited: function (page) {
if (!this.initialized) {
return;
}
<|fim▁hole|>
// Published status hasn't changed and it's published
if (isPublished === wasPublished && isPublished) {
this.pages.updateUrl(pageData);
} else if (!isPublished && wasPublished) {
// Handle page going from published to draft
this.pageDeleted(page);
} else if (isPublished && !wasPublished) {
// ... and draft to published
this.pageAdded(page);
}
},
pageDeleted: function (page) {
if (!this.initialized) {
return;
}
this.pages.removeUrl(page.toJSON());
},
postAdded: function (post) {
if (!this.initialized) {
return;
}
if (post.get('status') !== 'published') {
return;
}
this.posts.addUrl(post.toJSON());
},
postEdited: function (post) {
if (!this.initialized) {
return;
}
var postData = post.toJSON(),
wasPublished = post.updated('status') === 'published',
isPublished = postData.status === 'published';
// Published status hasn't changed and it's published
if (isPublished === wasPublished && isPublished) {
this.posts.updateUrl(postData);
} else if (!isPublished && wasPublished) {
// Handle post going from published to draft
this.postDeleted(post);
} else if (isPublished && !wasPublished) {
// ... and draft to published
this.postAdded(post);
}
},
postDeleted: function (post) {
if (!this.initialized) {
return;
}
this.posts.removeUrl(post.toJSON());
},
userAdded: function (user) {
if (!this.initialized) {
return;
}
this.authors.addUrl(user.toJSON());
},
userEdited: function (user) {
if (!this.initialized) {
return;
}
var userData = user.toJSON();
this.authors.updateUrl(userData);
},
userDeleted: function (user) {
if (!this.initialized) {
return;
}
this.authors.removeUrl(user.toJSON());
},
tagAdded: function (tag) {
if (!this.initialized) {
return;
}
this.tags.addUrl(tag.toJSON());
},
tagEdited: function (tag) {
if (!this.initialized) {
return;
}
this.tags.updateUrl(tag.toJSON());
},
tagDeleted: function (tag) {
if (!this.initialized) {
return;
}
this.tags.removeUrl(tag.toJSON());
},
// TODO: Call this from settings model when it's changed
permalinksUpdated: function (permalinks) {
if (!this.initialized) {
return;
}
this.posts.updatePermalinksValue(permalinks.toJSON ? permalinks.toJSON() : permalinks);
},
_refreshAllPosts: _.throttle(function () {
this.posts.refreshAllPosts();
}, 3000, {
leading: false,
trailing: true
})
});
module.exports = SiteMapManager;<|fim▁end|> |
var pageData = page.toJSON(),
wasPublished = page.updated('status') === 'published',
isPublished = pageData.status === 'published';
|
<|file_name|>auth.ts<|end_file_name|><|fim▁begin|>import {HttpClient} from '@angular/common/http';
import {Injectable} from '@angular/core';
import {Observable, ReplaySubject, throwError} from 'rxjs';
import {map, tap, switchMap} from 'rxjs/operators';
import {SocketService} from './sockets';
import {StorageService} from './storage';
@Injectable({
providedIn: 'root'
})
export class AuthService {
private session: string;
private userInfo: any;
private authEvents: ReplaySubject<{User: any, Session: string}>;
constructor(
private _http: HttpClient,
private _storage: StorageService,
private _sockets: SocketService,
) {
this.authEvents = new ReplaySubject<{User: any, Session: string}>(1);
}
private nuke() {
this._storage.clear();
this.session = undefined;
this.userInfo = undefined;
this._sockets.leave();
}
getSession() {
return this.session;
}
getUser() {
return this.userInfo;
}
hasAccess(): boolean {
<|fim▁hole|> return this.authEvents;
}
identify() {
this._http.get<{Data: any}>(`/api/auth/`)
.pipe(
map(res => res.Data)
)
.subscribe(
data => {
this.session = data.Session.Key;
this.userInfo = data.User;
this._sockets.join(data.Session.Key);
this.authEvents.next({User: data.User, Session: data.Session.Key});
},
err => console.error(err)
);
}
  /**
   * Authenticates with the API using username/password credentials.
   * On success, caches the session key and user info, joins the socket
   * room for the session, and notifies auth-event subscribers.
   * Returns an erroring Observable when credentials are missing.
   */
  logIn(creds): Observable<any> {
    if (!creds || !creds.Username || !creds.Password) {
      return throwError('Need login creds');
    }

    return this._http.post<{Data: any}>('/api/login', creds)
      .pipe(
        map(res => res.Data),
        // tap = side effects only; the unwrapped Data payload flows through.
        tap(data => {
          this.session = data.Session;
          this.userInfo = data.User;
          this._sockets.join(data.Session);
          this.authEvents.next(data);
        })
      );
  }
signUp(creds): Observable<any> {
if (!creds || !creds.Username || !creds.Email || !creds.Password) {
return throwError('Need signup creds');
}
return this._http.post('/api/signup', creds, {responseType: 'text' as 'text'})
.pipe(
switchMap(_ => this.logIn(creds))
);
}
expireSocket() {
this.userInfo = null;
this.session = null;
this.authEvents.next(null);
}
logOut(): Observable<any> {
return this._http.post('/api/logOut', null)
.pipe(
tap(
res => this.nuke(),
err => this.nuke(),
() => this.authEvents.next(null)
)
);
}
}<|fim▁end|> | return !!this.userInfo;
}
observe(): Observable<{User: any, Session: string}> {
|
<|file_name|>Setup.py<|end_file_name|><|fim▁begin|>from Screen import Screen
from Components.ActionMap import NumberActionMap
from Components.config import config, ConfigNothing, ConfigBoolean, ConfigSelection
from Components.Label import Label
from Components.SystemInfo import SystemInfo
from Components.ConfigList import ConfigListScreen
from Components.Pixmap import Pixmap
from Components.Sources.StaticText import StaticText
from Components.Sources.Boolean import Boolean
from enigma import eEnv
import xml.etree.cElementTree
# FIXME: use resolveFile!
# read the setupmenu
try:
# first we search in the current path
setupfile = file('data/setup.xml', 'r')
except:
# if not found in the current path, we use the global datadir-path
setupfile = file(eEnv.resolve('${datadir}/enigma2/setup.xml'), 'r')
setupdom = xml.etree.cElementTree.parse(setupfile)
setupfile.close()
def getConfigMenuItem(configElement):
    """Look up a setup item by its config-element expression (e.g.
    "config.usage.setup_level") and return (translated label, element).

    Returns ("", None) when no setup item references the expression.
    NOTE: the matched expression is eval()'d; expressions come from the
    bundled setup.xml, not from user input.
    """
    for item in setupdom.getroot().findall('./setup/item/.'):
        if item.text == configElement:
            return _(item.attrib["text"]), eval(configElement)
    return "", None
class SetupError(Exception):
    """Raised when a screen references an unknown setup id."""

    def __init__(self, message):
        self.msg = message  # kept as an attribute for callers that read it

    def __str__(self):
        return self.msg
class SetupSummary(Screen):
    """Summary (front-display) screen mirroring the parent setup screen's
    currently selected entry, its value and its description."""

    def __init__(self, session, parent):
        Screen.__init__(self, session, parent=parent)
        self["SetupTitle"] = StaticText(parent.getTitle())
        self["SetupEntry"] = StaticText("")
        self["SetupValue"] = StaticText("")
        # Only track the parent while the summary is actually visible.
        self.onShow.append(self.addWatcher)
        self.onHide.append(self.removeWatcher)

    def addWatcher(self):
        if hasattr(self.parent, "onChangedEntry"):
            self.parent.onChangedEntry.append(self.selectionChanged)
        self.parent["config"].onSelectionChanged.append(self.selectionChanged)
        self.selectionChanged()

    def removeWatcher(self):
        if hasattr(self.parent, "onChangedEntry"):
            self.parent.onChangedEntry.remove(self.selectionChanged)
        self.parent["config"].onSelectionChanged.remove(self.selectionChanged)

    def selectionChanged(self):
        self["SetupEntry"].text = self.parent.getCurrentEntry()
        self["SetupValue"].text = self.parent.getCurrentValue()
        # The description label lives on the parent screen, if present.
        if hasattr(self.parent, "getCurrentDescription") and "description" in self.parent:
            self.parent["description"].text = self.parent.getCurrentDescription()
class Setup(ConfigListScreen, Screen):
    """Generic setup screen driven by a <setup key="..."> node in setup.xml."""

    ALLOW_SUSPEND = True

    def __init__(self, session, setup):
        Screen.__init__(self, session)
        # for the skin: first try a setup_<setupID>, then Setup
        self.skinName = ["setup_" + setup, "Setup"]

        self.list = []
        self.force_update_list = False
        xmldata = setupdom.getroot()
        for x in xmldata.findall("setup"):
            if x.get("key") == setup:
                self.setup = x
                break
        self.setup_title = self.setup.get("title", "").encode("UTF-8")
        self.seperation = int(self.setup.get('separation', '0'))

        #check for list.entries > 0 else self.close
        self["key_red"] = StaticText(_("Cancel"))
        self["key_green"] = StaticText(_("OK"))
        self["description"] = Label("")

        self["HelpWindow"] = Pixmap()
        self["HelpWindow"].hide()
        self["VKeyIcon"] = Boolean(False)

        self["actions"] = NumberActionMap(["SetupActions", "MenuActions"],
            {
                "cancel": self.keyCancel,
                "save": self.keySave,
                "menu": self.closeRecursive,
            }, -2)

        ConfigListScreen.__init__(self, self.list, session=session, on_change=self.changedEntry)
        self.createSetupList()
        self["config"].onSelectionChanged.append(self.__onSelectionChanged)
        self.setTitle(_(self.setup_title))

    def createSetupList(self):
        # Rebuild the config list, filtering items by setup level, hardware
        # "requires" flags and "conditional" expressions from setup.xml.
        currentItem = self["config"].getCurrent()
        self.list = []
        for x in self.setup:
            if not x.tag:
                continue
            if x.tag == 'item':
                item_level = int(x.get("level", 0))

                if item_level > config.usage.setup_level.index:
                    continue

                requires = x.get("requires")
                if requires:
                    # A leading '!' inverts the SystemInfo requirement.
                    if requires.startswith('!'):
                        if SystemInfo.get(requires[1:], False):
                            continue
                    elif not SystemInfo.get(requires, False):
                        continue

                conditional = x.get("conditional")
                if conditional and not eval(conditional):
                    continue

                item_text = _(x.get("text", "??").encode("UTF-8"))
                item_description = _(x.get("description", " ").encode("UTF-8")) # don't change

                b = eval(x.text or "")
                if b == "":
                    continue
                #add to configlist
                item = b
                # the first b is the item itself, ignored by the configList.
                # the second one is converted to string.
                if not isinstance(item, ConfigNothing):
                    self.list.append((item_text, item, item_description))

        self["config"].setList(self.list)
        if config.usage.sort_settings.value:
            self["config"].list.sort()
        self.moveToItem(currentItem)

    def moveToItem(self, item):
        # Restore the cursor to a given entry after the list was rebuilt.
        if item != self["config"].getCurrent():
            self["config"].setCurrentIndex(self.getIndexFromItem(item))

    def getIndexFromItem(self, item):
        return self["config"].list.index(item) if item in self["config"].list else 0

    def changedEntry(self):
        # Changing a bool/selection value can alter which items are visible
        # (via their "conditional" expressions), so rebuild immediately.
        if isinstance(self["config"].getCurrent()[1], ConfigBoolean) or isinstance(self["config"].getCurrent()[1], ConfigSelection):
            self.createSetupList()

    def __onSelectionChanged(self):
        # NOTE(review): presumably this defers a list rebuild until the
        # cursor moves again after resting on a non-bool/non-selection
        # entry — confirm intended behavior before changing.
        if self.force_update_list:
            self["config"].onSelectionChanged.remove(self.__onSelectionChanged)
            self.createSetupList()
            self["config"].onSelectionChanged.append(self.__onSelectionChanged)
            self.force_update_list = False
        if not (isinstance(self["config"].getCurrent()[1], ConfigBoolean) or isinstance(self["config"].getCurrent()[1], ConfigSelection)):
            self.force_update_list = True

    def run(self):
        self.keySave()
def getSetupTitle(id):
xmldata = setupdom.getroot()
for x in xmldata.findall("setup"):
if x.get("key") == id:<|fim▁hole|> return x.get("title", "").encode("UTF-8")
raise SetupError("unknown setup id '%s'!" % repr(id))<|fim▁end|> | |
<|file_name|>status.py<|end_file_name|><|fim▁begin|>import requests
class Status(object):
SKIP_LOCALES = ['en_US']
    def __init__(self, url, app=None, highlight=None):
        self.url = url  # JSON endpoint serving the translation-status history
        self.app = app  # restrict stats to one app; falsy means all apps
        self.highlight = highlight or []  # locales shown in their own section
        self.data = []  # decoded JSON history (filled by get_data)
        self.created = None  # timestamp of the newest data point
<|fim▁hole|>
resp = requests.get(self.url)
if resp.status_code != 200:
resp.raise_for_status()
self.data = resp.json()
self.created = self.data[-1]['created']
def summary(self):
"""Generates summary data of today's state"""
self.get_data()
highlight = self.highlight
last_item = self.data[-1]
output = {}
output['app'] = self.app or 'ALL'
data = last_item['locales']
if self.app:
get_item = lambda x: x['apps'][self.app]
else:
get_item = lambda x: x
apps = data.items()[0][1]['apps'].keys()
apps.sort()
output['apps'] = apps
items = [item for item in data.items() if item[0] not in highlight]
hitems = [item for item in data.items() if item[0] in highlight]
highlighted = []
if hitems:
for loc, loc_data in sorted(hitems, key=lambda x: -x[1]['percent']):
if loc in self.SKIP_LOCALES:
continue
item = get_item(loc_data)
total = item.get('total', -1)
translated = item.get('translated', -1)
percent = item.get('percent', -1)
untranslated_words = item.get('untranslated_words', -1)
highlighted.append({
'locale': loc,
'percent': percent,
'total': total,
'translated': translated,
'untranslated': total - translated,
'untranslated_words': untranslated_words
})
output['highlighted'] = highlighted
locales = []
for loc, loc_data in sorted(items, key=lambda x: -x[1]['percent']):
if loc in self.SKIP_LOCALES:
continue
item = get_item(loc_data)
total = item.get('total', -1)
translated = item.get('translated', -1)
percent = item.get('percent', -1)
untranslated_words = item.get('untranslated_words', -1)
locales.append({
'locale': loc,
'percent': percent,
'total': total,
'translated': translated,
'untranslated': total - translated,
'untranslated_words': untranslated_words
})
output['locales'] = locales
output['created'] = self.created
return output
def _mark_movement(self, data):
"""For each item, converts to a tuple of (movement, item)"""
ret = []
prev_day = None
for i, day in enumerate(data):
if i == 0:
ret.append(('', day))
prev_day = day
continue
if prev_day > day:
item = ('down', day)
elif prev_day < day:
item = ('up', day)
else:
item = ('equal', day)
prev_day = day
ret.append(item)
return ret
    def history(self):
        """Builds a per-locale completion history over the last 14 days,
        each value marked up/down/equal relative to the previous day.
        """
        self.get_data()
        data = self.data
        highlight = self.highlight
        app = self.app

        # Get a list of the locales we'll iterate through
        locales = sorted(data[-1]['locales'].keys())

        num_days = 14

        # Truncate the data to what we want to look at
        data = data[-num_days:]

        if app:
            get_data = lambda x: x['apps'][app]['percent']
        else:
            get_data = lambda x: x['percent']

        # Highlighted locales are shown separately from the rest.
        hlocales = [loc for loc in locales if loc in highlight]
        locales = [loc for loc in locales if loc not in highlight]

        output = {}
        output['app'] = self.app or 'All'
        output['headers'] = [item['created'] for item in data]
        output['highlighted'] = sorted(
            (loc, self._mark_movement(get_data(day['locales'][loc]) for day in data))
            for loc in hlocales
        )
        # Locales missing from a day's data count as 0% for that day.
        # NOTE(review): the fallback dict only has 'percent'; with an app
        # filter set, a missing locale would raise KeyError on 'apps' —
        # confirm whether that case can occur.
        output['locales'] = sorted(
            (loc, self._mark_movement(get_data(day['locales'].get(loc, {'percent': 0.0})) for day in data))
            for loc in locales
        )
        output['created'] = self.created
return output<|fim▁end|> | def get_data(self):
if self.data:
return |
<|file_name|>RecordButtonUtil.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015, 张涛.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kymjs.blog.ui.widget;
import java.io.File;
import java.io.IOException;
import org.kymjs.blog.AppConfig;
import org.kymjs.blog.R;
import org.kymjs.kjframe.ui.KJActivityStack;
import org.kymjs.kjframe.ui.ViewInject;
import org.kymjs.kjframe.utils.FileUtils;
import org.kymjs.kjframe.utils.StringUtils;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.widget.TextView;
/**
*
* {@link #RecordButton}需要的工具类
*
* @author kymjs (http://www.kymjs.com/)
*
*/
public class RecordButtonUtil {
    private final static String TAG = "AudioUtil";
    public static final String AUDOI_DIR = FileUtils.getSDCardPath()
            + File.separator + AppConfig.audioPath; // root dir for saved recordings

    private String mAudioPath; // path of the sound file to play
    private boolean mIsRecording;// whether we are currently recording
    private boolean mIsPlaying;// whether we are currently playing

    private MediaRecorder mRecorder;
    private MediaPlayer mPlayer;
    private OnPlayListener listener;
    /** @return whether a sound is currently being played back. */
    public boolean isPlaying() {
        return mIsPlaying;
    }
    /**
     * Set the filesystem path of the sound to be played back.
     *
     * @param path path of the audio file to play
     */
    public void setAudioPath(String path) {
        this.mAudioPath = path;
    }
    /**
     * Register the listener notified when playback starts and stops.
     *
     * @param l the callback to invoke, may be null to remove it
     */
    public void setOnPlayListener(OnPlayListener l) {
        this.listener = l;
    }
    // Initialize the recorder: microphone input, AMR-NB container and encoder,
    // output written to mAudioPath. Also marks recording as in progress
    // (prepare()/start() are issued afterwards in recordAudio()).
    private void initRecorder() {
        mRecorder = new MediaRecorder();
        mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.AMR_NB);
        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        mRecorder.setOutputFile(mAudioPath);
        mIsRecording = true;
    }
/**
* 开始录音,并保存到文件中
*/
public void recordAudio() {
initRecorder();
try {
mRecorder.prepare();
mRecorder.start();
} catch (IOException e) {
ViewInject.toast("小屁孩不听你说话了,请返回重试");<|fim▁hole|> /**
     * Get a coarse input-level reading; only meaningful while recording.
     *
     * @return a small non-negative level derived from the recorder's maximum
     *         amplitude since the last call, or 0 when not recording
     */
    public int getVolumn() {
        int volumn = 0;
        // Only sample while the recorder is active.
        if (mRecorder != null && mIsRecording) {
            volumn = mRecorder.getMaxAmplitude();
            if (volumn != 0)
                // 10*log10(amplitude) gives a decibel-like value; /5 compresses
                // it into a handful of coarse buckets.
                volumn = (int) (10 * Math.log(volumn) / Math.log(10)) / 5;
        }
        return volumn;
    }
    /**
     * Stop recording and release the recorder. Safe to call when no
     * recording is in progress.
     */
    public void stopRecord() {
        if (mRecorder != null) {
            mRecorder.stop();
            mRecorder.release();
            mRecorder = null;
            mIsRecording = false;
        }
    }
public void stopPlay() {
if (mPlayer != null) {
mPlayer.stop();
mPlayer.release();
mPlayer = null;
mIsPlaying = false;
if (listener != null) {
listener.stopPlay();
}
}
}
    /**
     * Toggle playback of the given audio file.
     * <p>
     * When nothing is playing and {@code audioPath} is non-empty, playback is
     * started, the clip length in whole seconds is shown in {@code timeView}
     * (if provided) and {@link OnPlayListener#starPlay()} fires. When a sound
     * is already playing, it is stopped instead. An empty path shows a toast.
     *
     * @param audioPath path of the audio file to play
     * @param timeView  optional label that receives the clip length, e.g. "3s"
     */
    public void startPlay(String audioPath, TextView timeView) {
        if (!mIsPlaying) {
            if (!StringUtils.isEmpty(audioPath)) {
                mPlayer = new MediaPlayer();
                try {
                    mPlayer.setDataSource(audioPath);
                    mPlayer.prepare();
                    if (timeView != null) {
                        // +500 rounds the millisecond duration to the nearest second.
                        int len = (mPlayer.getDuration() + 500) / 1000;
                        timeView.setText(len + "s");
                    }
                    mPlayer.start();
                    if (listener != null) {
                        listener.starPlay();
                    }
                    mIsPlaying = true;
                    // Release the player and reset state once the clip finishes.
                    mPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                        @Override
                        public void onCompletion(MediaPlayer mp) {
                            stopPlay();
                        }
                    });
                } catch (Exception e) {
                    e.printStackTrace();
                }
            } else {
                ViewInject.toast(KJActivityStack.create().topActivity()
                        .getString(R.string.record_sound_notfound));
            }
        } else {
            stopPlay();
        } // end playing
    }
    /**
     * Start (or toggle) playback of the sound previously configured with
     * {@link #setAudioPath(String)}.
     */
    public void startPlay() {
        startPlay(mAudioPath, null);
    }
public interface OnPlayListener {
/**
* 播放声音结束时调用
*/
void stopPlay();
/**
* 播放声音开始时调用
*/
void starPlay();
}
}<|fim▁end|> | }
}
|
<|file_name|>xmlImport.py<|end_file_name|><|fim▁begin|>from PyQt5.QtCore import pyqtSlot, QThread, pyqtSignal
import os
from PyQt5.QtWidgets import QFileDialog, QProgressDialog, QMessageBox
from PyQt5.QtCore import pyqtSlot, QObject
from books.soldiers import processData
import route_gui
from lxml import etree
import multiprocessing
import math
class XmlImport(QObject):
threadUpdateSignal = pyqtSignal(int, int, name="progressUpdate")
threadExceptionSignal = pyqtSignal(object, name="exceptionInProcess")
threadResultsSignal = pyqtSignal(dict, name="results")
finishedSignal = pyqtSignal(dict, str, name="processFinished")
def __init__(self, parent):
super(XmlImport, self).__init__(parent)
self.parent = parent
self.processCount = 0
self.result = {}
self.thread = QThread(parent = self.parent)
self.threadUpdateSignal.connect(self._updateProgressBarInMainThread)
self.threadExceptionSignal.connect(self._loadingFailed)
self.threadResultsSignal.connect(self._processFinished)
self.filepath = ""
def importOne(self, xmlEntry):
if self.processor is not None:
result = self.processor.extractOne(xmlEntry)
return result
else:
return None
    @pyqtSlot()
    def openXMLFile(self):
        """Ask the user for an xml data file and start extracting it.

        Does nothing if the dialog is cancelled. On success the main window
        title is updated and extraction starts in the worker thread.
        """
        filename = QFileDialog.getOpenFileName(self.parent, "Open xml-file containing the data to be analyzed.",
                                               ".", "Person data files (*.xml);;All files (*)")
        if filename[0] != "":
            self.filepath = filename[0]
            self.parent.setWindowTitle("Kaira " + filename[0])
            self._analyzeOpenedXml(filename)
    def _analyzeOpenedXml(self, file):
        """Show a modal progress dialog and run the extraction in the worker thread.

        :param file: (path, selected_filter) tuple as returned by
            QFileDialog.getOpenFileName.
        """
        self.progressDialog = QProgressDialog(self.parent)
        self.progressDialog.setCancelButton(None)  # extraction cannot be cancelled
        self.progressDialog.setLabelText("Extracting provided datafile...")
        self.progressDialog.open()
        self.progressDialog.setValue(0)
        self.file = file
        # Run _runProcess on the worker thread so the UI stays responsive.
        self.thread.run = self._runProcess
        self.thread.start()
    def _runProcess(self):
        """Worker-thread body: parse the xml file and run the bookseries processor.

        Results and failures are delivered back to the GUI thread via the
        result/exception signals; exceptions are not propagated across the
        thread boundary unless the DEV environment flag is set.
        """
        try:
            xmlDataDocument = self._getXMLroot(self.file[0])
            #TODO: Read the xml metadata.
            try:
                #TODO: Multiprocessing support?
                # The 'bookseries' root attribute selects which processor class to use.
                self.processor = route_gui.Router.get_processdata_class(xmlDataDocument.attrib["bookseries"])(self._processUpdateCallback)
                result = self.processor.startExtractionProcess(xmlDataDocument, self.file[0])
                self.threadResultsSignal.emit(result)
            except KeyError:
                # Root element is missing the mandatory 'bookseries' attribute.
                raise MetadataException()
        except Exception as e:
            if "DEV" in os.environ and os.environ["DEV"]:
                raise e
            else:
                print(e)
                self.threadExceptionSignal.emit(e)
    @pyqtSlot(int, int)
    def _updateProgressBarInMainThread(self, i, max):
        """Update the progress dialog; invoked in the GUI thread via a signal."""
        self.progressDialog.setRange(0, max)
        self.progressDialog.setValue(i)
    @pyqtSlot(object)
    def _loadingFailed(self, e):
        """Close the progress dialog and show the user why extraction failed."""
        self.progressDialog.cancel()
        # Imported here rather than at module top — presumably to keep pymongo
        # optional until an error needs classifying; confirm before hoisting.
        import pymongo
        errMessage = "Error in data-file. Extraction failed. Is the xml valid and in utf-8 format? More info: "
        if isinstance(e, pymongo.errors.ServerSelectionTimeoutError):
            errMessage = "Couldn't connect to database. Try going to '/mongodb/data/db' in application directory and deleting 'mongod.lock' file and restart application. More info: "
        msgbox = QMessageBox()
        msgbox.information(self.parent, "Extraction failed", errMessage + str(e))
        msgbox.show()
    @pyqtSlot(dict)
    def _processFinished(self, result):
        """Store the extraction result and re-emit it together with the file path."""
        self.result = result
        self.finishedSignal.emit(self.result, self.filepath)
def _processUpdateCallback(self, i, max):
self.threadUpdateSignal.emit(i, max)<|fim▁hole|>
def _getXMLroot(self, filepath):
#read the data in XML-format to be processed
parser = etree.XMLParser(encoding="utf-8")
tree = etree.parse(filepath, parser=parser) #ET.parse(filepath)
return tree.getroot()
class MetadataException(Exception):
    """Raised when the XML root element lacks the mandatory ``bookseries`` attribute.

    That attribute selects which processor class handles the file (see
    ``XmlImport._runProcess``), so the import cannot proceed without it.
    """

    def __init__(self):
        self.msg = "ERROR: The document doesn't contain bookseries attribute in the beginning of the file. Couldn't import. Try " \
                   "to generate new xml-file from the source ocr-text or add the missing attribute to the file manually."
        # Hand the message to Exception so str(e) and e.args behave normally;
        # the old hand-rolled __str__ returned repr(msg), wrapping it in quotes.
        super(MetadataException, self).__init__(self.msg)
<|file_name|>BlockReaderUtil.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.io.IOException;
/**
* For sharing between the local and remote block reader implementations.
*/
class BlockReaderUtil {<|fim▁hole|> int n = 0;
for (;;) {
int nread = reader.read(buf, offset + n, len - n);
if (nread <= 0)
return (n == 0) ? nread : n;
n += nread;
if (n >= len)
return n;
}
}
/* See {@link BlockReader#readFully(byte[], int, int)} */
public static void readFully(BlockReader reader,
byte[] buf, int off, int len) throws IOException {
int toRead = len;
while (toRead > 0) {
int ret = reader.read(buf, off, toRead);
if (ret < 0) {
throw new IOException("Premature EOF from inputStream");
}
toRead -= ret;
off += ret;
}
}
}<|fim▁end|> |
/* See {@link BlockReader#readAll(byte[], int, int)} */
public static int readAll(BlockReader reader,
byte[] buf, int offset, int len) throws IOException { |
<|file_name|>scale_button.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use libc::c_double;
use cast::GTK_SCALEBUTTON;
use ffi;
pub trait ScaleButtonTrait: ::WidgetTrait + ::ContainerTrait + ::ButtonTrait {
    /// Sets the `Adjustment` that drives this scale button's value range.
    fn set_adjustment(&self, adjustment: &::Adjustment) -> () {
        unsafe {
            ffi::gtk_scale_button_set_adjustment(GTK_SCALEBUTTON(self.unwrap_widget()), adjustment.unwrap_pointer());
        }
    }
    /// Sets the button's current value (clamping, if any, is done by GTK).
    fn set_value(&self, value: f64) -> () {
        unsafe {
            ffi::gtk_scale_button_set_value(GTK_SCALEBUTTON(self.unwrap_widget()), value as c_double);
        }
    }
    /// Returns the button's current value.
    fn get_value(&self) -> f64 {
        unsafe {
            ffi::gtk_scale_button_get_value(GTK_SCALEBUTTON(self.unwrap_widget())) as f64
        }
    }
fn get_adjustment(&self) -> ::Adjustment {
unsafe {
::Adjustment::wrap_pointer(ffi::gtk_scale_button_get_adjustment(GTK_SCALEBUTTON(self.unwrap_widget())))
}<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>python.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python3
import cgi
print("Content-type: text/html")
print('''
<!DOCTYPE html>
<html>
<head>
<title>Python</title>
</head>
<body>
<h1>Python</h1><|fim▁hole|> </body>
</html>
''')<|fim▁end|> | <p>Python</p>
<p>This is the article for Python</p> |
<|file_name|>consumer_group_defaults_test.go<|end_file_name|><|fim▁begin|>/*
* Copyright 2021 The Knative Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package v1alpha1
import (
"context"
"testing"
"github.com/google/go-cmp/cmp"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/utils/pointer"
)
func TestConsumerGroupSetDefaults(t *testing.T) {
tests := []struct {
name string
ctx context.Context
given *ConsumerGroup
want *ConsumerGroup
}{
{
name: "default replicas",
ctx: context.Background(),
given: &ConsumerGroup{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
Name: "name",
},
Spec: ConsumerGroupSpec{
Template: ConsumerTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
},
},
},
},
want: &ConsumerGroup{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
Name: "name",
},
Spec: ConsumerGroupSpec{
Template: ConsumerTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
},
},
Replicas: pointer.Int32Ptr(1),
},
},
},
{
name: "default selector",
ctx: context.Background(),
given: &ConsumerGroup{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
Name: "name",
},
Spec: ConsumerGroupSpec{
Template: ConsumerTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
Labels: map[string]string{"app": "app"},
},
},
Replicas: pointer.Int32Ptr(1),
},
},
want: &ConsumerGroup{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
Name: "name",
},
Spec: ConsumerGroupSpec{
Template: ConsumerTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
Labels: map[string]string{"app": "app"},
},
},
Replicas: pointer.Int32Ptr(1),
Selector: map[string]string{"app": "app"},
},
},
},
{
name: "default namespace",
ctx: context.Background(),
given: &ConsumerGroup{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
Name: "name",
},
Spec: ConsumerGroupSpec{
Replicas: pointer.Int32Ptr(1),
},
},
want: &ConsumerGroup{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
Name: "name",<|fim▁hole|> },
Spec: ConsumerGroupSpec{
Template: ConsumerTemplateSpec{
ObjectMeta: metav1.ObjectMeta{
Namespace: "ns",
},
},
Replicas: pointer.Int32Ptr(1),
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tt.given.SetDefaults(tt.ctx)
if diff := cmp.Diff(tt.want, tt.given); diff != "" {
t.Error("(-want, +got)", diff)
}
})
}
}<|fim▁end|> | |
<|file_name|>rpiemu.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os, Queue
import sys
from time import sleep<|fim▁hole|># External
if len(sys.argv) > 1:
print "ARGS:", str(sys.argv)
sys.path.append(os.path.dirname( sys.argv[1] ))
########################################################################
print("=== Starting RPiEmu v0.5 ===")
# Qemu python wrapper that connects to the TCP server
rpi = QemuInstance()
rpi.start()
#####################################################
from models.totumduino import TotumDuino
from models.fabtotum import FABTotum
# FABTotum model
ft = FABTotum()
# Totumduino model
td = TotumDuino(ft)
# Start a TD thread
td.run()
print("* Totumduino thread started")
# UART line parser
parser = UARTLineParser(qemu=rpi, line_handler=td.uart0_transfer)
parser.start()
parser.loop()
# Finish the TD thread
td.finish()<|fim▁end|> | from threading import Thread
from libs.qemu import QemuInstance, UARTLineParser
|
<|file_name|>gecko.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
// `data` comes from components/style/properties.mako.rs; see build.rs for more details.
<%!
from data import to_camel_case, to_camel_case_lower
from data import Keyword
%>
<%namespace name="helpers" file="/helpers.mako.rs" />
use crate::Atom;
use app_units::Au;
use crate::computed_value_flags::*;
use crate::custom_properties::CustomPropertiesMap;
use crate::gecko_bindings::bindings;
% for style_struct in data.style_structs:
use crate::gecko_bindings::structs::${style_struct.gecko_ffi_name};
use crate::gecko_bindings::bindings::Gecko_Construct_Default_${style_struct.gecko_ffi_name};
use crate::gecko_bindings::bindings::Gecko_CopyConstruct_${style_struct.gecko_ffi_name};
use crate::gecko_bindings::bindings::Gecko_Destroy_${style_struct.gecko_ffi_name};
% endfor
use crate::gecko_bindings::bindings::Gecko_CopyCounterStyle;
use crate::gecko_bindings::bindings::Gecko_CopyFontFamilyFrom;
use crate::gecko_bindings::bindings::Gecko_EnsureImageLayersLength;
use crate::gecko_bindings::bindings::Gecko_nsStyleFont_SetLang;
use crate::gecko_bindings::bindings::Gecko_nsStyleFont_CopyLangFrom;
use crate::gecko_bindings::structs;
use crate::gecko_bindings::structs::nsCSSPropertyID;
use crate::gecko_bindings::structs::mozilla::PseudoStyleType;
use crate::gecko::data::PerDocumentStyleData;
use crate::gecko::values::round_border_to_device_pixels;
use crate::logical_geometry::WritingMode;
use crate::media_queries::Device;
use crate::properties::longhands;
use crate::rule_tree::StrongRuleNode;
use crate::selector_parser::PseudoElement;
use servo_arc::{Arc, RawOffsetArc, UniqueArc};
use std::mem::{forget, MaybeUninit};
use std::{cmp, ops, ptr};
use crate::values::{self, CustomIdent, Either, KeyframesName, None_};
use crate::values::computed::{Percentage, TransitionProperty};
use crate::values::computed::BorderStyle;
use crate::values::computed::font::FontSize;
use crate::values::generics::column::ColumnCount;
pub mod style_structs {
% for style_struct in data.style_structs:
pub use super::${style_struct.gecko_struct_name} as ${style_struct.name};
unsafe impl Send for ${style_struct.name} {}
unsafe impl Sync for ${style_struct.name} {}
% endfor
}
/// FIXME(emilio): This is completely duplicated with the other properties code.
pub type ComputedValuesInner = structs::ServoComputedData;
#[repr(C)]
pub struct ComputedValues(structs::mozilla::ComputedStyle);
impl ComputedValues {
#[inline]
pub (crate) fn as_gecko_computed_style(&self) -> &structs::ComputedStyle {
&self.0
}
pub fn new(
pseudo: Option<<&PseudoElement>,
custom_properties: Option<Arc<CustomPropertiesMap>>,
writing_mode: WritingMode,
flags: ComputedValueFlags,
rules: Option<StrongRuleNode>,
visited_style: Option<Arc<ComputedValues>>,
% for style_struct in data.style_structs:
${style_struct.ident}: Arc<style_structs::${style_struct.name}>,
% endfor
) -> Arc<Self> {
ComputedValuesInner::new(
custom_properties,
writing_mode,
flags,
rules,
visited_style,
% for style_struct in data.style_structs:
${style_struct.ident},
% endfor
).to_outer(pseudo)
}
pub fn default_values(doc: &structs::Document) -> Arc<Self> {
ComputedValuesInner::new(
/* custom_properties = */ None,
/* writing_mode = */ WritingMode::empty(), // FIXME(bz): This seems dubious
ComputedValueFlags::empty(),
/* rules = */ None,
/* visited_style = */ None,
% for style_struct in data.style_structs:
style_structs::${style_struct.name}::default(doc),
% endfor
).to_outer(None)
}
#[inline]
pub fn is_pseudo_style(&self) -> bool {
self.0.mPseudoType != PseudoStyleType::NotPseudo
}
#[inline]
pub fn pseudo(&self) -> Option<PseudoElement> {
if !self.is_pseudo_style() {
return None;
}
PseudoElement::from_pseudo_type(self.0.mPseudoType)
}
#[inline]
pub fn is_first_line_style(&self) -> bool {
self.pseudo() == Some(PseudoElement::FirstLine)
}
/// Returns true if the display property is changed from 'none' to others.
pub fn is_display_property_changed_from_none(
&self,
old_values: Option<<&ComputedValues>
) -> bool {
use crate::properties::longhands::display::computed_value::T as Display;
old_values.map_or(false, |old| {
let old_display_style = old.get_box().clone_display();
let new_display_style = self.get_box().clone_display();
old_display_style == Display::None &&
new_display_style != Display::None
})
}
}
impl Drop for ComputedValues {
    fn drop(&mut self) {
        // The inner Gecko ComputedStyle owns C++-side resources that Rust
        // cannot free directly; hand it back to Gecko for destruction.
        unsafe {
            bindings::Gecko_ComputedStyle_Destroy(&mut self.0);
        }
    }
}
unsafe impl Sync for ComputedValues {}
unsafe impl Send for ComputedValues {}
impl Clone for ComputedValues {
    fn clone(&self) -> Self {
        // ComputedValues is only ever produced behind an Arc (see to_outer),
        // so a by-value clone is never expected to be reachable.
        unreachable!()
    }
}
impl Clone for ComputedValuesInner {
    fn clone(&self) -> Self {
        // Cheap shallow copy: the style structs, custom properties and visited
        // style are Arc/RawOffsetArc handles (refcount bump only), the rest
        // are small value types.
        ComputedValuesInner {
            % for style_struct in data.style_structs:
            ${style_struct.gecko_name}: self.${style_struct.gecko_name}.clone(),
            % endfor
            custom_properties: self.custom_properties.clone(),
            writing_mode: self.writing_mode.clone(),
            flags: self.flags.clone(),
            rules: self.rules.clone(),
            visited_style: self.visited_style.clone(),
        }
    }
}
impl ComputedValuesInner {
pub fn new(
custom_properties: Option<Arc<CustomPropertiesMap>>,
writing_mode: WritingMode,
flags: ComputedValueFlags,
rules: Option<StrongRuleNode>,
visited_style: Option<Arc<ComputedValues>>,
% for style_struct in data.style_structs:
${style_struct.ident}: Arc<style_structs::${style_struct.name}>,
% endfor
) -> Self {
Self {
custom_properties,
writing_mode,
rules,
visited_style: visited_style.map(Arc::into_raw_offset),
flags,
% for style_struct in data.style_structs:
${style_struct.gecko_name}: Arc::into_raw_offset(${style_struct.ident}),
% endfor
}
}
fn to_outer(
self,
pseudo: Option<<&PseudoElement>,
) -> Arc<ComputedValues> {
let pseudo_ty = match pseudo {
Some(p) => p.pseudo_type(),
None => structs::PseudoStyleType::NotPseudo,
};
unsafe {
let mut arc = UniqueArc::<ComputedValues>::new_uninit();
bindings::Gecko_ComputedStyle_Init(
arc.as_mut_ptr() as *mut _,
&self,
pseudo_ty,
);
// We're simulating move semantics by having C++ do a memcpy and then forgetting
// it on this end.
forget(self);
UniqueArc::assume_init(arc).shareable()
}
}
}
impl ops::Deref for ComputedValues {
type Target = ComputedValuesInner;
fn deref(&self) -> &ComputedValuesInner {
&self.0.mSource
}
}
impl ops::DerefMut for ComputedValues {
fn deref_mut(&mut self) -> &mut ComputedValuesInner {
&mut self.0.mSource
}
}
impl ComputedValuesInner {
/// Returns true if the value of the `content` property would make a
/// pseudo-element not rendered.
#[inline]
pub fn ineffective_content_property(&self) -> bool {
self.get_counters().ineffective_content_property()
}
% for style_struct in data.style_structs:
#[inline]
pub fn clone_${style_struct.name_lower}(&self) -> Arc<style_structs::${style_struct.name}> {
Arc::from_raw_offset(self.${style_struct.gecko_name}.clone())
}
#[inline]
pub fn get_${style_struct.name_lower}(&self) -> &style_structs::${style_struct.name} {
&self.${style_struct.gecko_name}
}
pub fn ${style_struct.name_lower}_arc(&self) -> &RawOffsetArc<style_structs::${style_struct.name}> {
&self.${style_struct.gecko_name}
}
#[inline]
pub fn mutate_${style_struct.name_lower}(&mut self) -> &mut style_structs::${style_struct.name} {
RawOffsetArc::make_mut(&mut self.${style_struct.gecko_name})
}
% endfor
/// Gets the raw visited style. Useful for memory reporting.
pub fn get_raw_visited_style(&self) -> &Option<RawOffsetArc<ComputedValues>> {
&self.visited_style
}
}
<%def name="declare_style_struct(style_struct)">
pub use crate::gecko_bindings::structs::mozilla::Gecko${style_struct.gecko_name} as ${style_struct.gecko_struct_name};
impl ${style_struct.gecko_struct_name} {
pub fn gecko(&self) -> &${style_struct.gecko_ffi_name} {
&self.gecko
}
pub fn gecko_mut(&mut self) -> &mut ${style_struct.gecko_ffi_name} {
&mut self.gecko
}
}
</%def>
<%def name="impl_simple_setter(ident, gecko_ffi_name)">
#[allow(non_snake_case)]
pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
${set_gecko_property(gecko_ffi_name, "From::from(v)")}
}
</%def>
<%def name="impl_simple_clone(ident, gecko_ffi_name)">
#[allow(non_snake_case)]
pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
From::from(self.gecko.${gecko_ffi_name}.clone())
}
</%def>
<%def name="impl_simple_copy(ident, gecko_ffi_name, *kwargs)">
#[allow(non_snake_case)]
pub fn copy_${ident}_from(&mut self, other: &Self) {
self.gecko.${gecko_ffi_name} = other.gecko.${gecko_ffi_name}.clone();
}
#[allow(non_snake_case)]
pub fn reset_${ident}(&mut self, other: &Self) {
self.copy_${ident}_from(other)
}
</%def>
<%!
def get_gecko_property(ffi_name, self_param = "self"):
return "%s.gecko.%s" % (self_param, ffi_name)
def set_gecko_property(ffi_name, expr):
return "self.gecko.%s = %s;" % (ffi_name, expr)
%>
<%def name="impl_keyword_setter(ident, gecko_ffi_name, keyword, cast_type='u8')">
#[allow(non_snake_case)]
pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
use crate::properties::longhands::${ident}::computed_value::T as Keyword;
// FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts
let result = match v {
% for value in keyword.values_for('gecko'):
Keyword::${to_camel_case(value)} =>
structs::${keyword.gecko_constant(value)} ${keyword.maybe_cast(cast_type)},
% endfor
};
${set_gecko_property(gecko_ffi_name, "result")}
}
</%def>
<%def name="impl_keyword_clone(ident, gecko_ffi_name, keyword, cast_type='u8')">
#[allow(non_snake_case)]
pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
use crate::properties::longhands::${ident}::computed_value::T as Keyword;
// FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts
// Some constant macros in the gecko are defined as negative integer(e.g. font-stretch).
// And they are convert to signed integer in Rust bindings. We need to cast then
// as signed type when we have both signed/unsigned integer in order to use them
// as match's arms.
// Also, to use same implementation here we use casted constant if we have only singed values.
% if keyword.gecko_enum_prefix is None:
% for value in keyword.values_for('gecko'):
const ${keyword.casted_constant_name(value, cast_type)} : ${cast_type} =
structs::${keyword.gecko_constant(value)} as ${cast_type};
% endfor
match ${get_gecko_property(gecko_ffi_name)} as ${cast_type} {
% for value in keyword.values_for('gecko'):
${keyword.casted_constant_name(value, cast_type)} => Keyword::${to_camel_case(value)},
% endfor
% if keyword.gecko_inexhaustive:
_ => panic!("Found unexpected value in style struct for ${ident} property"),
% endif
}
% else:
match ${get_gecko_property(gecko_ffi_name)} {
% for value in keyword.values_for('gecko'):
structs::${keyword.gecko_constant(value)} => Keyword::${to_camel_case(value)},
% endfor
% if keyword.gecko_inexhaustive:
_ => panic!("Found unexpected value in style struct for ${ident} property"),
% endif
}
% endif
}
</%def>
<%def name="impl_keyword(ident, gecko_ffi_name, keyword, cast_type='u8', **kwargs)">
<%call expr="impl_keyword_setter(ident, gecko_ffi_name, keyword, cast_type, **kwargs)"></%call>
<%call expr="impl_simple_copy(ident, gecko_ffi_name, **kwargs)"></%call>
<%call expr="impl_keyword_clone(ident, gecko_ffi_name, keyword, cast_type)"></%call>
</%def>
<%def name="impl_simple(ident, gecko_ffi_name)">
<%call expr="impl_simple_setter(ident, gecko_ffi_name)"></%call>
<%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call>
<%call expr="impl_simple_clone(ident, gecko_ffi_name)"></%call>
</%def>
<%def name="impl_absolute_length(ident, gecko_ffi_name)">
#[allow(non_snake_case)]
pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
${set_gecko_property(gecko_ffi_name, "v.to_i32_au()")}
}
<%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call>
#[allow(non_snake_case)]
pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
Au(self.gecko.${gecko_ffi_name}).into()
}
</%def>
<%def name="impl_non_negative_length(ident, gecko_ffi_name, inherit_from=None,
round_to_pixels=False)">
#[allow(non_snake_case)]
pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
let value = {
% if round_to_pixels:
let au_per_device_px = Au(self.gecko.mTwipsPerPixel);
round_border_to_device_pixels(Au::from(v), au_per_device_px).0
% else:
v.0.to_i32_au()
% endif
};
% if inherit_from:
self.gecko.${inherit_from} = value;
% endif
self.gecko.${gecko_ffi_name} = value;
}
#[allow(non_snake_case)]
pub fn copy_${ident}_from(&mut self, other: &Self) {
% if inherit_from:
self.gecko.${inherit_from} = other.gecko.${inherit_from};
// NOTE: This is needed to easily handle the `unset` and `initial`
// keywords, which are implemented calling this function.
//
// In practice, this means that we may have an incorrect value here, but
// we'll adjust that properly in the style fixup phase.
//
// FIXME(emilio): We could clean this up a bit special-casing the reset_
// function below.
self.gecko.${gecko_ffi_name} = other.gecko.${inherit_from};
% else:
self.gecko.${gecko_ffi_name} = other.gecko.${gecko_ffi_name};
% endif
}
#[allow(non_snake_case)]
pub fn reset_${ident}(&mut self, other: &Self) {
self.copy_${ident}_from(other)
}
#[allow(non_snake_case)]
pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
Au(self.gecko.${gecko_ffi_name}).into()
}
</%def>
<%def name="impl_split_style_coord(ident, gecko_ffi_name, index)">
#[allow(non_snake_case)]
pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
self.gecko.${gecko_ffi_name}.${index} = v;
}
#[allow(non_snake_case)]
pub fn copy_${ident}_from(&mut self, other: &Self) {
self.gecko.${gecko_ffi_name}.${index} =
other.gecko.${gecko_ffi_name}.${index}.clone();
}
#[allow(non_snake_case)]
pub fn reset_${ident}(&mut self, other: &Self) {
self.copy_${ident}_from(other)
}
#[allow(non_snake_case)]
pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
self.gecko.${gecko_ffi_name}.${index}.clone()
}
</%def>
<%def name="copy_sides_style_coord(ident)">
<% gecko_ffi_name = "m" + to_camel_case(ident) %>
#[allow(non_snake_case)]
pub fn copy_${ident}_from(&mut self, other: &Self) {
% for side in SIDES:
self.gecko.${gecko_ffi_name}.data_at_mut(${side.index})
.copy_from(&other.gecko.${gecko_ffi_name}.data_at(${side.index}));
% endfor
${ caller.body() }
}
#[allow(non_snake_case)]
pub fn reset_${ident}(&mut self, other: &Self) {
self.copy_${ident}_from(other)
}
</%def>
<%def name="impl_corner_style_coord(ident, gecko_ffi_name, corner)">
#[allow(non_snake_case)]
pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
self.gecko.${gecko_ffi_name}.${corner} = v;
}
#[allow(non_snake_case)]
pub fn copy_${ident}_from(&mut self, other: &Self) {
self.gecko.${gecko_ffi_name}.${corner} =
other.gecko.${gecko_ffi_name}.${corner}.clone();
}
#[allow(non_snake_case)]
pub fn reset_${ident}(&mut self, other: &Self) {
self.copy_${ident}_from(other)
}
#[allow(non_snake_case)]
pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
self.gecko.${gecko_ffi_name}.${corner}.clone()
}
</%def>
<%def name="impl_logical(name, **kwargs)">
${helpers.logical_setter(name)}
</%def>
<%def name="impl_style_struct(style_struct)">
impl ${style_struct.gecko_struct_name} {
#[allow(dead_code, unused_variables)]
pub fn default(document: &structs::Document) -> Arc<Self> {
unsafe {
let mut result = UniqueArc::<Self>::new_uninit();
// FIXME(bug 1595895): Zero the memory to keep valgrind happy, but
// these looks like Valgrind false-positives at a quick glance.
ptr::write_bytes::<Self>(result.as_mut_ptr(), 0, 1);
Gecko_Construct_Default_${style_struct.gecko_ffi_name}(
result.as_mut_ptr() as *mut _,
document,
);
UniqueArc::assume_init(result).shareable()
}
}
}
impl Drop for ${style_struct.gecko_struct_name} {
fn drop(&mut self) {
unsafe {
Gecko_Destroy_${style_struct.gecko_ffi_name}(&mut *self.gecko);
}
}
}
impl Clone for ${style_struct.gecko_struct_name} {
fn clone(&self) -> Self {
unsafe {
let mut result = MaybeUninit::<Self>::uninit();
// FIXME(bug 1595895): Zero the memory to keep valgrind happy, but
// these looks like Valgrind false-positives at a quick glance.
ptr::write_bytes::<Self>(result.as_mut_ptr(), 0, 1);
Gecko_CopyConstruct_${style_struct.gecko_ffi_name}(result.as_mut_ptr() as *mut _, &*self.gecko);
result.assume_init()
}
}
}
</%def>
<%def name="impl_simple_type_with_conversion(ident, gecko_ffi_name)">
#[allow(non_snake_case)]
pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
self.gecko.${gecko_ffi_name} = From::from(v)
}
<% impl_simple_copy(ident, gecko_ffi_name) %>
#[allow(non_snake_case)]
pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
From::from(self.gecko.${gecko_ffi_name})
}
</%def>
<%def name="impl_font_settings(ident, gecko_type, tag_type, value_type, gecko_value_type)">
<%
gecko_ffi_name = to_camel_case_lower(ident)
%>
pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) {
let iter = v.0.iter().map(|other| structs::${gecko_type} {
mTag: other.tag.0,
mValue: other.value as ${gecko_value_type},
});
self.gecko.mFont.${gecko_ffi_name}.assign_from_iter_pod(iter);
}
pub fn copy_${ident}_from(&mut self, other: &Self) {
let iter = other.gecko.mFont.${gecko_ffi_name}.iter().map(|s| *s);
self.gecko.mFont.${gecko_ffi_name}.assign_from_iter_pod(iter);
}
pub fn reset_${ident}(&mut self, other: &Self) {
self.copy_${ident}_from(other)
}
pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
use crate::values::generics::font::{FontSettings, FontTag, ${tag_type}};
FontSettings(
self.gecko.mFont.${gecko_ffi_name}.iter().map(|gecko_font_setting| {
${tag_type} {
tag: FontTag(gecko_font_setting.mTag),
value: gecko_font_setting.mValue as ${value_type},
}
}).collect::<Vec<_>>().into_boxed_slice()
)
}
</%def>
## Wraps an `impl` block for the Gecko struct backing `style_struct_name`:
## first emits the caller's hand-written methods, then auto-generates
## accessors for every longhand of the struct not named in `skip_longhands`
## (the special value "*" skips all of them).
<%def name="impl_trait(style_struct_name, skip_longhands='')">
    <%
        style_struct = next(x for x in data.style_structs if x.name == style_struct_name)
        longhands = [x for x in style_struct.longhands
                    if not (skip_longhands == "*" or x.name in skip_longhands.split())]
        # Types used with predefined_type()-defined properties that we can auto-generate.
        predefined_types = {
            "MozScriptMinSize": impl_absolute_length,
        }
        # Picks the generator helper for one longhand and invokes it with the
        # keyword arguments that helper expects.
        def longhand_method(longhand):
            args = dict(ident=longhand.ident, gecko_ffi_name=longhand.gecko_ffi_name)
            # get the method and pass additional keyword or type-specific arguments
            if longhand.logical:
                method = impl_logical
                args.update(name=longhand.name)
            elif longhand.keyword:
                method = impl_keyword
                args.update(keyword=longhand.keyword)
                if "font" in longhand.ident:
                    args.update(cast_type=longhand.cast_type)
            elif longhand.predefined_type in predefined_types:
                method = predefined_types[longhand.predefined_type]
            else:
                method = impl_simple
            method(**args)
    %>
    impl ${style_struct.gecko_struct_name} {
        /*
         * Manually-Implemented Methods.
         */
        ${caller.body().strip()}
        /*
         * Auto-Generated Methods.
         */
        <%
        for longhand in longhands:
            longhand_method(longhand)
        %>
    }
</%def>
<%!
# Template-module helpers: descriptors for the four physical box sides and
# the four corners, used by the per-side/per-corner generation loops below.
class Side(object):
    def __init__(self, name, index):
        self.name = name            # capitalized Gecko-style name, e.g. "Top"
        self.ident = name.lower()   # lowercase ident, e.g. "top"
        self.index = index          # index into Gecko's side-indexed arrays
SIDES = [Side("Top", 0), Side("Right", 1), Side("Bottom", 2), Side("Left", 3)]
CORNERS = ["top_left", "top_right", "bottom_right", "bottom_left"]
%>
// Compile-time check that the Gecko Side enum values match the indices used
// in SIDES above: when a value disagrees, the `[0][1]` index is out of
// bounds and constant evaluation fails the build.
#[allow(dead_code)]
fn static_assert() {
    // Note: using the above technique with an enum hits a rust bug when |structs| is in a different crate.
    % for side in SIDES:
    { const DETAIL: u32 = [0][(structs::Side::eSide${side.name} as usize != ${side.index}) as usize]; let _ = DETAIL; }
    % endfor
}
## Per-side border color/style/width and per-corner radius are implemented
## by hand below, so exclude them from auto-generation.
<% skip_border_longhands = " ".join(["border-{0}-{1}".format(x.ident, y)
                                     for x in SIDES
                                     for y in ["color", "style", "width"]] +
                                    ["border-{0}-radius".format(x.replace("_", "-"))
                                     for x in CORNERS]) %>
<%self:impl_trait style_struct_name="Border"
                  skip_longhands="${skip_border_longhands} border-image-repeat">
    % for side in SIDES:
    pub fn set_border_${side.ident}_style(&mut self, v: BorderStyle) {
        self.gecko.mBorderStyle[${side.index}] = v;
        // This is needed because the initial mComputedBorder value is set to
        // zero.
        //
        // In order to compute stuff, we start from the initial struct, and keep
        // going down the tree applying properties.
        //
        // That means, effectively, that when we set border-style to something
        // non-hidden, we should use the initial border instead.
        //
        // Servo stores the initial border-width in the initial struct, and then
        // adjusts as needed in the fixup phase. This means that the initial
        // struct is technically not valid without fixups, and that you lose
        // pretty much any sharing of the initial struct, which is kind of
        // unfortunate.
        //
        // Gecko has two fields for this, one that stores the "specified"
        // border, and other that stores the actual computed one. That means
        // that when we set border-style, border-width may change and we need to
        // sync back to the specified one. This is what this function does.
        //
        // Note that this doesn't impose any dependency in the order of
        // computation of the properties. This is only relevant if border-style
        // is specified, but border-width isn't. If border-width is specified at
        // some point, the two mBorder and mComputedBorder fields would be the
        // same already.
        //
        // Once we're here, we know that we'll run style fixups, so it's fine to
        // just copy the specified border here, we'll adjust it if it's
        // incorrect later.
        self.gecko.mComputedBorder.${side.ident} = self.gecko.mBorder.${side.ident};
    }
    pub fn copy_border_${side.ident}_style_from(&mut self, other: &Self) {
        self.gecko.mBorderStyle[${side.index}] = other.gecko.mBorderStyle[${side.index}];
        // Same sync-back as in set_border_${side.ident}_style above.
        self.gecko.mComputedBorder.${side.ident} = self.gecko.mBorder.${side.ident};
    }
    pub fn reset_border_${side.ident}_style(&mut self, other: &Self) {
        self.copy_border_${side.ident}_style_from(other);
    }
    #[inline]
    pub fn clone_border_${side.ident}_style(&self) -> BorderStyle {
        self.gecko.mBorderStyle[${side.index}]
    }
    <% impl_simple("border_%s_color" % side.ident, "mBorder%sColor" % side.name) %>
    <% impl_non_negative_length("border_%s_width" % side.ident,
                                "mComputedBorder.%s" % side.ident,
                                inherit_from="mBorder.%s" % side.ident,
                                round_to_pixels=True) %>
    pub fn border_${side.ident}_has_nonzero_width(&self) -> bool {
        self.gecko.mComputedBorder.${side.ident} != 0
    }
    % endfor
    % for corner in CORNERS:
    <% impl_corner_style_coord("border_%s_radius" % corner,
                               "mBorderRadius",
                               corner) %>
    % endfor
    <%
        border_image_repeat_keywords = ["Stretch", "Repeat", "Round", "Space"]
    %>
    pub fn set_border_image_repeat(&mut self, v: longhands::border_image_repeat::computed_value::T) {
        use crate::values::specified::border::BorderImageRepeatKeyword;
        use crate::gecko_bindings::structs::StyleBorderImageRepeat;
        // v.0 is the horizontal keyword, v.1 the vertical one.
        % for i, side in enumerate(["H", "V"]):
        self.gecko.mBorderImageRepeat${side} = match v.${i} {
            % for keyword in border_image_repeat_keywords:
            BorderImageRepeatKeyword::${keyword} => StyleBorderImageRepeat::${keyword},
            % endfor
        };
        % endfor
    }
    pub fn copy_border_image_repeat_from(&mut self, other: &Self) {
        self.gecko.mBorderImageRepeatH = other.gecko.mBorderImageRepeatH;
        self.gecko.mBorderImageRepeatV = other.gecko.mBorderImageRepeatV;
    }
    pub fn reset_border_image_repeat(&mut self, other: &Self) {
        self.copy_border_image_repeat_from(other)
    }
    pub fn clone_border_image_repeat(&self) -> longhands::border_image_repeat::computed_value::T {
        use crate::values::specified::border::BorderImageRepeatKeyword;
        use crate::gecko_bindings::structs::StyleBorderImageRepeat;
        % for side in ["H", "V"]:
        let servo_${side.lower()} = match self.gecko.mBorderImageRepeat${side} {
            % for keyword in border_image_repeat_keywords:
            StyleBorderImageRepeat::${keyword} => BorderImageRepeatKeyword::${keyword},
            % endfor
        };
        % endfor
        longhands::border_image_repeat::computed_value::T(servo_h, servo_v)
    }
</%self:impl_trait>
## margin-* and scroll-margin-* are side-indexed entries of a Gecko rect,
## handled via impl_split_style_coord rather than auto-generation.
<% skip_scroll_margin_longhands = " ".join(["scroll-margin-%s" % x.ident for x in SIDES]) %>
<% skip_margin_longhands = " ".join(["margin-%s" % x.ident for x in SIDES]) %>
<%self:impl_trait style_struct_name="Margin"
                  skip_longhands="${skip_margin_longhands}
                                  ${skip_scroll_margin_longhands}">
    % for side in SIDES:
    <% impl_split_style_coord("margin_%s" % side.ident,
                              "mMargin",
                              side.index) %>
    <% impl_split_style_coord("scroll_margin_%s" % side.ident,
                              "mScrollMargin",
                              side.index) %>
    % endfor
</%self:impl_trait>
## padding-* and scroll-padding-* mirror the Margin struct above: per-side
## entries of a Gecko rect, implemented via impl_split_style_coord.
<% skip_scroll_padding_longhands = " ".join(["scroll-padding-%s" % x.ident for x in SIDES]) %>
<% skip_padding_longhands = " ".join(["padding-%s" % x.ident for x in SIDES]) %>
<%self:impl_trait style_struct_name="Padding"
                  skip_longhands="${skip_padding_longhands}
                                  ${skip_scroll_padding_longhands}">
    % for side in SIDES:
    <% impl_split_style_coord("padding_%s" % side.ident,
                              "mPadding",
                              side.index) %>
    <% impl_split_style_coord("scroll_padding_%s" % side.ident, "mScrollPadding", side.index) %>
    % endfor
</%self:impl_trait>
## top/right/bottom/left are side-indexed entries of mOffset.
<% skip_position_longhands = " ".join(x.ident for x in SIDES) %>
<%self:impl_trait style_struct_name="Position"
                  skip_longhands="${skip_position_longhands}
                                  masonry-auto-flow">
    % for side in SIDES:
    <% impl_split_style_coord(side.ident, "mOffset", side.index) %>
    % endfor
    pub fn set_computed_justify_items(&mut self, v: values::specified::JustifyItems) {
        // The LEGACY flag must have been resolved away before the value is
        // stored as the computed one.
        debug_assert_ne!(v.0, crate::values::specified::align::AlignFlags::LEGACY);
        self.gecko.mJustifyItems.computed = v;
    }
    ${impl_simple_type_with_conversion("masonry_auto_flow", "mMasonryAutoFlow")}
</%self:impl_trait>
## outline-style/width need the same specified-vs-actual width dance as the
## borders; the -moz-outline-radius corners use the corner helper.
<% skip_outline_longhands = " ".join("outline-style outline-width".split() +
                                     ["-moz-outline-radius-{0}".format(x.replace("_", ""))
                                      for x in CORNERS]) %>
<%self:impl_trait style_struct_name="Outline"
                  skip_longhands="${skip_outline_longhands}">
    pub fn set_outline_style(&mut self, v: longhands::outline_style::computed_value::T) {
        self.gecko.mOutlineStyle = v;
        // NB: This is needed to correctly handling the initial value of
        // outline-width when outline-style changes, see the
        // update_border_${side.ident} comment for more details.
        self.gecko.mActualOutlineWidth = self.gecko.mOutlineWidth;
    }
    pub fn copy_outline_style_from(&mut self, other: &Self) {
        // FIXME(emilio): Why doesn't this need to reset mActualOutlineWidth?
        // Looks fishy.
        self.gecko.mOutlineStyle = other.gecko.mOutlineStyle;
    }
    pub fn reset_outline_style(&mut self, other: &Self) {
        self.copy_outline_style_from(other)
    }
    pub fn clone_outline_style(&self) -> longhands::outline_style::computed_value::T {
        self.gecko.mOutlineStyle.clone()
    }
    <% impl_non_negative_length("outline_width", "mActualOutlineWidth",
                                inherit_from="mOutlineWidth",
                                round_to_pixels=True) %>
    % for corner in CORNERS:
    <% impl_corner_style_coord("_moz_outline_radius_%s" % corner.replace("_", ""),
                               "mOutlineRadius",
                               corner) %>
    % endfor
    pub fn outline_has_nonzero_width(&self) -> bool {
        self.gecko.mActualOutlineWidth != 0
    }
</%self:impl_trait>
## Hand-written accessors for the Font struct: family/size/weight/etc. all
## need custom conversions between Servo computed values and Gecko's nsFont.
<% skip_font_longhands = """font-family font-size font-size-adjust font-weight
                            font-style font-stretch font-synthesis -x-lang
                            font-variant-alternates font-variant-east-asian
                            font-variant-ligatures font-variant-numeric
                            font-language-override font-feature-settings
                            font-variation-settings -moz-min-font-size-ratio
                            -x-text-zoom""" %>
<%self:impl_trait style_struct_name="Font"
    skip_longhands="${skip_font_longhands}">
    // Negative numbers are invalid at parse time, but <integer> is still an
    // i32.
    <% impl_font_settings("font_feature_settings", "gfxFontFeature", "FeatureTagValue", "i32", "u32") %>
    <% impl_font_settings("font_variation_settings", "gfxFontVariation", "VariationValue", "f32", "f32") %>
    pub fn set_font_family(&mut self, v: longhands::font_family::computed_value::T) {
        use crate::values::computed::font::GenericFontFamily;
        let is_system_font = v.is_system_font;
        self.gecko.mFont.systemFont = is_system_font;
        // System fonts carry no generic; otherwise record the single generic
        // of the list, if any.
        self.gecko.mGenericID = if is_system_font {
            GenericFontFamily::None
        } else {
            v.families.single_generic().unwrap_or(GenericFontFamily::None)
        };
        self.gecko.mFont.fontlist.mFontlist.mBasePtr.set_move(
            v.families.shared_font_list().clone()
        );
        // Fixed-up if needed in Cascade::fixup_font_stuff.
        self.gecko.mFont.fontlist.mDefaultFontType = GenericFontFamily::None;
    }
    pub fn copy_font_family_from(&mut self, other: &Self) {
        unsafe { Gecko_CopyFontFamilyFrom(&mut self.gecko.mFont, &other.gecko.mFont); }
        self.gecko.mGenericID = other.gecko.mGenericID;
        self.gecko.mFont.systemFont = other.gecko.mFont.systemFont;
    }
    pub fn reset_font_family(&mut self, other: &Self) {
        self.copy_font_family_from(other)
    }
    pub fn clone_font_family(&self) -> longhands::font_family::computed_value::T {
        use crate::values::computed::font::{FontFamily, SingleFontFamily, FontFamilyList};
        let fontlist = &self.gecko.mFont.fontlist;
        let shared_fontlist = unsafe { fontlist.mFontlist.mBasePtr.to_safe() };
        // An empty shared list means the default font type stands in for the
        // whole family list.
        let families = if shared_fontlist.mNames.is_empty() {
            let default = SingleFontFamily::Generic(fontlist.mDefaultFontType);
            FontFamilyList::new(Box::new([default]))
        } else {
            FontFamilyList::SharedFontList(shared_fontlist)
        };
        FontFamily {
            families,
            is_system_font: self.gecko.mFont.systemFont,
        }
    }
    /// Strip the device's text-zoom factor back out of the stored font sizes.
    pub fn unzoom_fonts(&mut self, device: &Device) {
        self.gecko.mSize = device.unzoom_text(Au(self.gecko.mSize)).0;
        self.gecko.mScriptUnconstrainedSize = device.unzoom_text(Au(self.gecko.mScriptUnconstrainedSize)).0;
        self.gecko.mFont.size = device.unzoom_text(Au(self.gecko.mFont.size)).0;
    }
    pub fn copy_font_size_from(&mut self, other: &Self) {
        self.gecko.mScriptUnconstrainedSize = other.gecko.mScriptUnconstrainedSize;
        // NOTE(review): mSize is copied from the *unconstrained* size while
        // mFont.size comes from other's mSize — presumably so fixups rerun
        // from the unconstrained value; confirm against fixup_font_stuff.
        self.gecko.mSize = other.gecko.mScriptUnconstrainedSize;
        self.gecko.mFont.size = other.gecko.mSize;
        self.gecko.mFontSizeKeyword = other.gecko.mFontSizeKeyword;
        // TODO(emilio): Should we really copy over these two?
        self.gecko.mFontSizeFactor = other.gecko.mFontSizeFactor;
        self.gecko.mFontSizeOffset = other.gecko.mFontSizeOffset;
    }
    pub fn reset_font_size(&mut self, other: &Self) {
        self.copy_font_size_from(other)
    }
    pub fn set_font_size(&mut self, v: FontSize) {
        let size = Au::from(v.size());
        self.gecko.mScriptUnconstrainedSize = size.0;
        // These two may be changed from Cascade::fixup_font_stuff.
        self.gecko.mSize = size.0;
        self.gecko.mFont.size = size.0;
        self.gecko.mFontSizeKeyword = v.keyword_info.kw;
        self.gecko.mFontSizeFactor = v.keyword_info.factor;
        self.gecko.mFontSizeOffset = v.keyword_info.offset.to_i32_au();
    }
    pub fn clone_font_size(&self) -> FontSize {
        use crate::values::specified::font::KeywordInfo;
        FontSize {
            size: Au(self.gecko.mSize).into(),
            keyword_info: KeywordInfo {
                kw: self.gecko.mFontSizeKeyword,
                factor: self.gecko.mFontSizeFactor,
                offset: Au(self.gecko.mFontSizeOffset).into()
            }
        }
    }
    pub fn set_font_weight(&mut self, v: longhands::font_weight::computed_value::T) {
        unsafe { bindings::Gecko_FontWeight_SetFloat(&mut self.gecko.mFont.weight, v.0) };
    }
    ${impl_simple_copy('font_weight', 'mFont.weight')}
    pub fn clone_font_weight(&self) -> longhands::font_weight::computed_value::T {
        let weight: f32 = unsafe {
            bindings::Gecko_FontWeight_ToFloat(self.gecko.mFont.weight)
        };
        longhands::font_weight::computed_value::T(weight)
    }
    pub fn set_font_stretch(&mut self, v: longhands::font_stretch::computed_value::T) {
        unsafe {
            bindings::Gecko_FontStretch_SetFloat(
                &mut self.gecko.mFont.stretch,
                v.value(),
            )
        };
    }
    ${impl_simple_copy('font_stretch', 'mFont.stretch')}
    pub fn clone_font_stretch(&self) -> longhands::font_stretch::computed_value::T {
        use crate::values::computed::font::FontStretch;
        use crate::values::computed::Percentage;
        use crate::values::generics::NonNegative;
        let stretch =
            unsafe { bindings::Gecko_FontStretch_ToFloat(self.gecko.mFont.stretch) };
        debug_assert!(stretch >= 0.);
        FontStretch(NonNegative(Percentage(stretch)))
    }
    pub fn set_font_style(&mut self, v: longhands::font_style::computed_value::T) {
        use crate::values::generics::font::FontStyle;
        let s = &mut self.gecko.mFont.style;
        unsafe {
            match v {
                FontStyle::Normal => bindings::Gecko_FontSlantStyle_SetNormal(s),
                FontStyle::Italic => bindings::Gecko_FontSlantStyle_SetItalic(s),
                FontStyle::Oblique(ref angle) => {
                    bindings::Gecko_FontSlantStyle_SetOblique(s, angle.0.degrees())
                }
            }
        }
    }
    ${impl_simple_copy('font_style', 'mFont.style')}
    pub fn clone_font_style(&self) -> longhands::font_style::computed_value::T {
        use crate::values::computed::font::FontStyle;
        FontStyle::from_gecko(self.gecko.mFont.style)
    }
    ${impl_simple_type_with_conversion("font_synthesis", "mFont.synthesis")}
    ${impl_simple("font_variant_alternates", "mFont.variantAlternates")}
    pub fn set_font_size_adjust(&mut self, v: longhands::font_size_adjust::computed_value::T) {
        use crate::properties::longhands::font_size_adjust::computed_value::T;
        // -1.0 encodes `none` on the Gecko side.
        match v {
            T::None => self.gecko.mFont.sizeAdjust = -1.0 as f32,
            T::Number(n) => self.gecko.mFont.sizeAdjust = n,
        }
    }
    pub fn copy_font_size_adjust_from(&mut self, other: &Self) {
        self.gecko.mFont.sizeAdjust = other.gecko.mFont.sizeAdjust;
    }
    pub fn reset_font_size_adjust(&mut self, other: &Self) {
        self.copy_font_size_adjust_from(other)
    }
    pub fn clone_font_size_adjust(&self) -> longhands::font_size_adjust::computed_value::T {
        use crate::properties::longhands::font_size_adjust::computed_value::T;
        T::from_gecko_adjust(self.gecko.mFont.sizeAdjust)
    }
    #[allow(non_snake_case)]
    pub fn set__x_lang(&mut self, v: longhands::_x_lang::computed_value::T) {
        let ptr = v.0.as_ptr();
        // Ownership of the atom is transferred to Gecko; don't drop it here.
        forget(v);
        unsafe {
            Gecko_nsStyleFont_SetLang(&mut *self.gecko, ptr);
        }
    }
    #[allow(non_snake_case)]
    pub fn copy__x_lang_from(&mut self, other: &Self) {
        unsafe {
            Gecko_nsStyleFont_CopyLangFrom(&mut *self.gecko, &*other.gecko);
        }
    }
    #[allow(non_snake_case)]
    pub fn reset__x_lang(&mut self, other: &Self) {
        self.copy__x_lang_from(other)
    }
    #[allow(non_snake_case)]
    pub fn clone__x_lang(&self) -> longhands::_x_lang::computed_value::T {
        longhands::_x_lang::computed_value::T(unsafe {
            Atom::from_raw(self.gecko.mLanguage.mRawPtr)
        })
    }
    #[allow(non_snake_case)]
    pub fn set__x_text_zoom(&mut self, v: longhands::_x_text_zoom::computed_value::T) {
        self.gecko.mAllowZoomAndMinSize = v.0;
    }
    #[allow(non_snake_case)]
    pub fn copy__x_text_zoom_from(&mut self, other: &Self) {
        self.gecko.mAllowZoomAndMinSize = other.gecko.mAllowZoomAndMinSize;
    }
    #[allow(non_snake_case)]
    pub fn reset__x_text_zoom(&mut self, other: &Self) {
        self.copy__x_text_zoom_from(other)
    }
    #[allow(non_snake_case)]
    pub fn clone__x_text_zoom(&self) -> longhands::_x_text_zoom::computed_value::T {
        longhands::_x_text_zoom::computed_value::T(self.gecko.mAllowZoomAndMinSize)
    }
    <% impl_simple_type_with_conversion("font_language_override", "mFont.languageOverride") %>
    ${impl_simple_type_with_conversion("font_variant_ligatures", "mFont.variantLigatures")}
    ${impl_simple_type_with_conversion("font_variant_east_asian", "mFont.variantEastAsian")}
    ${impl_simple_type_with_conversion("font_variant_numeric", "mFont.variantNumeric")}
    #[allow(non_snake_case)]
    pub fn clone__moz_min_font_size_ratio(
        &self,
    ) -> longhands::_moz_min_font_size_ratio::computed_value::T {
        // Stored as an integer percentage in a u8; convert back to a fraction.
        Percentage(self.gecko.mMinFontSizeRatio as f32 / 100.)
    }
    #[allow(non_snake_case)]
    pub fn set__moz_min_font_size_ratio(&mut self, v: longhands::_moz_min_font_size_ratio::computed_value::T) {
        // Clamp to [0, 255] since the Gecko field is a u8 percentage.
        let scaled = v.0 * 100.;
        let percentage = if scaled > 255. {
            255.
        } else if scaled < 0. {
            0.
        } else {
            scaled
        };
        self.gecko.mMinFontSizeRatio = percentage as u8;
    }
    ${impl_simple_copy('_moz_min_font_size_ratio', 'mMinFontSizeRatio')}
</%self:impl_trait>
## Emits copy_/reset_ for one per-animation (or per-transition) value.
## `type` is "animation" or "transition"; `gecko_ffi_name` the field suffix;
## `member` optionally narrows the copy to one sub-field of the entry.
<%def name="impl_copy_animation_or_transition_value(type, ident, gecko_ffi_name, member=None)">
    #[allow(non_snake_case)]
    pub fn copy_${type}_${ident}_from(&mut self, other: &Self) {
        self.gecko.m${type.capitalize()}s.ensure_len(other.gecko.m${type.capitalize()}s.len());
        // Each sub-property carries its own count, since the lists may have
        // different lengths per the CSS animations/transitions model.
        let count = other.gecko.m${type.capitalize()}${gecko_ffi_name}Count;
        self.gecko.m${type.capitalize()}${gecko_ffi_name}Count = count;
        let iter = self.gecko.m${type.capitalize()}s.iter_mut().take(count as usize).zip(
            other.gecko.m${type.capitalize()}s.iter()
        );
        for (ours, others) in iter {
            % if member:
            ours.m${gecko_ffi_name}.${member} = others.m${gecko_ffi_name}.${member};
            % else:
            ours.m${gecko_ffi_name} = others.m${gecko_ffi_name};
            % endif
        }
    }
    #[allow(non_snake_case)]
    pub fn reset_${type}_${ident}(&mut self, other: &Self) {
        self.copy_${type}_${ident}_from(other)
    }
</%def>
## Emits the per-sub-property list-length getter (e.g. animation_delay_count).
<%def name="impl_animation_or_transition_count(type, ident, gecko_ffi_name)">
    #[allow(non_snake_case)]
    pub fn ${type}_${ident}_count(&self) -> usize {
        self.gecko.m${type.capitalize()}${gecko_ffi_name}Count as usize
    }
</%def>
## Emits set_/at_/count/copy for a time-valued animation or transition
## sub-property (delay, duration).  Gecko stores times in milliseconds;
## Servo's computed Time is in seconds, hence the * / 1000. conversions.
<%def name="impl_animation_or_transition_time_value(type, ident, gecko_ffi_name)">
    #[allow(non_snake_case)]
    pub fn set_${type}_${ident}<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::${type}_${ident}::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator + Clone
    {
        let v = v.into_iter();
        debug_assert_ne!(v.len(), 0);
        let input_len = v.len();
        self.gecko.m${type.capitalize()}s.ensure_len(input_len);
        self.gecko.m${type.capitalize()}${gecko_ffi_name}Count = input_len as u32;
        for (gecko, servo) in self.gecko.m${type.capitalize()}s.iter_mut().take(input_len as usize).zip(v) {
            // seconds -> milliseconds
            gecko.m${gecko_ffi_name} = servo.seconds() * 1000.;
        }
    }
    #[allow(non_snake_case)]
    pub fn ${type}_${ident}_at(&self, index: usize)
        -> longhands::${type}_${ident}::computed_value::SingleComputedValue {
        use crate::values::computed::Time;
        // milliseconds -> seconds
        Time::from_seconds(self.gecko.m${type.capitalize()}s[index].m${gecko_ffi_name} / 1000.)
    }
    ${impl_animation_or_transition_count(type, ident, gecko_ffi_name)}
    ${impl_copy_animation_or_transition_value(type, ident, gecko_ffi_name)}
</%def>
## Emits set_/at_/count/copy for the timing-function sub-property of
## animations or transitions; the computed value is stored directly in
## each entry's mTimingFunction.mTiming.
<%def name="impl_animation_or_transition_timing_function(type)">
    pub fn set_${type}_timing_function<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::${type}_timing_function::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator + Clone
    {
        let v = v.into_iter();
        debug_assert_ne!(v.len(), 0);
        let input_len = v.len();
        self.gecko.m${type.capitalize()}s.ensure_len(input_len);
        self.gecko.m${type.capitalize()}TimingFunctionCount = input_len as u32;
        for (gecko, servo) in self.gecko.m${type.capitalize()}s.iter_mut().take(input_len as usize).zip(v) {
            gecko.mTimingFunction.mTiming = servo;
        }
    }
    ${impl_animation_or_transition_count(type, 'timing_function', 'TimingFunction')}
    ${impl_copy_animation_or_transition_value(type, 'timing_function', "TimingFunction", "mTiming")}
    pub fn ${type}_timing_function_at(&self, index: usize)
        -> longhands::${type}_timing_function::computed_value::SingleComputedValue {
        self.gecko.m${type.capitalize()}s[index].mTimingFunction.mTiming
    }
</%def>
## Thin wrappers that pin `type` to "transition" or "animation" for the
## generic helpers above.
<%def name="impl_transition_time_value(ident, gecko_ffi_name)">
    ${impl_animation_or_transition_time_value('transition', ident, gecko_ffi_name)}
</%def>
<%def name="impl_transition_count(ident, gecko_ffi_name)">
    ${impl_animation_or_transition_count('transition', ident, gecko_ffi_name)}
</%def>
<%def name="impl_copy_animation_value(ident, gecko_ffi_name)">
    ${impl_copy_animation_or_transition_value('animation', ident, gecko_ffi_name)}
</%def>
<%def name="impl_animation_count(ident, gecko_ffi_name)">
    ${impl_animation_or_transition_count('animation', ident, gecko_ffi_name)}
</%def>
<%def name="impl_animation_time_value(ident, gecko_ffi_name)">
    ${impl_animation_or_transition_time_value('animation', ident, gecko_ffi_name)}
</%def>
## Emits set_/at_/count/copy for a keyword-valued animation sub-property
## (direction, fill-mode, play-state), mapping between the Servo keyword
## enum and the Gecko constant (cast to `cast_type` for storage).
<%def name="impl_animation_keyword(ident, gecko_ffi_name, keyword, cast_type='u8')">
    #[allow(non_snake_case)]
    pub fn set_animation_${ident}<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::animation_${ident}::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator + Clone
    {
        use crate::properties::longhands::animation_${ident}::single_value::computed_value::T as Keyword;
        let v = v.into_iter();
        debug_assert_ne!(v.len(), 0);
        let input_len = v.len();
        self.gecko.mAnimations.ensure_len(input_len);
        self.gecko.mAnimation${gecko_ffi_name}Count = input_len as u32;
        for (gecko, servo) in self.gecko.mAnimations.iter_mut().take(input_len as usize).zip(v) {
            let result = match servo {
                % for value in keyword.values_for("gecko"):
                Keyword::${to_camel_case(value)} =>
                    structs::${keyword.gecko_constant(value)} ${keyword.maybe_cast(cast_type)},
                % endfor
            };
            gecko.m${gecko_ffi_name} = result;
        }
    }
    #[allow(non_snake_case)]
    pub fn animation_${ident}_at(&self, index: usize)
        -> longhands::animation_${ident}::computed_value::SingleComputedValue {
        use crate::properties::longhands::animation_${ident}::single_value::computed_value::T as Keyword;
        // Reverse mapping: Gecko constant -> Servo keyword.
        match self.gecko.mAnimations[index].m${gecko_ffi_name} ${keyword.maybe_cast("u32")} {
            % for value in keyword.values_for("gecko"):
            structs::${keyword.gecko_constant(value)} => Keyword::${to_camel_case(value)},
            % endfor
            % if keyword.gecko_inexhaustive:
            _ => panic!("Found unexpected value for animation-${ident}"),
            % endif
        }
    }
    ${impl_animation_count(ident, gecko_ffi_name)}
    ${impl_copy_animation_value(ident, gecko_ffi_name)}
</%def>
## Hand-written accessors for the Box struct: display needs the
## mOriginalDisplay shadow field, and the animation/transition lists need
## the per-sub-property count machinery from the helper defs above.
<% skip_box_longhands= """display
                          animation-name animation-delay animation-duration
                          animation-direction animation-fill-mode animation-play-state
                          animation-iteration-count animation-timing-function
                          clear transition-duration transition-delay
                          transition-timing-function transition-property
                          -webkit-line-clamp""" %>
<%self:impl_trait style_struct_name="Box" skip_longhands="${skip_box_longhands}">
    #[inline]
    pub fn set_display(&mut self, v: longhands::display::computed_value::T) {
        // mOriginalDisplay tracks the pre-adjustment value; a plain set
        // keeps both in sync.
        self.gecko.mDisplay = v;
        self.gecko.mOriginalDisplay = v;
    }
    #[inline]
    pub fn copy_display_from(&mut self, other: &Self) {
        self.gecko.mDisplay = other.gecko.mDisplay;
        self.gecko.mOriginalDisplay = other.gecko.mDisplay;
    }
    #[inline]
    pub fn reset_display(&mut self, other: &Self) {
        self.copy_display_from(other)
    }
    #[inline]
    pub fn set_adjusted_display(
        &mut self,
        v: longhands::display::computed_value::T,
        _is_item_or_root: bool
    ) {
        // Only mDisplay changes here; mOriginalDisplay keeps the specified value.
        self.gecko.mDisplay = v;
    }
    #[inline]
    pub fn clone_display(&self) -> longhands::display::computed_value::T {
        self.gecko.mDisplay
    }
    <% clear_keyword = Keyword(
        "clear",
        "Left Right None Both",
        gecko_enum_prefix="StyleClear",
        gecko_inexhaustive=True,
    ) %>
    ${impl_keyword('clear', 'mBreakType', clear_keyword)}
    ${impl_transition_time_value('delay', 'Delay')}
    ${impl_transition_time_value('duration', 'Duration')}
    ${impl_animation_or_transition_timing_function('transition')}
    pub fn transition_combined_duration_at(&self, index: usize) -> f32 {
        // https://drafts.csswg.org/css-transitions/#transition-combined-duration
        self.gecko.mTransitions[index % self.gecko.mTransitionDurationCount as usize].mDuration.max(0.0)
            + self.gecko.mTransitions[index % self.gecko.mTransitionDelayCount as usize].mDelay
    }
    pub fn set_transition_property<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::transition_property::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator
    {
        use crate::gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_no_properties;
        use crate::gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_variable;
        use crate::gecko_bindings::structs::nsCSSPropertyID::eCSSProperty_UNKNOWN;
        let v = v.into_iter();
        if v.len() != 0 {
            self.gecko.mTransitions.ensure_len(v.len());
            self.gecko.mTransitionPropertyCount = v.len() as u32;
            for (servo, gecko) in v.zip(self.gecko.mTransitions.iter_mut()) {
                // Release any previously-held atom before overwriting.
                unsafe { gecko.mUnknownProperty.clear() };
                match servo {
                    TransitionProperty::Unsupported(ident) => {
                        gecko.mProperty = eCSSProperty_UNKNOWN;
                        gecko.mUnknownProperty.mRawPtr = ident.0.into_addrefed();
                    },
                    TransitionProperty::Custom(name) => {
                        gecko.mProperty = eCSSPropertyExtra_variable;
                        gecko.mUnknownProperty.mRawPtr = name.into_addrefed();
                    }
                    _ => gecko.mProperty = servo.to_nscsspropertyid().unwrap(),
                }
            }
        } else {
            // In gecko |none| is represented by eCSSPropertyExtra_no_properties.
            self.gecko.mTransitionPropertyCount = 1;
            self.gecko.mTransitions[0].mProperty = eCSSPropertyExtra_no_properties;
        }
    }
    /// Returns whether there are any transitions specified.
    pub fn specifies_transitions(&self) -> bool {
        use crate::gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_all_properties;
        // `all` with a non-positive combined duration can never fire.
        if self.gecko.mTransitionPropertyCount == 1 &&
            self.gecko.mTransitions[0].mProperty == eCSSPropertyExtra_all_properties &&
            self.transition_combined_duration_at(0) <= 0.0f32 {
            return false;
        }
        self.gecko.mTransitionPropertyCount > 0
    }
    pub fn transition_property_at(&self, index: usize)
        -> longhands::transition_property::computed_value::SingleComputedValue {
        use crate::gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_no_properties;
        use crate::gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_variable;
        use crate::gecko_bindings::structs::nsCSSPropertyID::eCSSProperty_UNKNOWN;
        let property = self.gecko.mTransitions[index].mProperty;
        if property == eCSSProperty_UNKNOWN {
            let atom = self.gecko.mTransitions[index].mUnknownProperty.mRawPtr;
            debug_assert!(!atom.is_null());
            TransitionProperty::Unsupported(CustomIdent(unsafe{
                Atom::from_raw(atom)
            }))
        } else if property == eCSSPropertyExtra_variable {
            let atom = self.gecko.mTransitions[index].mUnknownProperty.mRawPtr;
            debug_assert!(!atom.is_null());
            TransitionProperty::Custom(unsafe{
                Atom::from_raw(atom)
            })
        } else if property == eCSSPropertyExtra_no_properties {
            // Actually, we don't expect TransitionProperty::Unsupported also
            // represents "none", but if the caller wants to convert it, it is
            // fine. Please use it carefully.
            //
            // FIXME(emilio): This is a hack, is this reachable?
            TransitionProperty::Unsupported(CustomIdent(atom!("none")))
        } else {
            property.into()
        }
    }
    pub fn transition_nscsspropertyid_at(&self, index: usize) -> nsCSSPropertyID {
        self.gecko.mTransitions[index].mProperty
    }
    pub fn copy_transition_property_from(&mut self, other: &Self) {
        use crate::gecko_bindings::structs::nsCSSPropertyID::eCSSPropertyExtra_variable;
        use crate::gecko_bindings::structs::nsCSSPropertyID::eCSSProperty_UNKNOWN;
        self.gecko.mTransitions.ensure_len(other.gecko.mTransitions.len());
        let count = other.gecko.mTransitionPropertyCount;
        self.gecko.mTransitionPropertyCount = count;
        for (index, transition) in self.gecko.mTransitions.iter_mut().enumerate().take(count as usize) {
            transition.mProperty = other.gecko.mTransitions[index].mProperty;
            unsafe { transition.mUnknownProperty.clear() };
            if transition.mProperty == eCSSProperty_UNKNOWN ||
               transition.mProperty == eCSSPropertyExtra_variable {
                // The unknown-property atom must be cloned with its refcount bumped.
                let atom = other.gecko.mTransitions[index].mUnknownProperty.mRawPtr;
                debug_assert!(!atom.is_null());
                transition.mUnknownProperty.mRawPtr = unsafe { Atom::from_raw(atom) }.into_addrefed();
            }
        }
    }
    pub fn reset_transition_property(&mut self, other: &Self) {
        self.copy_transition_property_from(other)
    }
    ${impl_transition_count('property', 'Property')}
    pub fn animations_equals(&self, other: &Self) -> bool {
        return self.gecko.mAnimationNameCount == other.gecko.mAnimationNameCount
            && self.gecko.mAnimationDelayCount == other.gecko.mAnimationDelayCount
            && self.gecko.mAnimationDirectionCount == other.gecko.mAnimationDirectionCount
            && self.gecko.mAnimationDurationCount == other.gecko.mAnimationDurationCount
            && self.gecko.mAnimationFillModeCount == other.gecko.mAnimationFillModeCount
            && self.gecko.mAnimationIterationCountCount == other.gecko.mAnimationIterationCountCount
            && self.gecko.mAnimationPlayStateCount == other.gecko.mAnimationPlayStateCount
            && self.gecko.mAnimationTimingFunctionCount == other.gecko.mAnimationTimingFunctionCount
            && unsafe { bindings::Gecko_StyleAnimationsEquals(&self.gecko.mAnimations, &other.gecko.mAnimations) }
    }
    pub fn set_animation_name<I>(&mut self, v: I)
        where I: IntoIterator<Item = longhands::animation_name::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator
    {
        let v = v.into_iter();
        debug_assert_ne!(v.len(), 0);
        self.gecko.mAnimations.ensure_len(v.len());
        self.gecko.mAnimationNameCount = v.len() as u32;
        for (servo, gecko) in v.zip(self.gecko.mAnimations.iter_mut()) {
            // The empty atom stands in for `none` on the Gecko side.
            let atom = match servo.0 {
                None => atom!(""),
                Some(ref name) => name.as_atom().clone(),
            };
            unsafe { bindings::Gecko_SetAnimationName(gecko, atom.into_addrefed()); }
        }
    }
    pub fn animation_name_at(&self, index: usize)
        -> longhands::animation_name::computed_value::SingleComputedValue {
        use crate::properties::longhands::animation_name::single_value::SpecifiedValue as AnimationName;
        let atom = self.gecko.mAnimations[index].mName.mRawPtr;
        if atom == atom!("").as_ptr() {
            return AnimationName(None)
        }
        AnimationName(Some(KeyframesName::from_atom(unsafe { Atom::from_raw(atom) })))
    }
    pub fn copy_animation_name_from(&mut self, other: &Self) {
        self.gecko.mAnimationNameCount = other.gecko.mAnimationNameCount;
        unsafe { bindings::Gecko_CopyAnimationNames(&mut self.gecko.mAnimations, &other.gecko.mAnimations); }
    }
    pub fn reset_animation_name(&mut self, other: &Self) {
        self.copy_animation_name_from(other)
    }
    ${impl_animation_count('name', 'Name')}
    ${impl_animation_time_value('delay', 'Delay')}
    ${impl_animation_time_value('duration', 'Duration')}
    ${impl_animation_keyword('direction', 'Direction',
                             data.longhands_by_name["animation-direction"].keyword)}
    ${impl_animation_keyword('fill_mode', 'FillMode',
                             data.longhands_by_name["animation-fill-mode"].keyword)}
    ${impl_animation_keyword('play_state', 'PlayState',
                             data.longhands_by_name["animation-play-state"].keyword)}
    pub fn set_animation_iteration_count<I>(&mut self, v: I)
    where
        I: IntoIterator<Item = values::computed::AnimationIterationCount>,
        I::IntoIter: ExactSizeIterator + Clone
    {
        use std::f32;
        use crate::values::generics::box_::AnimationIterationCount;
        let v = v.into_iter();
        debug_assert_ne!(v.len(), 0);
        let input_len = v.len();
        self.gecko.mAnimations.ensure_len(input_len);
        self.gecko.mAnimationIterationCountCount = input_len as u32;
        for (gecko, servo) in self.gecko.mAnimations.iter_mut().take(input_len as usize).zip(v) {
            // `infinite` is encoded as an f32 infinity in mIterationCount.
            match servo {
                AnimationIterationCount::Number(n) => gecko.mIterationCount = n,
                AnimationIterationCount::Infinite => gecko.mIterationCount = f32::INFINITY,
            }
        }
    }
    pub fn animation_iteration_count_at(
        &self,
        index: usize,
    ) -> values::computed::AnimationIterationCount {
        use crate::values::generics::box_::AnimationIterationCount;
        if self.gecko.mAnimations[index].mIterationCount.is_infinite() {
            AnimationIterationCount::Infinite
        } else {
            AnimationIterationCount::Number(self.gecko.mAnimations[index].mIterationCount)
        }
    }
    ${impl_animation_count('iteration_count', 'IterationCount')}
    ${impl_copy_animation_value('iteration_count', 'IterationCount')}
    ${impl_animation_or_transition_timing_function('animation')}
    #[allow(non_snake_case)]
    pub fn set__webkit_line_clamp(&mut self, v: longhands::_webkit_line_clamp::computed_value::T) {
        // 0 encodes `none` in mLineClamp.
        self.gecko.mLineClamp = match v {
            Either::First(n) => n.0 as u32,
            Either::Second(None_) => 0,
        };
    }
    ${impl_simple_copy('_webkit_line_clamp', 'mLineClamp')}
    #[allow(non_snake_case)]
    pub fn clone__webkit_line_clamp(&self) -> longhands::_webkit_line_clamp::computed_value::T {
        match self.gecko.mLineClamp {
            0 => Either::Second(None_),
            n => {
                debug_assert!(n <= std::i32::MAX as u32);
                Either::First((n as i32).into())
            }
        }
    }
</%self:impl_trait>
## Emits the setter for a per-layer background/mask sub-property whose
## per-layer conversion is supplied by the caller body; also emits the
## copy_/reset_ pair via copy_simple_image_array_property.
<%def name="simple_image_array_property(name, shorthand, field_name)">
    <%
        # background layers live in mImage, mask layers in mMask.
        image_layers_field = "mImage" if shorthand == "background" else "mMask"
        copy_simple_image_array_property(name, shorthand, image_layers_field, field_name)
    %>
    pub fn set_${shorthand}_${name}<I>(&mut self, v: I)
        where I: IntoIterator<Item=longhands::${shorthand}_${name}::computed_value::single_value::T>,
              I::IntoIter: ExactSizeIterator
    {
        use crate::gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;
        let v = v.into_iter();
        unsafe {
            // Grow the layer array to hold every incoming value.
            Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, v.len(),
                                          LayerType::${shorthand.title()});
        }
        self.gecko.${image_layers_field}.${field_name}Count = v.len() as u32;
        for (servo, geckolayer) in v.zip(self.gecko.${image_layers_field}.mLayers.iter_mut()) {
            // Caller body converts one Servo value into the Gecko field value.
            geckolayer.${field_name} = {
                ${caller.body()}
            };
        }
    }
</%def>
<%def name="copy_simple_image_array_property(name, shorthand, layers_field_name, field_name)">
pub fn copy_${shorthand}_${name}_from(&mut self, other: &Self) {
use crate::gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;
let count = other.gecko.${layers_field_name}.${field_name}Count;
unsafe {
Gecko_EnsureImageLayersLength(&mut self.gecko.${layers_field_name},
count as usize,
LayerType::${shorthand.title()});
}
// FIXME(emilio): This may be bogus in the same way as bug 1426246.
for (layer, other) in self.gecko.${layers_field_name}.mLayers.iter_mut()
.zip(other.gecko.${layers_field_name}.mLayers.iter())
.take(count as usize) {
layer.${field_name} = other.${field_name}.clone();
}
self.gecko.${layers_field_name}.${field_name}Count = count;
}
pub fn reset_${shorthand}_${name}(&mut self, other: &Self) {
self.copy_${shorthand}_${name}_from(other)
}
</%def>
<%def name="impl_simple_image_array_property(name, shorthand, layer_field_name, field_name, struct_name)">
<%
ident = "%s_%s" % (shorthand, name)
style_struct = next(x for x in data.style_structs if x.name == struct_name)
longhand = next(x for x in style_struct.longhands if x.ident == ident)
keyword = longhand.keyword
%>
<% copy_simple_image_array_property(name, shorthand, layer_field_name, field_name) %>
pub fn set_${ident}<I>(&mut self, v: I)
where
I: IntoIterator<Item=longhands::${ident}::computed_value::single_value::T>,
I::IntoIter: ExactSizeIterator,
{
use crate::properties::longhands::${ident}::single_value::computed_value::T as Keyword;
use crate::gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;
let v = v.into_iter();
unsafe {
Gecko_EnsureImageLayersLength(&mut self.gecko.${layer_field_name}, v.len(),
LayerType::${shorthand.title()});
}
self.gecko.${layer_field_name}.${field_name}Count = v.len() as u32;
for (servo, geckolayer) in v.zip(self.gecko.${layer_field_name}.mLayers.iter_mut()) {
geckolayer.${field_name} = {
match servo {
% for value in keyword.values_for("gecko"):
Keyword::${to_camel_case(value)} =>
structs::${keyword.gecko_constant(value)} ${keyword.maybe_cast('u8')},
% endfor
}
};
}
}
pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T {
use crate::properties::longhands::${ident}::single_value::computed_value::T as Keyword;
% if keyword.needs_cast():
% for value in keyword.values_for('gecko'):
const ${keyword.casted_constant_name(value, "u8")} : u8 =
structs::${keyword.gecko_constant(value)} as u8;
% endfor
% endif
longhands::${ident}::computed_value::List(
self.gecko.${layer_field_name}.mLayers.iter()
.take(self.gecko.${layer_field_name}.${field_name}Count as usize)
.map(|ref layer| {
match layer.${field_name} {
% for value in longhand.keyword.values_for("gecko"):
% if keyword.needs_cast():
${keyword.casted_constant_name(value, "u8")}
% else:
structs::${keyword.gecko_constant(value)}
% endif
=> Keyword::${to_camel_case(value)},
% endfor
% if keyword.gecko_inexhaustive:
_ => panic!("Found unexpected value in style struct for ${ident} property"),
% endif
}
}).collect()
)
}
</%def>
<%def name="impl_common_image_layer_properties(shorthand)">
<%
if shorthand == "background":
image_layers_field = "mImage"
struct_name = "Background"
else:
image_layers_field = "mMask"
struct_name = "SVG"
%>
<%self:simple_image_array_property name="repeat" shorthand="${shorthand}" field_name="mRepeat">
use crate::values::specified::background::BackgroundRepeatKeyword;
use crate::gecko_bindings::structs::nsStyleImageLayers_Repeat;
use crate::gecko_bindings::structs::StyleImageLayerRepeat;
fn to_ns(repeat: BackgroundRepeatKeyword) -> StyleImageLayerRepeat {
match repeat {
BackgroundRepeatKeyword::Repeat => StyleImageLayerRepeat::Repeat,
BackgroundRepeatKeyword::Space => StyleImageLayerRepeat::Space,
BackgroundRepeatKeyword::Round => StyleImageLayerRepeat::Round,
BackgroundRepeatKeyword::NoRepeat => StyleImageLayerRepeat::NoRepeat,
}
}
let repeat_x = to_ns(servo.0);
let repeat_y = to_ns(servo.1);
nsStyleImageLayers_Repeat {
mXRepeat: repeat_x,
mYRepeat: repeat_y,
}
</%self:simple_image_array_property>
pub fn clone_${shorthand}_repeat(&self) -> longhands::${shorthand}_repeat::computed_value::T {
use crate::properties::longhands::${shorthand}_repeat::single_value::computed_value::T;
use crate::values::specified::background::BackgroundRepeatKeyword;
use crate::gecko_bindings::structs::StyleImageLayerRepeat;
fn to_servo(repeat: StyleImageLayerRepeat) -> BackgroundRepeatKeyword {
match repeat {
StyleImageLayerRepeat::Repeat => BackgroundRepeatKeyword::Repeat,
StyleImageLayerRepeat::Space => BackgroundRepeatKeyword::Space,
StyleImageLayerRepeat::Round => BackgroundRepeatKeyword::Round,
StyleImageLayerRepeat::NoRepeat => BackgroundRepeatKeyword::NoRepeat,
_ => panic!("Found unexpected value in style struct for ${shorthand}_repeat property"),
}
}
longhands::${shorthand}_repeat::computed_value::List(
self.gecko.${image_layers_field}.mLayers.iter()
.take(self.gecko.${image_layers_field}.mRepeatCount as usize)
.map(|ref layer| {
T(to_servo(layer.mRepeat.mXRepeat), to_servo(layer.mRepeat.mYRepeat))
}).collect()
)
}
<% impl_simple_image_array_property("clip", shorthand, image_layers_field, "mClip", struct_name) %>
<% impl_simple_image_array_property("origin", shorthand, image_layers_field, "mOrigin", struct_name) %>
% for (orientation, keyword) in [("x", "horizontal"), ("y", "vertical")]:
pub fn copy_${shorthand}_position_${orientation}_from(&mut self, other: &Self) {
use crate::gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;
let count = other.gecko.${image_layers_field}.mPosition${orientation.upper()}Count;
unsafe {
Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field},
count as usize,
LayerType::${shorthand.capitalize()});
}
for (layer, other) in self.gecko.${image_layers_field}.mLayers.iter_mut()
.zip(other.gecko.${image_layers_field}.mLayers.iter())
.take(count as usize) {
layer.mPosition.${keyword} = other.mPosition.${keyword}.clone();
}
self.gecko.${image_layers_field}.mPosition${orientation.upper()}Count = count;
}
pub fn reset_${shorthand}_position_${orientation}(&mut self, other: &Self) {
self.copy_${shorthand}_position_${orientation}_from(other)
}
pub fn clone_${shorthand}_position_${orientation}(&self)
-> longhands::${shorthand}_position_${orientation}::computed_value::T {
longhands::${shorthand}_position_${orientation}::computed_value::List(
self.gecko.${image_layers_field}.mLayers.iter()
.take(self.gecko.${image_layers_field}.mPosition${orientation.upper()}Count as usize)
.map(|position| position.mPosition.${keyword}.clone())
.collect()
)
}
pub fn set_${shorthand}_position_${orientation[0]}<I>(&mut self,
v: I)
where I: IntoIterator<Item = longhands::${shorthand}_position_${orientation[0]}
::computed_value::single_value::T>,
I::IntoIter: ExactSizeIterator
{
use crate::gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;
let v = v.into_iter();
unsafe {
Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, v.len(),
LayerType::${shorthand.capitalize()});
}
self.gecko.${image_layers_field}.mPosition${orientation[0].upper()}Count = v.len() as u32;
for (servo, geckolayer) in v.zip(self.gecko.${image_layers_field}
.mLayers.iter_mut()) {
geckolayer.mPosition.${keyword} = servo;
}
}
% endfor
<%self:simple_image_array_property name="size" shorthand="${shorthand}" field_name="mSize">
servo
</%self:simple_image_array_property>
pub fn clone_${shorthand}_size(&self) -> longhands::${shorthand}_size::computed_value::T {
longhands::${shorthand}_size::computed_value::List(
self.gecko.${image_layers_field}.mLayers.iter().map(|layer| layer.mSize.clone()).collect()
)
}
pub fn copy_${shorthand}_image_from(&mut self, other: &Self) {
use crate::gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;
unsafe {
let count = other.gecko.${image_layers_field}.mImageCount;
Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field},
count as usize,
LayerType::${shorthand.capitalize()});
for (layer, other) in self.gecko.${image_layers_field}.mLayers.iter_mut()
.zip(other.gecko.${image_layers_field}.mLayers.iter())
.take(count as usize) {
layer.mImage = other.mImage.clone();
}
self.gecko.${image_layers_field}.mImageCount = count;
}
}
pub fn reset_${shorthand}_image(&mut self, other: &Self) {
self.copy_${shorthand}_image_from(other)
}
#[allow(unused_variables)]
pub fn set_${shorthand}_image<I>(&mut self, images: I)
where I: IntoIterator<Item = longhands::${shorthand}_image::computed_value::single_value::T>,
I::IntoIter: ExactSizeIterator
{
use crate::gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType;
let images = images.into_iter();
unsafe {
Gecko_EnsureImageLayersLength(
&mut self.gecko.${image_layers_field},
images.len(),
LayerType::${shorthand.title()},
);
}
self.gecko.${image_layers_field}.mImageCount = images.len() as u32;
for (image, geckoimage) in images.zip(self.gecko.${image_layers_field}
.mLayers.iter_mut()) {
geckoimage.mImage = image;
}
}
pub fn clone_${shorthand}_image(&self) -> longhands::${shorthand}_image::computed_value::T {
longhands::${shorthand}_image::computed_value::List(
self.gecko.${image_layers_field}.mLayers.iter()
.take(self.gecko.${image_layers_field}.mImageCount as usize)
.map(|layer| layer.mImage.clone())
.collect()
)
}
<%
fill_fields = "mRepeat mClip mOrigin mPositionX mPositionY mImage mSize"
if shorthand == "background":
fill_fields += " mAttachment mBlendMode"
else:
# mSourceURI uses mImageCount
fill_fields += " mMaskMode mComposite"
%>
pub fn fill_arrays(&mut self) {
use crate::gecko_bindings::bindings::Gecko_FillAllImageLayers;
use std::cmp;
let mut max_len = 1;
% for member in fill_fields.split():
max_len = cmp::max(max_len, self.gecko.${image_layers_field}.${member}Count);
% endfor
unsafe {
// While we could do this manually, we'd need to also manually
// run all the copy constructors, so we just delegate to gecko
Gecko_FillAllImageLayers(&mut self.gecko.${image_layers_field}, max_len);
}
}
</%def>
// TODO: Gecko accepts lists in most background-related properties. We just use
// the first element (which is the common case), but at some point we want to
// add support for parsing these lists in servo and pushing to nsTArray's.
<% skip_background_longhands = """background-repeat
background-image background-clip
background-origin background-attachment
background-size background-position
background-blend-mode
background-position-x
background-position-y""" %>
<%self:impl_trait style_struct_name="Background"
skip_longhands="${skip_background_longhands}">
<% impl_common_image_layer_properties("background") %>
<% impl_simple_image_array_property("attachment", "background", "mImage", "mAttachment", "Background") %>
<% impl_simple_image_array_property("blend_mode", "background", "mImage", "mBlendMode", "Background") %>
</%self:impl_trait>
<%self:impl_trait style_struct_name="List" skip_longhands="list-style-type">
pub fn set_list_style_type(&mut self, v: longhands::list_style_type::computed_value::T) {
use nsstring::{nsACString, nsCStr};
use self::longhands::list_style_type::computed_value::T;
match v {
T::None => unsafe {
bindings::Gecko_SetCounterStyleToNone(&mut self.gecko.mCounterStyle)
}
T::CounterStyle(s) => s.to_gecko_value(&mut self.gecko.mCounterStyle),
T::String(s) => unsafe {
bindings::Gecko_SetCounterStyleToString(
&mut self.gecko.mCounterStyle,
&nsCStr::from(&s) as &nsACString,
)
}
}
}
pub fn copy_list_style_type_from(&mut self, other: &Self) {
unsafe {
Gecko_CopyCounterStyle(&mut self.gecko.mCounterStyle, &other.gecko.mCounterStyle);
}
}
pub fn reset_list_style_type(&mut self, other: &Self) {
self.copy_list_style_type_from(other)
}
pub fn clone_list_style_type(&self) -> longhands::list_style_type::computed_value::T {
use self::longhands::list_style_type::computed_value::T;
use crate::values::Either;
use crate::values::generics::CounterStyle;
use crate::gecko_bindings::bindings;
let name = unsafe {
bindings::Gecko_CounterStyle_GetName(&self.gecko.mCounterStyle)
};
if !name.is_null() {
let name = unsafe { Atom::from_raw(name) };
if name == atom!("none") {
return T::None;
}
}
let result = CounterStyle::from_gecko_value(&self.gecko.mCounterStyle);
match result {
Either::First(counter_style) => T::CounterStyle(counter_style),
Either::Second(string) => T::String(string),
}
}
</%self:impl_trait>
<%self:impl_trait style_struct_name="Table">
</%self:impl_trait>
<%self:impl_trait style_struct_name="Effects">
</%self:impl_trait>
<%self:impl_trait style_struct_name="InheritedBox">
</%self:impl_trait>
<%self:impl_trait style_struct_name="InheritedTable"
skip_longhands="border-spacing">
pub fn set_border_spacing(&mut self, v: longhands::border_spacing::computed_value::T) {
self.gecko.mBorderSpacingCol = v.horizontal().0;
self.gecko.mBorderSpacingRow = v.vertical().0;
}
pub fn copy_border_spacing_from(&mut self, other: &Self) {
self.gecko.mBorderSpacingCol = other.gecko.mBorderSpacingCol;
self.gecko.mBorderSpacingRow = other.gecko.mBorderSpacingRow;
}
pub fn reset_border_spacing(&mut self, other: &Self) {
self.copy_border_spacing_from(other)
}
pub fn clone_border_spacing(&self) -> longhands::border_spacing::computed_value::T {
longhands::border_spacing::computed_value::T::new(
Au(self.gecko.mBorderSpacingCol).into(),
Au(self.gecko.mBorderSpacingRow).into()
)
}
</%self:impl_trait>
<%self:impl_trait style_struct_name="InheritedText"
skip_longhands="-webkit-text-stroke-width">
${impl_non_negative_length('_webkit_text_stroke_width',
'mWebkitTextStrokeWidth')}
</%self:impl_trait>
<%self:impl_trait style_struct_name="Text" skip_longhands="initial-letter">
pub fn set_initial_letter(&mut self, v: longhands::initial_letter::computed_value::T) {
use crate::values::generics::text::InitialLetter;
match v {
InitialLetter::Normal => {
self.gecko.mInitialLetterSize = 0.;
self.gecko.mInitialLetterSink = 0;
},
InitialLetter::Specified(size, sink) => {
self.gecko.mInitialLetterSize = size;
if let Some(sink) = sink {
self.gecko.mInitialLetterSink = sink;
} else {
self.gecko.mInitialLetterSink = size.floor() as i32;
}
}
}
}
pub fn copy_initial_letter_from(&mut self, other: &Self) {
self.gecko.mInitialLetterSize = other.gecko.mInitialLetterSize;
self.gecko.mInitialLetterSink = other.gecko.mInitialLetterSink;
}
pub fn reset_initial_letter(&mut self, other: &Self) {
self.copy_initial_letter_from(other)
}
pub fn clone_initial_letter(&self) -> longhands::initial_letter::computed_value::T {
use crate::values::generics::text::InitialLetter;
if self.gecko.mInitialLetterSize == 0. && self.gecko.mInitialLetterSink == 0 {
InitialLetter::Normal
} else if self.gecko.mInitialLetterSize.floor() as i32 == self.gecko.mInitialLetterSink {
InitialLetter::Specified(self.gecko.mInitialLetterSize, None)
} else {
InitialLetter::Specified(self.gecko.mInitialLetterSize, Some(self.gecko.mInitialLetterSink))
}
}
</%self:impl_trait>
<% skip_svg_longhands = """
mask-mode mask-repeat mask-clip mask-origin mask-composite mask-position-x mask-position-y mask-size mask-image
"""
%>
<%self:impl_trait style_struct_name="SVG"
skip_longhands="${skip_svg_longhands}">
<% impl_common_image_layer_properties("mask") %>
<% impl_simple_image_array_property("mode", "mask", "mMask", "mMaskMode", "SVG") %>
<% impl_simple_image_array_property("composite", "mask", "mMask", "mComposite", "SVG") %>
</%self:impl_trait>
<%self:impl_trait style_struct_name="InheritedSVG">
</%self:impl_trait>
<%self:impl_trait style_struct_name="InheritedUI">
</%self:impl_trait>
<%self:impl_trait style_struct_name="Column"
skip_longhands="column-count column-rule-width column-rule-style">
#[allow(unused_unsafe)]
pub fn set_column_count(&mut self, v: longhands::column_count::computed_value::T) {
use crate::gecko_bindings::structs::{nsStyleColumn_kColumnCountAuto, nsStyleColumn_kMaxColumnCount};
self.gecko.mColumnCount = match v {
ColumnCount::Integer(integer) => {
cmp::min(integer.0 as u32, unsafe { nsStyleColumn_kMaxColumnCount })
},
ColumnCount::Auto => nsStyleColumn_kColumnCountAuto
};
}
${impl_simple_copy('column_count', 'mColumnCount')}
pub fn clone_column_count(&self) -> longhands::column_count::computed_value::T {
use crate::gecko_bindings::structs::{nsStyleColumn_kColumnCountAuto, nsStyleColumn_kMaxColumnCount};
if self.gecko.mColumnCount != nsStyleColumn_kColumnCountAuto {
debug_assert!(self.gecko.mColumnCount >= 1 &&
self.gecko.mColumnCount <= nsStyleColumn_kMaxColumnCount);
ColumnCount::Integer((self.gecko.mColumnCount as i32).into())
} else {
ColumnCount::Auto
}
}
<% impl_non_negative_length("column_rule_width", "mColumnRuleWidth",
round_to_pixels=True) %>
${impl_simple('column_rule_style', 'mColumnRuleStyle')}
</%self:impl_trait>
<%self:impl_trait style_struct_name="Counters">
pub fn ineffective_content_property(&self) -> bool {
!self.gecko.mContent.is_items()
}
</%self:impl_trait>
<%self:impl_trait style_struct_name="UI">
</%self:impl_trait>
<%self:impl_trait style_struct_name="XUL">
</%self:impl_trait>
% for style_struct in data.style_structs:
${declare_style_struct(style_struct)}
${impl_style_struct(style_struct)}
% endfor
/// Assert that the initial values set in Gecko style struct constructors
/// match the values returned by `get_initial_value()` for each longhand.
#[cfg(feature = "gecko")]
#[inline]
pub fn assert_initial_values_match(data: &PerDocumentStyleData) {
if cfg!(debug_assertions) {<|fim▁hole|> SKIPPED = [
"border-top-width",
"border-bottom-width",
"border-left-width",
"border-right-width",
"font-family",
"font-size",
"outline-width",
]
TO_TEST = [p for p in data.longhands if p.enabled_in != "" and not p.logical and not p.name in SKIPPED]
%>
% for property in TO_TEST:
assert_eq!(
cv.clone_${property.ident}(),
longhands::${property.ident}::get_initial_value(),
concat!(
"initial value in Gecko style struct for ",
stringify!(${property.ident}),
" must match longhands::",
stringify!(${property.ident}),
"::get_initial_value()"
)
);
% endfor
}
}<|fim▁end|> | let data = data.borrow();
let cv = data.stylist.device().default_computed_values();
<%
# Skip properties with initial values that change at computed value time. |
<|file_name|>external-macro-src.rs<|end_file_name|><|fim▁begin|>// aux-build:external-macro-src.rs
#![crate_name = "foo"]
#[macro_use]<|fim▁hole|>
// @has foo/struct.Foo.html
// @has - '//a[@href="../src/foo/external-macro-src.rs.html#12"]' '[src]'
make_foo!();<|fim▁end|> | extern crate external_macro_src;
// @has foo/index.html '//a[@href="../src/foo/external-macro-src.rs.html#3-12"]' '[src]' |
<|file_name|>room.rs<|end_file_name|><|fim▁begin|>use util::{Privacy, AppendToQueryParams};
use message::{Color, MessageFormat};
use url::UrlQuery;
use url::form_urlencoded::Serializer;
#[derive(Debug, Hash, Eq, PartialEq)]
pub struct RoomsRequest {
pub start_index: Option<u64>,
pub max_results: Option<u64>,
pub include_private: Option<bool>,
pub include_archived: Option<bool>
}
impl AppendToQueryParams for RoomsRequest {
fn append_to(&self, query: &mut Serializer<UrlQuery>){
self.start_index.map(|start_index| query.append_pair("start-index", &start_index.to_string()));
self.max_results.map(|max_results| query.append_pair("max-results", &max_results.to_string()));
self.include_private.map(|include_private| query.append_pair("include-private", &include_private.to_string()));
self.include_archived.map(|include_archived| query.append_pair("include-archived", &include_archived.to_string()));
}
}
#[derive(Debug, Hash, Eq, PartialEq, Deserialize)]
pub struct Rooms {
#[serde(rename = "startIndex")]
pub start_index: u64,
#[serde(rename = "maxResults")]
pub max_results: u64,
pub items: Vec<Room>,
pub links: RoomsLinks
}
#[derive(Debug, Hash, Eq, PartialEq, Deserialize)]
pub struct RoomsLinks {
#[serde(rename = "self")]
pub self_: String,
pub prev: Option<String>,
pub next: Option<String>
}
#[derive(Debug, Hash, Eq, PartialEq, Deserialize)]
pub struct Room {
pub name: String,
pub id: u64,
pub links: RoomDetailLinks
}
#[derive(Debug, Hash, Eq, PartialEq, Deserialize)]
pub struct RoomDetail {
pub xmpp_jid: String,
pub statistics: RoomDetailStatistics,
pub name: String,
pub links: RoomDetailLinks,
pub created: String,
pub is_archived: bool,
pub privacy: Privacy,
pub is_guest_accessible: bool,
pub topic: String,
pub avatar_url: Option<String>,
pub id: u64,
pub guest_access_url: Option<String>
}
#[derive(Debug, Hash, Eq, PartialEq, Deserialize)]
pub struct RoomDetailStatistics {
pub links: RoomDetailStatisticsLinks
}
#[derive(Debug, Hash, Eq, PartialEq, Deserialize)]
pub struct RoomDetailStatisticsLinks {
#[serde(rename = "self")]
pub self_: String
}
#[derive(Debug, Hash, Eq, PartialEq, Deserialize)]
pub struct RoomDetailLinks {
#[serde(rename = "self")]
pub self_: String,
pub webhooks: String,
pub members: Option<String>,
pub participants: String
}
#[derive(Debug, Hash, Eq, PartialEq, Deserialize)]
pub struct RoomDetailOwner {
pub mention_name: String,
pub id: u64,
pub links: RoomDetailOwnerLinks,
pub name: String
}
#[derive(Debug, Hash, Eq, PartialEq, Deserialize)]
pub struct RoomDetailOwnerLinks {
#[serde(rename = "self")]
pub self_: String
}
#[derive(Debug, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct RoomUpdate {
pub name: Option<String>,
pub privacy: Option<Privacy>,
pub is_archived: Option<bool>,
pub is_guest_accessible: Option<bool>,
pub topic: Option<String>,
pub owner: Option<RoomUpdateOwner>
}
#[derive(Debug, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct RoomUpdateOwner {
pub id: Option<String>
}
#[derive(Debug, Hash, Eq, PartialEq, Serialize, Deserialize)]
pub struct Notification {
pub color: Color,
pub message: String,
pub notify: bool,
pub message_format: MessageFormat
}
impl Default for Notification {
fn default() -> Self {
Notification {
color: Color::default(),
message: String::default(),
notify: false,
message_format: MessageFormat::default()
}
}
}
#[cfg(test)]
mod test {
use super::*;
use url::Url;
use serde_json::{self};
#[test]
fn unit_rooms_links() {
let expected = RoomsLinks {
self_: "https://www.example.com".to_owned(),
prev: Some("https://www.example.com".to_owned()),
next: Some("https://www.example.com".to_owned())
};
let actual: RoomsLinks = serde_json::from_str(r#"{
"self":"https://www.example.com",<|fim▁hole|> }"#).unwrap();
assert_eq!(actual, expected);
}
#[test]
fn unit_default_rooms_request_should_create_empty_params(){
let rooms_request = RoomsRequest{ start_index: None,
max_results: None,
include_private: None,
include_archived: None };
let mut url = Url::parse("https://rsolomo.github.io/hipchat-client/hipchat_client/index.html").unwrap();
rooms_request.append_to(&mut url.query_pairs_mut());
assert_eq!(Some(""), url.query());
}
#[test]
fn unit_populated_rooms_request_should_create_encoded_params(){
let rooms_request = RoomsRequest{ start_index: Some(1),
max_results: Some(10),
include_private: Some(true),
include_archived: Some(true) };
let mut url = Url::parse("https://rsolomo.github.io/hipchat-client/hipchat_client/index.html").unwrap();
rooms_request.append_to(&mut url.query_pairs_mut());
assert_eq!(Some("start-index=1&max-results=10&include-private=true&include-archived=true"), url.query());
}
}<|fim▁end|> | "prev":"https://www.example.com",
"next":"https://www.example.com" |
<|file_name|>add.js<|end_file_name|><|fim▁begin|>import { create, visitable } from 'ember-cli-page-object';<|fim▁hole|>import editForm from 'vault/tests/pages/components/identity/edit-form';
export default create({
visit: visitable('/vault/access/identity/:item_type/aliases/add/:id'),
editForm,
});<|fim▁end|> | |
<|file_name|>tooltip.go<|end_file_name|><|fim▁begin|>// Copyright 2010 The Walk Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package walk
import (
"syscall"
"unsafe"
)
import (
"github.com/lxn/win"
)
func init() {
var err error
if globalToolTip, err = NewToolTip(); err != nil {
panic(err)
}
}
type ToolTip struct {
WindowBase
}
var globalToolTip *ToolTip
func NewToolTip() (*ToolTip, error) {
tt := new(ToolTip)
if err := InitWindow(
tt,
nil,
"tooltips_class32",
win.WS_POPUP|win.TTS_ALWAYSTIP,
win.WS_EX_TOPMOST); err != nil {
return nil, err
}
succeeded := false
defer func() {
if !succeeded {
tt.Dispose()
}
}()
win.SetWindowPos(tt.hWnd, win.HWND_TOPMOST, 0, 0, 0, 0, win.SWP_NOMOVE|win.SWP_NOSIZE|win.SWP_NOACTIVATE)
succeeded = true
return tt, nil
}
func (*ToolTip) LayoutFlags() LayoutFlags {
return 0
}
func (tt *ToolTip) SizeHint() Size {
return Size{0, 0}
}
func (tt *ToolTip) Title() string {
var gt win.TTGETTITLE
buf := make([]uint16, 100)
gt.DwSize = uint32(unsafe.Sizeof(gt))
gt.Cch = uint32(len(buf))
gt.PszTitle = &buf[0]
tt.SendMessage(win.TTM_GETTITLE, 0, uintptr(unsafe.Pointer(>)))
return syscall.UTF16ToString(buf)
}
func (tt *ToolTip) SetTitle(title string) error {
return tt.setTitle(title, win.TTI_NONE)
}
func (tt *ToolTip) SetInfoTitle(title string) error {
return tt.setTitle(title, win.TTI_INFO)
}
func (tt *ToolTip) SetWarningTitle(title string) error {
return tt.setTitle(title, win.TTI_WARNING)
}
func (tt *ToolTip) SetErrorTitle(title string) error {
return tt.setTitle(title, win.TTI_ERROR)
}
func (tt *ToolTip) setTitle(title string, icon uintptr) error {
if len(title) > 99 {
title = title[:99]
}
if win.FALSE == tt.SendMessage(win.TTM_SETTITLE, icon, uintptr(unsafe.Pointer(syscall.StringToUTF16Ptr(title)))) {
return newError("TTM_SETTITLE failed")
}
return nil
}
func (tt *ToolTip) AddTool(tool Widget) error {
hwnd := tool.Handle()
var ti win.TOOLINFO
ti.CbSize = uint32(unsafe.Sizeof(ti))
ti.Hwnd = hwnd
ti.UFlags = win.TTF_IDISHWND | win.TTF_SUBCLASS
ti.UId = uintptr(hwnd)
if win.FALSE == tt.SendMessage(win.TTM_ADDTOOL, 0, uintptr(unsafe.Pointer(&ti))) {
return newError("TTM_ADDTOOL failed")
}
return nil
}
func (tt *ToolTip) RemoveTool(tool Widget) error {
hwnd := tool.Handle()
var ti win.TOOLINFO
ti.CbSize = uint32(unsafe.Sizeof(ti))
ti.Hwnd = hwnd
ti.UId = uintptr(hwnd)
tt.SendMessage(win.TTM_DELTOOL, 0, uintptr(unsafe.Pointer(&ti)))
return nil
}
func (tt *ToolTip) Text(tool Widget) string {
ti := tt.toolInfo(tool)
if ti == nil {
return ""<|fim▁hole|>
return win.UTF16PtrToString(ti.LpszText)
}
func (tt *ToolTip) SetText(tool Widget, text string) error {
ti := tt.toolInfo(tool)
if ti == nil {
return newError("unknown tool")
}
if len(text) > 79 {
text = text[:79]
}
ti.LpszText = syscall.StringToUTF16Ptr(text)
tt.SendMessage(win.TTM_SETTOOLINFO, 0, uintptr(unsafe.Pointer(ti)))
return nil
}
func (tt *ToolTip) toolInfo(tool Widget) *win.TOOLINFO {
var ti win.TOOLINFO
var buf [80]uint16
hwnd := tool.Handle()
ti.CbSize = uint32(unsafe.Sizeof(ti))
ti.Hwnd = hwnd
ti.UId = uintptr(hwnd)
ti.LpszText = &buf[0]
if win.FALSE == tt.SendMessage(win.TTM_GETTOOLINFO, 0, uintptr(unsafe.Pointer(&ti))) {
return nil
}
return &ti
}<|fim▁end|> | } |
<|file_name|>metric_test.go<|end_file_name|><|fim▁begin|>package sql_test
import (
"bytes"
"testing"
"github.com/cockroachdb/cockroach/roachpb"
"github.com/cockroachdb/cockroach/testutils"
"github.com/cockroachdb/cockroach/testutils/storageutils"
"github.com/cockroachdb/cockroach/util/leaktest"
)<|fim▁hole|>func TestQueryCounts(t *testing.T) {
defer leaktest.AfterTest(t)()
s, sqlDB, _ := setup(t)
defer cleanup(s, sqlDB)
var testcases = []struct {
query string
txnBeginCount int64
selectCount int64
updateCount int64
insertCount int64
deleteCount int64
ddlCount int64
miscCount int64
txnCommitCount int64
txnRollbackCount int64
}{
{"", 0, 0, 0, 0, 0, 0, 0, 0, 0},
{"BEGIN; END", 1, 0, 0, 0, 0, 0, 0, 1, 0},
{"SELECT 1", 1, 1, 0, 0, 0, 0, 0, 1, 0},
{"CREATE DATABASE mt", 1, 1, 0, 0, 0, 1, 0, 1, 0},
{"CREATE TABLE mt.n (num INTEGER)", 1, 1, 0, 0, 0, 2, 0, 1, 0},
{"INSERT INTO mt.n VALUES (3)", 1, 1, 0, 1, 0, 2, 0, 1, 0},
{"UPDATE mt.n SET num = num + 1", 1, 1, 1, 1, 0, 2, 0, 1, 0},
{"DELETE FROM mt.n", 1, 1, 1, 1, 1, 2, 0, 1, 0},
{"ALTER TABLE mt.n ADD COLUMN num2 INTEGER", 1, 1, 1, 1, 1, 3, 0, 1, 0},
{"EXPLAIN SELECT * FROM mt.n", 1, 1, 1, 1, 1, 3, 1, 1, 0},
{"BEGIN; UPDATE mt.n SET num = num + 1; END", 2, 1, 2, 1, 1, 3, 1, 2, 0},
{"SELECT * FROM mt.n; SELECT * FROM mt.n; SELECT * FROM mt.n", 2, 4, 2, 1, 1, 3, 1, 2, 0},
{"DROP TABLE mt.n", 2, 4, 2, 1, 1, 4, 1, 2, 0},
{"SET database = system", 2, 4, 2, 1, 1, 4, 2, 2, 0},
}
for _, tc := range testcases {
if tc.query != "" {
if _, err := sqlDB.Exec(tc.query); err != nil {
t.Fatalf("unexpected error executing '%s': %s'", tc.query, err)
}
}
// Force metric snapshot refresh.
if err := s.WriteSummaries(); err != nil {
t.Fatal(err)
}
checkCounterEQ(t, s, "txn.begin.count", tc.txnBeginCount)
checkCounterEQ(t, s, "select.count", tc.selectCount)
checkCounterEQ(t, s, "update.count", tc.updateCount)
checkCounterEQ(t, s, "insert.count", tc.insertCount)
checkCounterEQ(t, s, "delete.count", tc.deleteCount)
checkCounterEQ(t, s, "ddl.count", tc.ddlCount)
checkCounterEQ(t, s, "misc.count", tc.miscCount)
checkCounterEQ(t, s, "txn.commit.count", tc.txnCommitCount)
checkCounterEQ(t, s, "txn.rollback.count", tc.txnRollbackCount)
checkCounterEQ(t, s, "txn.abort.count", 0)
// Everything after this query will also fail, so quit now to avoid deluge of errors.
if t.Failed() {
t.FailNow()
}
}
}
func TestAbortCountConflictingWrites(t *testing.T) {
defer leaktest.AfterTest(t)()
ctx, cmdFilters := createTestServerContext()
s, sqlDB, _ := setupWithContext(t, ctx)
defer cleanup(s, sqlDB)
if _, err := sqlDB.Exec("CREATE DATABASE db"); err != nil {
t.Fatal(err)
}
if _, err := sqlDB.Exec("CREATE TABLE db.t (k TEXT PRIMARY KEY, v TEXT)"); err != nil {
t.Fatal(err)
}
// Inject errors on the INSERT below.
restarted := false
cmdFilters.AppendFilter(func(args storageutils.FilterArgs) *roachpb.Error {
switch req := args.Req.(type) {
// SQL INSERT generates ConditionalPuts for unique indexes (such as the PK).
case *roachpb.ConditionalPutRequest:
if bytes.Contains(req.Value.RawBytes, []byte("marker")) && !restarted {
restarted = true
return roachpb.NewErrorWithTxn(
roachpb.NewTransactionAbortedError(), args.Hdr.Txn)
}
}
return nil
}, false)
txn, err := sqlDB.Begin()
if err != nil {
t.Fatal(err)
}
_, err = txn.Exec("INSERT INTO db.t VALUES ('key', 'marker')")
if !testutils.IsError(err, "aborted") {
t.Fatal(err)
}
if err = txn.Rollback(); err != nil {
t.Fatal(err)
}
checkCounterEQ(t, s, "txn.abort.count", 1)
checkCounterEQ(t, s, "txn.begin.count", 1)
checkCounterEQ(t, s, "txn.rollback.count", 0)
checkCounterEQ(t, s, "txn.commit.count", 0)
checkCounterEQ(t, s, "insert.count", 1)
}
// TestErrorDuringTransaction tests that the transaction abort count goes up when a query
// results in an error during a txn.
func TestAbortCountErrorDuringTransaction(t *testing.T) {
defer leaktest.AfterTest(t)()
s, sqlDB, _ := setup(t)
defer cleanup(s, sqlDB)
txn, err := sqlDB.Begin()
if err != nil {
t.Fatal(err)
}
if _, err := txn.Query("SELECT * FROM i_do.not_exist"); err == nil {
t.Fatal("Expected an error but didn't get one")
}
checkCounterEQ(t, s, "txn.abort.count", 1)
checkCounterEQ(t, s, "txn.begin.count", 1)
checkCounterEQ(t, s, "select.count", 1)
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from functools import reduce # noqa
except Exception:
pass
try:
from .tornado_handler import TornadoHandler # noqa
except ImportError:<|fim▁hole|><|fim▁end|> | pass
from .environmentdump import EnvironmentDump # noqa
from .healthcheck import HealthCheck # noqa |
<|file_name|>linear_test.go<|end_file_name|><|fim▁begin|>package linear_test
import (
"github.com/amitkgupta/goodlearn/data/columntype"
"github.com/amitkgupta/goodlearn/data/dataset"
"github.com/amitkgupta/goodlearn/data/row"
"github.com/amitkgupta/goodlearn/data/slice"
"github.com/amitkgupta/goodlearn/errors/regressor/linearerrors"
"github.com/amitkgupta/goodlearn/regressor"
"github.com/amitkgupta/goodlearn/regressor/linear"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("LinearRegressor", func() {
var linearRegressor regressor.Regressor
Describe("Train", func() {
var trainingData dataset.Dataset
BeforeEach(func() {
linearRegressor = linear.NewLinearRegressor()
})
Context("When the dataset's features are not all floats", func() {
BeforeEach(func() {
columnTypes, err := columntype.StringsToColumnTypes([]string{"3.3", "bye", "0"})
Ω(err).ShouldNot(HaveOccurred())
trainingData = dataset.NewDataset([]int{1, 2}, []int{0}, columnTypes)
})
It("Returns an error", func() {
err := linearRegressor.Train(trainingData)
Ω(err).Should(HaveOccurred())
Ω(err).Should(BeAssignableToTypeOf(linearerrors.NonFloatFeaturesTrainingSetError{}))
})
})
Context("When the dataset's targets are not all floats", func() {
BeforeEach(func() {
columnTypes, err := columntype.StringsToColumnTypes([]string{"hi", "2.3", "0"})
Ω(err).ShouldNot(HaveOccurred())
trainingData = dataset.NewDataset([]int{1, 2}, []int{0}, columnTypes)
})
It("Returns an error", func() {
err := linearRegressor.Train(trainingData)
Ω(err).Should(HaveOccurred())
Ω(err).Should(BeAssignableToTypeOf(linearerrors.NonFloatTargetsTrainingSetError{}))
})
})
Context("When the dataset has zero target values", func() {
BeforeEach(func() {
columnTypes, err := columntype.StringsToColumnTypes([]string{"1.3", "2.0"})
Ω(err).ShouldNot(HaveOccurred())
trainingData = dataset.NewDataset([]int{1, 0}, []int{}, columnTypes)
})
It("Returns an error", func() {
err := linearRegressor.Train(trainingData)
Ω(err).Should(HaveOccurred())
Ω(err).Should(BeAssignableToTypeOf(linearerrors.InvalidNumberOfTargetsError{}))
})
})
Context("When the dataset has more than one target value", func() {
BeforeEach(func() {
columnTypes, err := columntype.StringsToColumnTypes([]string{"1.3", "2.2", "0"})
Ω(err).ShouldNot(HaveOccurred())
trainingData = dataset.NewDataset([]int{1}, []int{0, 2}, columnTypes)
})
It("Returns an error", func() {
err := linearRegressor.Train(trainingData)
Ω(err).Should(HaveOccurred())
Ω(err).Should(BeAssignableToTypeOf(linearerrors.InvalidNumberOfTargetsError{}))
})
})
Context("When the dataset has no features", func() {
BeforeEach(func() {
columnTypes, err := columntype.StringsToColumnTypes([]string{"1.3"})
Ω(err).ShouldNot(HaveOccurred())
trainingData = dataset.NewDataset([]int{}, []int{0}, columnTypes)
})
It("Returns an error", func() {
err := linearRegressor.Train(trainingData)
Ω(err).Should(HaveOccurred())
Ω(err).Should(BeAssignableToTypeOf(linearerrors.NoFeaturesError{}))
})
})
Context("When the dataset is valid", func() {
BeforeEach(func() {
columnTypes, err := columntype.StringsToColumnTypes([]string{"1.2", "3.4", "5.6"})
Ω(err).ShouldNot(HaveOccurred())
trainingData = dataset.NewDataset([]int{1, 2}, []int{0}, columnTypes)
err = trainingData.AddRowFromStrings([]string{"1.2", "3.0", "0.5"})
Ω(err).ShouldNot(HaveOccurred())
})
It("Doesn't return an error", func() {
err := linearRegressor.Train(trainingData)
Ω(err).ShouldNot(HaveOccurred())
})
})
})
Describe("Predict", func() {
var testRow row.Row
var emptyTarget slice.Slice
var columnTypes []columntype.ColumnType
var err error
BeforeEach(func() {
linearRegressor = linear.NewLinearRegressor()
columnTypes, err = columntype.StringsToColumnTypes([]string{"0", "0", "0"})
Ω(err).ShouldNot(HaveOccurred())
emptyTarget, err = slice.SliceFromRawValues(true, []int{}, columnTypes, []float64{})
Ω(err).ShouldNot(HaveOccurred())
})
Context("When the regressor hasn't been trained", func() {
BeforeEach(func() {
features, err := slice.SliceFromRawValues(true, []int{1}, columnTypes, []float64{0, 1, 2})
Ω(err).ShouldNot(HaveOccurred())
testRow = row.NewRow(features, emptyTarget, 1)
})
It("Returns an error", func() {
_, err := linearRegressor.Predict(testRow)
Ω(err).Should(HaveOccurred())
Ω(err).Should(BeAssignableToTypeOf(linearerrors.UntrainedRegressorError{}))
})
})
Context("When the regressor has been trained", func() {
BeforeEach(func() {
trainingData := dataset.NewDataset([]int{1, 2}, []int{0}, columnTypes)
err = trainingData.AddRowFromStrings([]string{"-0.002001", "2", "3"})
Ω(err).ShouldNot(HaveOccurred())
err = trainingData.AddRowFromStrings([]string{"-0.001001", "2", "2"})
Ω(err).ShouldNot(HaveOccurred())
err = trainingData.AddRowFromStrings([]string{"-0.000999", "3", "3"})
Ω(err).ShouldNot(HaveOccurred())
err = linearRegressor.Train(trainingData)<|fim▁hole|> BeforeEach(func() {
features, err := slice.SliceFromRawValues(true, []int{1}, columnTypes, []float64{0, 1, 2})
Ω(err).ShouldNot(HaveOccurred())
testRow = row.NewRow(features, emptyTarget, 1)
})
It("Returns an error", func() {
_, err := linearRegressor.Predict(testRow)
Ω(err).Should(HaveOccurred())
Ω(err).Should(BeAssignableToTypeOf(linearerrors.RowLengthMismatchError{}))
})
})
Context("When the test row's features are not all floats", func() {
BeforeEach(func() {
features, err := slice.SliceFromRawValues(false, []int{0, 1}, columnTypes, []float64{0, 1, 2})
Ω(err).ShouldNot(HaveOccurred())
testRow = row.NewRow(features, emptyTarget, 2)
})
It("Returns an error", func() {
_, err := linearRegressor.Predict(testRow)
Ω(err).Should(HaveOccurred())
Ω(err).Should(BeAssignableToTypeOf(linearerrors.NonFloatFeaturesTestRowError{}))
})
})
Context("When the test row is compatible with the training data", func() {
BeforeEach(func() {
features, err := slice.SliceFromRawValues(true, []int{1, 2}, columnTypes, []float64{0, 3.3, 1.0})
Ω(err).ShouldNot(HaveOccurred())
testRow = row.NewRow(features, emptyTarget, 2)
})
It("Predicts the target value for the test row", func() {
predictedTarget, err := linearRegressor.Predict(testRow)
Ω(err).ShouldNot(HaveOccurred())
Ω(predictedTarget).Should(BeNumerically("~", 0.0013, 0.0001))
})
})
})
})
})<|fim▁end|> | Ω(err).ShouldNot(HaveOccurred())
})
Context("When number of test features does not equal number of training features", func() { |
<|file_name|>enum-alignment.rs<|end_file_name|><|fim▁begin|>// run-pass<|fim▁hole|>
use std::mem;
fn addr_of<T>(ptr: &T) -> usize {
ptr as *const T as usize
}
fn is_aligned<T>(ptr: &T) -> bool {
unsafe {
let addr: usize = mem::transmute(ptr);
(addr % mem::min_align_of::<T>()) == 0
}
}
pub fn main() {
let x = Some(0u64);
match x {
None => panic!(),
Some(ref y) => assert!(is_aligned(y))
}
}<|fim▁end|> | #![allow(dead_code)]
#![allow(deprecated)] |
<|file_name|>pymem.rs<|end_file_name|><|fim▁begin|>use libc::{c_void, size_t};
#[cfg(Py_3_4)]
#[cfg(not(Py_LIMITED_API))]
#[cfg_attr(windows, link(name="pythonXY"))] extern "C" {
pub fn PyMem_RawMalloc(size: size_t) -> *mut c_void;
#[cfg(Py_3_5)]
pub fn PyMem_RawCalloc(nelem: size_t, elsize: size_t)
-> *mut c_void;
pub fn PyMem_RawRealloc(ptr: *mut c_void, new_size: size_t)
-> *mut c_void;
pub fn PyMem_RawFree(ptr: *mut c_void) -> ();
}
#[cfg_attr(windows, link(name="pythonXY"))] extern "C" {
pub fn PyMem_Malloc(size: size_t) -> *mut c_void;
#[cfg(Py_3_5)]
pub fn PyMem_Calloc(nelem: size_t, elsize: size_t) -> *mut c_void;
pub fn PyMem_Realloc(ptr: *mut c_void, new_size: size_t)<|fim▁hole|> -> *mut c_void;
pub fn PyMem_Free(ptr: *mut c_void) -> ();
}
#[cfg(Py_3_4)]
#[cfg(not(Py_LIMITED_API))]
#[repr(C)]
#[derive(Copy, Clone)]
pub enum PyMemAllocatorDomain {
PYMEM_DOMAIN_RAW,
PYMEM_DOMAIN_MEM,
PYMEM_DOMAIN_OBJ
}
#[repr(C)]
#[derive(Copy, Clone)]
#[cfg(all(Py_3_4, not(Py_3_5), not(Py_LIMITED_API)))]
pub struct PyMemAllocator {
pub ctx: *mut c_void,
pub malloc: Option<extern "C" fn(ctx: *mut c_void,
size: size_t)
-> *mut c_void>,
pub realloc: Option<extern "C" fn(ctx: *mut c_void,
ptr: *mut c_void,
new_size: size_t)
-> *mut c_void>,
pub free: Option<extern "C" fn(ctx: *mut c_void,
ptr: *mut c_void)
-> ()>,
}
#[repr(C)]
#[derive(Copy, Clone)]
#[cfg(all(Py_3_5, not(Py_LIMITED_API)))]
pub struct PyMemAllocatorEx {
pub ctx: *mut c_void,
pub malloc: Option<extern "C" fn(ctx: *mut c_void,
size: size_t)
-> *mut c_void>,
pub calloc: Option<extern "C" fn(ctx: *mut c_void,
nelem: size_t,
elsize: size_t)
-> *mut c_void>,
pub realloc: Option<extern "C" fn(ctx: *mut c_void,
ptr: *mut c_void,
new_size: size_t)
-> *mut c_void>,
pub free: Option<extern "C" fn(ctx: *mut c_void,
ptr: *mut c_void)
-> ()>,
}
#[cfg(Py_3_4)]
#[cfg(not(Py_LIMITED_API))]
#[cfg_attr(windows, link(name="pythonXY"))] extern "C" {
#[cfg(not(Py_3_5))]
pub fn PyMem_GetAllocator(domain: PyMemAllocatorDomain,
allocator: *mut PyMemAllocator) -> ();
#[cfg(not(Py_3_5))]
pub fn PyMem_SetAllocator(domain: PyMemAllocatorDomain,
allocator: *mut PyMemAllocator) -> ();
#[cfg(Py_3_5)]
pub fn PyMem_GetAllocator(domain: PyMemAllocatorDomain,
allocator: *mut PyMemAllocatorEx) -> ();
#[cfg(Py_3_5)]
pub fn PyMem_SetAllocator(domain: PyMemAllocatorDomain,
allocator: *mut PyMemAllocatorEx) -> ();
pub fn PyMem_SetupDebugHooks() -> ();
}<|fim▁end|> | |
<|file_name|>generics-and-bounds.rs<|end_file_name|><|fim▁begin|>// build-pass (FIXME(62277): could be check-pass?)
// edition:2018
// compile-flags: --crate-type lib
use std::future::Future;
pub async fn simple_generic<T>() {}
pub trait Foo {
fn foo(&self) {}
}
struct FooType;
impl Foo for FooType {}
pub async fn call_generic_bound<F: Foo>(f: F) {
f.foo()
}
pub async fn call_where_clause<F>(f: F)
where
F: Foo,
{
f.foo()
}
pub async fn call_impl_trait(f: impl Foo) {
f.foo()
}
pub async fn call_with_ref(f: &impl Foo) {
f.foo()
}
pub fn async_fn_with_same_generic_params_unifies() {
let mut a = call_generic_bound(FooType);
a = call_generic_bound(FooType);
let mut b = call_where_clause(FooType);
b = call_where_clause(FooType);
let mut c = call_impl_trait(FooType);
c = call_impl_trait(FooType);
let f_one = FooType;
let f_two = FooType;<|fim▁hole|>
pub fn simple_generic_block<T>() -> impl Future<Output = ()> {
async move {}
}
pub fn call_generic_bound_block<F: Foo>(f: F) -> impl Future<Output = ()> {
async move { f.foo() }
}
pub fn call_where_clause_block<F>(f: F) -> impl Future<Output = ()>
where
F: Foo,
{
async move { f.foo() }
}
pub fn call_impl_trait_block(f: impl Foo) -> impl Future<Output = ()> {
async move { f.foo() }
}
pub fn call_with_ref_block<'a>(f: &'a (impl Foo + 'a)) -> impl Future<Output = ()> + 'a {
async move { f.foo() }
}
pub fn async_block_with_same_generic_params_unifies() {
let mut a = call_generic_bound_block(FooType);
a = call_generic_bound_block(FooType);
let mut b = call_where_clause_block(FooType);
b = call_where_clause_block(FooType);
let mut c = call_impl_trait_block(FooType);
c = call_impl_trait_block(FooType);
let f_one = FooType;
let f_two = FooType;
let mut d = call_with_ref_block(&f_one);
d = call_with_ref_block(&f_two);
}<|fim▁end|> | let mut d = call_with_ref(&f_one);
d = call_with_ref(&f_two);
} |
<|file_name|>angular-locale_en-jm.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module("ngLocale", [], ["$provide", function ($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {<|fim▁hole|> }
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"Sunday",
"Monday",
"Tuesday",
"Wednesday",
"Thursday",
"Friday",
"Saturday"
],
"ERANAMES": [
"Before Christ",
"Anno Domini"
],
"ERAS": [
"BC",
"AD"
],
"FIRSTDAYOFWEEK": 6,
"MONTH": [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
],
"SHORTDAY": [
"Sun",
"Mon",
"Tue",
"Wed",
"Thu",
"Fri",
"Sat"
],
"SHORTMONTH": [
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec"
],
"STANDALONEMONTH": [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
],
"WEEKENDRANGE": [
5,
6
],
"fullDate": "EEEE, d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y h:mm:ss a",
"mediumDate": "d MMM y",
"mediumTime": "h:mm:ss a",
"short": "d/M/yy h:mm a",
"shortDate": "d/M/yy",
"shortTime": "h:mm a"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "$",
"DECIMAL_SEP": ".",
"GROUP_SEP": ",",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-\u00a4",
"negSuf": "",
"posPre": "\u00a4",
"posSuf": ""
}
]
},
"id": "en-jm",
"localeID": "en_JM",
"pluralCat": function (n, opt_precision) {
var i = n | 0;
var vf = getVF(n, opt_precision);
if (i == 1 && vf.v == 0) {
return PLURAL_CATEGORY.ONE;
}
return PLURAL_CATEGORY.OTHER;
}
});
}]);<|fim▁end|> | v = Math.min(getDecimals(n), 3); |
<|file_name|>control_flow_util.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilty functions for control flow.
This file is necessary to avoid cyclic dependencies between ops.py and
control_flow_ops.py.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import traceback
from tensorflow.python.platform import tf_logging as logging
ENABLE_CONTROL_FLOW_V2 = (os.getenv("TF_ENABLE_CONTROL_FLOW_V2", "0") != "0" or
os.getenv("TF_ENABLE_COND_V2", "0") != "0" or
os.getenv("TF_ENABLE_WHILE_V2", "0") != "0" or
os.getenv("TF_ENABLE_TENSOR_ARRAY_V2", "0") != "0")
def EnableControlFlowV2(graph):
"""Returns whether control flow v2 should be used in `graph`."""
# Enable new control flow in FuncGraphs (but not legacy _FuncGraphs).
# TODO(skyewm): do something better than hasattr without messing up imports.
return ENABLE_CONTROL_FLOW_V2 or (
graph.building_function and not hasattr(graph, "_captured"))
def IsInXLAContext(op):
try:
xla_compile = op.get_attr("_XlaCompile")
if xla_compile: return True
except ValueError:
pass
ctxt = op._get_control_flow_context() # pylint: disable=protected-access
return GetContainingXLAContext(ctxt) is not None
def InXlaContext(graph):
ctxt = graph._get_control_flow_context() # pylint: disable=protected-access
return GetContainingXLAContext(ctxt) is not None
<|fim▁hole|> while True:
if InXlaContext(graph): return True
try:
graph = graph.outer_graph
except AttributeError:
return False
def IsInWhileLoop(op):
ctxt = op._get_control_flow_context() # pylint: disable=protected-access
return GetContainingWhileContext(ctxt) is not None
def IsInCond(op):
ctxt = op._get_control_flow_context() # pylint: disable=protected-access
return GetContainingCondContext(ctxt) is not None
def IsSwitch(op):
"""Return true if `op` is a Switch."""
return op.type == "Switch" or op.type == "RefSwitch"
def IsMerge(op):
"""Return true if `op` is a Merge."""
return op.type == "Merge" or op.type == "RefMerge"
def IsLoopEnter(op):
"""Returns true if `op` is an Enter."""
return op.type == "Enter" or op.type == "RefEnter"
def IsLoopExit(op):
"""Return true if `op` is an Exit."""
return op.type == "Exit" or op.type == "RefExit"
def IsCondSwitch(op):
"""Return true if `op` is the Switch for a conditional."""
if not IsSwitch(op):
return False
if not op.outputs:
return False
# Switch nodes are not part of the cond control flow context that they
# represent, so consider the consumers of its outputs to determine if it is
# cond switch or not. A switch is a cond switch iff all its consumers are in
# cond contexts.
is_cond_switch = True
for o in op.outputs:
for c in o.consumers():
ctxt = c._get_control_flow_context() # pylint: disable=protected-access
if IsLoopEnter(c):
ctxt = ctxt.outer_context
is_cond_switch = is_cond_switch and (ctxt is not None and
ctxt.IsCondContext())
return is_cond_switch
def IsCondMerge(op):
"""Return true if `op` is the Merge for a conditional."""
if not IsMerge(op):
return False
if not op.inputs:
return False
# Merge nodes are not part of the cond control flow context that they
# represent, so consider the inputs to the merge of to determine if it is
# cond merge or not: A merge is a cond merge iff all its inputs are in
# cond contexts.
is_cond_merge = True
for i in op.inputs:
ctxt = GetOutputContext(i.op)
is_cond_merge = is_cond_merge and ctxt is not None and ctxt.IsCondContext()
return is_cond_merge
def IsLoopSwitch(op):
"""Return true if `op` is the Switch for a while loop."""
if IsSwitch(op):
ctxt = op._get_control_flow_context() # pylint: disable=protected-access
return ctxt is not None and ctxt.IsWhileContext() and not IsCondSwitch(op)
return False
def IsLoopMerge(op):
"""Return true if `op` is the Merge for a while loop."""
if IsMerge(op):
ctxt = op._get_control_flow_context() # pylint: disable=protected-access
return ctxt is not None and ctxt.IsWhileContext() and not IsCondMerge(op)
return False
def IsLoopConstantEnter(op):
"""Return true iff op is a loop invariant."""
return IsLoopEnter(op) and op.get_attr("is_constant")
def GetLoopConstantEnter(value):
"""Return the enter op if we can infer `value` to be a loop invariant."""
id_ops = {"Switch", "RefSwitch", "Identity", "RefIdentity"}
op = value.op
while op.type in id_ops:
op = op.inputs[0].op
return op if IsLoopConstantEnter(op) else None
def GetOutputContext(op):
"""Return the control flow context for the output of an op."""
ctxt = op._get_control_flow_context() # pylint: disable=protected-access
# Exit nodes usually have a control flow context, except in the case where the
# exit node was imported via import_graph_def (in which case no nodes have
# control flow contexts).
if ctxt is not None and IsLoopExit(op):
ctxt = ctxt.outer_context
return ctxt
def GetContainingWhileContext(ctxt, stop_ctxt=None):
"""Returns the first ancestor WhileContext of `ctxt`.
Returns `ctxt` if `ctxt` is a WhileContext, or None if `ctxt` is not in a
while loop.
Args:
ctxt: ControlFlowContext
stop_ctxt: ControlFlowContext, optional. If provided, the search will end
if it sees stop_ctxt.
Returns:
`ctxt` if `ctxt` is a WhileContext, the most nested WhileContext containing
`ctxt`, or None if `ctxt` is not in a while loop. If `stop_ctxt` is not
`None`, this returns `ctxt` if it matches `stop_ctxt` in its traversal.
"""
while ctxt:
if ctxt.IsWhileContext() or ctxt == stop_ctxt: return ctxt
ctxt = ctxt.outer_context
return None
def GetContainingXLAContext(ctxt):
"""Returns the first ancestor XLAContext of `ctxt`.
Returns `ctxt` if `ctxt` is a XLAContext, or None if `ctxt` is not in a
while loop.
Args:
ctxt: ControlFlowContext
Returns:
`ctxt` if `ctxt` is a XLAContext, the most nested XLAContext containing
`ctxt`, or None if `ctxt` is not in a while loop.
"""
while ctxt:
if ctxt.IsXLAContext(): return ctxt
ctxt = ctxt.outer_context
return None
def GetContainingCondContext(ctxt):
"""Returns the first ancestor CondContext of `ctxt`.
Returns `ctxt` if `ctxt` is a CondContext, or None if `ctxt` is not in a cond.
Args:
ctxt: ControlFlowContext
Returns:
`ctxt` if `ctxt` is a CondContext, the most nested CondContext containing
`ctxt`, or None if `ctxt` is not in a cond.
"""
while ctxt:
if ctxt.IsCondContext(): return ctxt
ctxt = ctxt.outer_context
return None
def IsContainingContext(ctxt, maybe_containing_ctxt):
"""Returns true if `maybe_containing_ctxt` is or contains `ctxt`."""
while ctxt is not maybe_containing_ctxt:
if ctxt is None: return False
ctxt = ctxt.outer_context
return True
def OpInContext(op, ctxt):
return IsContainingContext(op._get_control_flow_context(), ctxt) # pylint: disable=protected-access
def TensorInContext(tensor, ctxt):
return OpInContext(tensor.op, ctxt)
def CheckInputFromValidContext(op, input_op):
"""Returns whether `input_op` can be used from `op`s context.
Conceptually, only inputs from op's while context or any ancestor while
context (including outside of any context) are valid. In practice, there are
many other edge cases as well.
Args:
op: Operation
input_op: Operation
Raises:
ValueError: if input_op is from an invalid context.
"""
op_ctxt = op._get_control_flow_context() # pylint: disable=protected-access
input_ctxt = GetOutputContext(input_op)
valid = False
if not input_ctxt:
# input_op isn't in a control flow context.
valid = True
elif op_ctxt is input_ctxt:
# input_op is in the same context as op.
valid = True
else:
while_ctxt = GetContainingWhileContext(op_ctxt)
input_while_ctxt = GetContainingWhileContext(input_ctxt)
if while_ctxt is None:
if input_while_ctxt is None:
# Neither op nor input_op is in a while loop, but one or both are in
# conds. We allow this, although execution will fail if the branch
# corresponding to input_op's cond context isn't taken.
valid = True
# Invalid if op isn't in a while loop and input_op is. Unless...
if IsLoopEnter(op):
# WhileContext._BuildLoop clears context for Enter nodes.
valid = True
if IsSwitch(op):
# CondContext.AddValue clears context for Switch nodes.
valid = True
elif IsContainingContext(while_ctxt, input_while_ctxt):
# input_op is in a while loop which contains op's while loop (or not in a
# while loop at all).
valid = True
elif (while_ctxt.grad_state and
IsContainingContext(while_ctxt.grad_state.forward_context,
input_while_ctxt)):
# op is in a gradient context and input_op is in the associated forward
# pass context or an ancestor thereof. This case is need to build while
# loop gradients.
# NOTE(skyewm): we theoretically also need this case for custom gradient
# functions that close over tensors from ancestor contexts, but I haven't
# verified this.
valid = True
elif (while_ctxt.grad_state and
while_ctxt.grad_state.forward_context is
input_while_ctxt._outer_context): # pylint: disable=protected-access
# op is in a gradient context and input_op is in a child of the associated
# forward pass context. This case is needed for the gradients of while
# loops with conds.
valid = True
elif (input_while_ctxt.grad_state and
input_while_ctxt.grad_state.forward_context is while_ctxt):
# input_op is in the gradient context of op's context. This case is needed
# when the gradient of a while loop gradient is requested (this will
# eventually fail unless there is a stop_gradient() or similar).
valid = True
elif (input_while_ctxt.grad_state and
input_ctxt.grad_state.forward_context.grad_state and
input_ctxt.grad_state.forward_context.grad_state.forward_context is
while_ctxt):
# input_op is in the grad grad context of op's context. This case is
# needed when the gradient of a while loop gradient is requested (this
# will eventually fail unless there is a stop_gradient() or similar).
valid = True
if not valid:
if while_ctxt:
error_msg = (
"Cannot use '%s' as input to '%s' because they are in different while"
" loops." % (op.name, input_op.name))
else:
error_msg = (
"Cannot use '%s' as input to '%s' because '%s' is in a while loop."
% (input_op.name, op.name, input_op.name))
# Log the error message plus the relevant stack traces. The stacks may be
# useful for debugging this error, but we don't want to raise an
# unreadable exception.
log_msg = error_msg
log_msg += "\n\n%s while context: %s" % (op.name, while_ctxt)
log_msg += "\n%s while context: %s" % (input_op.name, input_while_ctxt)
log_msg += "\n\nTraceback for %s:\n%s\nTraceback for %s:\n%s\n" % (
op.name, "".join(traceback.format_list(op.traceback)),
input_op.name, "".join(traceback.format_list(input_op.traceback)))
logging.info(log_msg)
raise ValueError(error_msg + " See info log for more details.")<|fim▁end|> |
def GraphOrParentsInXlaContext(graph): |
<|file_name|>bytefmt.go<|end_file_name|><|fim▁begin|>// Package bytefmt contains helper methods and constants for converting to and from a human-readable byte format.
//
// bytefmt.ByteSize(100.5*bytefmt.MEGABYTE) // "100.5M"
// bytefmt.ByteSize(uint64(1024)) // "1K"
//
// https://github.com/cloudfoundry/bytefmt/blob/master/bytes.go
package bytefmt
import (
"errors"
"strconv"
"strings"
"unicode"
)
const (
BYTE = 1 << (10 * iota)
KILOBYTE
MEGABYTE
GIGABYTE
TERABYTE
PETABYTE
EXABYTE
)
var invalidByteQuantityError = errors.New("byte quantity must be a positive integer with a unit of measurement like M, MB, MiB, G, GiB, or GB")
// ByteSize returns a human-readable byte string of the form 10M, 12.5K, and so forth. The following units are available:
// E: Exabyte
// P: Petabyte
// T: Terabyte
// G: Gigabyte
// M: Megabyte
// K: Kilobyte
// B: Byte
// The unit that results in the smallest number greater than or equal to 1 is always chosen.
func ByteSize(bytes uint64) string {
unit := ""
value := float64(bytes)
switch {
case bytes >= EXABYTE:
unit = "E"
value = value / EXABYTE
case bytes >= PETABYTE:
unit = "P"
value = value / PETABYTE
case bytes >= TERABYTE:
unit = "T"
value = value / TERABYTE
case bytes >= GIGABYTE:
unit = "G"
value = value / GIGABYTE
case bytes >= MEGABYTE:
unit = "M"
value = value / MEGABYTE
case bytes >= KILOBYTE:
unit = "K"
value = value / KILOBYTE
case bytes >= BYTE:
unit = "B"
case bytes == 0:
return "0B"
}
result := strconv.FormatFloat(value, 'f', 1, 64)
result = strings.TrimSuffix(result, ".0")
return result + unit
}
// ToMegabytes parses a string formatted by ByteSize as megabytes.
func ToMegabytes(s string) (uint64, error) {
bytes, err := ToBytes(s)
if err != nil {
return 0, err
}
return bytes / MEGABYTE, nil
}
// ToBytes parses a string formatted by ByteSize as bytes. Note binary-prefixed and SI prefixed units both mean a base-2 units
// KB = K = KiB = 1024
// MB = M = MiB = 1024 * K
// GB = G = GiB = 1024 * M
// TB = T = TiB = 1024 * G
// PB = P = PiB = 1024 * T
// EB = E = EiB = 1024 * P
func ToBytes(s string) (uint64, error) {
s = strings.TrimSpace(s)
s = strings.ToUpper(s)
i := strings.IndexFunc(s, unicode.IsLetter)
<|fim▁hole|> bytesString, multiple := s[:i], s[i:]
bytes, err := strconv.ParseFloat(bytesString, 64)
if err != nil || bytes < 0 {
return 0, invalidByteQuantityError
}
switch multiple {
case "E", "EB", "EIB":
return uint64(bytes * EXABYTE), nil
case "P", "PB", "PIB":
return uint64(bytes * PETABYTE), nil
case "T", "TB", "TIB":
return uint64(bytes * TERABYTE), nil
case "G", "GB", "GIB":
return uint64(bytes * GIGABYTE), nil
case "M", "MB", "MIB":
return uint64(bytes * MEGABYTE), nil
case "K", "KB", "KIB":
return uint64(bytes * KILOBYTE), nil
case "B":
return uint64(bytes), nil
default:
return 0, invalidByteQuantityError
}
}<|fim▁end|> | if i == -1 {
return 0, invalidByteQuantityError
}
|
<|file_name|>static-assert.rs<|end_file_name|><|fim▁begin|>#[static_assert]
static b: bool = true;
#[static_assert]
static c: bool = 1 == 1;
<|fim▁hole|>#[static_assert]
static f: bool = (4/2) == 2;
pub fn main() {
}<|fim▁end|> | #[static_assert]
static d: bool = 1 != 2;
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># Copyright 2021 The SLOE Logistic Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># See the License for the specific language governing permissions and
# limitations under the License.
"""Builds sloe_logistic package."""
from distutils import core
from distutils.command import build_clib
from pybind11.setup_helpers import build_ext
from pybind11.setup_helpers import Pybind11Extension
libraries = [
("scipy_brentq", {
"sources": ["third_party/py/scipy/optimize/Zeros/brentq.c",],
}),
]
ext_modules = [
Pybind11Extension("sloe_logistic.mle_param_integrands", [
"mle_param_integrands.cc",
]),
]
core.setup(
name="sloe_logistic",
version="0.0.1",
description="Implements SLOE method and Logistic Regression Inference",
long_description="Code to supplement the ICML submission SLOE: A Faster "
"Method for Statistical Inference in High-Dimensional Logistic Regression.",
packages=["sloe_logistic", "sloe_logistic.sloe_experiments"],
package_dir={
"sloe_logistic": ".",
"sloe_logistic.sloe_experiments": "sloe_experiments/"
},
libraries=libraries,
ext_modules=ext_modules,
cmdclass={
"build_ext": build_ext,
"build_clib": build_clib.build_clib,
},
zip_safe=False,
)<|fim▁end|> | # distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
<|file_name|>test_l3_schedulers.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import datetime
import uuid
import mock
import testscenarios
from oslo.config import cfg
from oslo.db import exception as db_exc
from oslo.utils import importutils
from oslo.utils import timeutils
from sqlalchemy.orm import query
from neutron.common import constants
from neutron.common import topics
from neutron import context as q_context
from neutron.db import agents_db
from neutron.db import common_db_mixin
from neutron.db import db_base_plugin_v2 as db_v2
from neutron.db import l3_agentschedulers_db
from neutron.db import l3_db
from neutron.db import l3_dvrscheduler_db
from neutron.db import l3_hamode_db
from neutron.db import l3_hascheduler_db
from neutron.extensions import l3agentscheduler as l3agent
from neutron import manager
from neutron.scheduler import l3_agent_scheduler
from neutron.tests import base
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import test_l3_plugin
from neutron.tests.unit import testlib_api
from neutron.tests.unit import testlib_plugin
# the below code is required for the following reason
# (as documented in testscenarios)
"""Multiply tests depending on their 'scenarios' attribute.
This can be assigned to 'load_tests' in any test module to make this
automatically work across tests in the module.
"""
load_tests = testscenarios.load_tests_apply_scenarios
HOST_DVR = 'my_l3_host_dvr'
DVR_L3_AGENT = {
'binary': 'neutron-l3-agent',
'host': HOST_DVR,<|fim▁hole|> 'configurations': {'agent_mode': 'dvr'},
'agent_type': constants.AGENT_TYPE_L3,
'start_flag': True
}
HOST_DVR_SNAT = 'my_l3_host_dvr_snat'
DVR_SNAT_L3_AGENT = {
'binary': 'neutron-l3-agent',
'host': HOST_DVR_SNAT,
'topic': topics.L3_AGENT,
'configurations': {'agent_mode': 'dvr_snat'},
'agent_type': constants.AGENT_TYPE_L3,
'start_flag': True
}
class FakeL3Scheduler(l3_agent_scheduler.L3Scheduler):
def schedule(self):
pass
def _choose_router_agent(self):
pass
def _choose_router_agents_for_ha(self):
pass
class L3SchedulerBaseTestCase(base.BaseTestCase):
    """Unit tests for L3Scheduler.auto_schedule_routers() and its helpers.

    Uses FakeL3Scheduler plus a fully mocked plugin, so no database or
    real agents are involved.
    """

    def setUp(self):
        super(L3SchedulerBaseTestCase, self).setUp()
        self.scheduler = FakeL3Scheduler()
        self.plugin = mock.Mock()
    def test_auto_schedule_routers(self):
        """Happy path: enabled agent + schedulable routers -> True."""
        self.plugin.get_enabled_agent_on_host.return_value = [mock.ANY]
        with contextlib.nested(
            mock.patch.object(self.scheduler, 'get_routers_to_schedule'),
            mock.patch.object(self.scheduler, 'get_routers_can_schedule')) as (
            gs, gr):
            result = self.scheduler.auto_schedule_routers(
                self.plugin, mock.ANY, mock.ANY, mock.ANY)
        self.assertTrue(self.plugin.get_enabled_agent_on_host.called)
        self.assertTrue(result)
        self.assertTrue(gs.called)
        self.assertTrue(gr.called)
    def test_auto_schedule_routers_no_agents(self):
        """No enabled agent on the host -> scheduling returns False."""
        self.plugin.get_enabled_agent_on_host.return_value = None
        result = self.scheduler.auto_schedule_routers(
            self.plugin, mock.ANY, mock.ANY, mock.ANY)
        self.assertTrue(self.plugin.get_enabled_agent_on_host.called)
        self.assertFalse(result)
    def test_auto_schedule_routers_no_unscheduled_routers(self):
        """Nothing left to schedule -> scheduling returns False."""
        type(self.plugin).supported_extension_aliases = (
            mock.PropertyMock(return_value=[]))
        with mock.patch.object(self.scheduler,
                               'get_routers_to_schedule') as mock_routers:
            mock_routers.return_value = []
            result = self.scheduler.auto_schedule_routers(
                self.plugin, mock.ANY, mock.ANY, mock.ANY)
        self.assertTrue(self.plugin.get_enabled_agent_on_host.called)
        self.assertFalse(result)
    def test_auto_schedule_routers_no_target_routers(self):
        """Unscheduled routers exist but none fit the agent -> False."""
        self.plugin.get_enabled_agent_on_host.return_value = [mock.ANY]
        with contextlib.nested(
            mock.patch.object(self.scheduler, 'get_routers_to_schedule'),
            mock.patch.object(self.scheduler, 'get_routers_can_schedule')) as (
            mock_unscheduled_routers, mock_target_routers):
            mock_unscheduled_routers.return_value = mock.ANY
            mock_target_routers.return_value = None
            result = self.scheduler.auto_schedule_routers(
                self.plugin, mock.ANY, mock.ANY, mock.ANY)
        self.assertTrue(self.plugin.get_enabled_agent_on_host.called)
        self.assertFalse(result)
    def test_get_routers_to_schedule_with_router_ids(self):
        """Explicit router ids are fetched then filtered for scheduling."""
        router_ids = ['foo_router_1', 'foo_router_2']
        expected_routers = [
            {'id': 'foo_router1'}, {'id': 'foo_router_2'}
        ]
        self.plugin.get_routers.return_value = expected_routers
        with mock.patch.object(self.scheduler,
                               'filter_unscheduled_routers') as mock_filter:
            mock_filter.return_value = expected_routers
            unscheduled_routers = self.scheduler.get_routers_to_schedule(
                mock.ANY, self.plugin, router_ids)
        mock_filter.assert_called_once_with(
            mock.ANY, self.plugin, expected_routers)
        self.assertEqual(expected_routers, unscheduled_routers)
    def test_get_routers_to_schedule_without_router_ids(self):
        """Without ids, all unscheduled routers are candidates."""
        expected_routers = [
            {'id': 'foo_router1'}, {'id': 'foo_router_2'}
        ]
        with mock.patch.object(self.scheduler,
                               'get_unscheduled_routers') as mock_get:
            mock_get.return_value = expected_routers
            unscheduled_routers = self.scheduler.get_routers_to_schedule(
                mock.ANY, self.plugin)
        mock_get.assert_called_once_with(mock.ANY, self.plugin)
        self.assertEqual(expected_routers, unscheduled_routers)
    def test_get_routers_to_schedule_exclude_distributed(self):
        """exclude_distributed=True drops DVR routers from the result."""
        routers = [
            {'id': 'foo_router1', 'distributed': True}, {'id': 'foo_router_2'}
        ]
        expected_routers = [{'id': 'foo_router_2'}]
        with mock.patch.object(self.scheduler,
                               'get_unscheduled_routers') as mock_get:
            mock_get.return_value = routers
            unscheduled_routers = self.scheduler.get_routers_to_schedule(
                mock.ANY, self.plugin,
                router_ids=None, exclude_distributed=True)
        mock_get.assert_called_once_with(mock.ANY, self.plugin)
        self.assertEqual(expected_routers, unscheduled_routers)
    def _test_get_routers_can_schedule(self, routers, agent, target_routers):
        """Stub candidate lookup with *agent*; expect *target_routers*."""
        self.plugin.get_l3_agent_candidates.return_value = agent
        result = self.scheduler.get_routers_can_schedule(
            mock.ANY, self.plugin, routers, mock.ANY)
        self.assertEqual(target_routers, result)
    def _test_filter_unscheduled_routers(self, routers, agents, expected):
        """Stub hosting-agent lookup with *agents*; expect *expected*."""
        self.plugin.get_l3_agents_hosting_routers.return_value = agents
        unscheduled_routers = self.scheduler.filter_unscheduled_routers(
            mock.ANY, self.plugin, routers)
        self.assertEqual(expected, unscheduled_routers)
    def test_filter_unscheduled_routers_already_scheduled(self):
        """Routers already hosted by an agent are filtered out."""
        self._test_filter_unscheduled_routers(
            [{'id': 'foo_router1'}, {'id': 'foo_router_2'}],
            [{'id': 'foo_agent_id'}], [])
    def test_filter_unscheduled_routers_non_scheduled(self):
        """Routers hosted by no agent pass through unchanged."""
        self._test_filter_unscheduled_routers(
            [{'id': 'foo_router1'}, {'id': 'foo_router_2'}],
            None, [{'id': 'foo_router1'}, {'id': 'foo_router_2'}])
    def test_get_routers_can_schedule_with_compat_agent(self):
        """A compatible agent keeps the routers schedulable."""
        routers = [{'id': 'foo_router'}]
        self._test_get_routers_can_schedule(routers, mock.ANY, routers)
    def test_get_routers_can_schedule_with_no_compat_agent(self):
        """No compatible agent -> empty schedulable list."""
        routers = [{'id': 'foo_router'}]
        self._test_get_routers_can_schedule(routers, None, [])
    def test_bind_routers_centralized(self):
        """Non-HA routers are bound via bind_router()."""
        routers = [{'id': 'foo_router'}]
        with mock.patch.object(self.scheduler, 'bind_router') as mock_bind:
            self.scheduler.bind_routers(mock.ANY, mock.ANY, routers, mock.ANY)
        mock_bind.assert_called_once_with(mock.ANY, 'foo_router', mock.ANY)
    def _test_bind_routers_ha(self, has_binding):
        """HA routers create a binding only when none exists yet."""
        routers = [{'id': 'foo_router', 'ha': True, 'tenant_id': '42'}]
        agent = agents_db.Agent(id='foo_agent')
        with contextlib.nested(
            mock.patch.object(self.scheduler, 'router_has_binding',
                              return_value=has_binding),
            mock.patch.object(self.scheduler, 'create_ha_router_binding')) as (
                mock_has_binding, mock_bind):
            self.scheduler.bind_routers(mock.ANY, mock.ANY, routers, agent)
            mock_has_binding.assert_called_once_with(mock.ANY, 'foo_router',
                                                     'foo_agent')
            self.assertEqual(not has_binding, mock_bind.called)
    def test_bind_routers_ha_has_binding(self):
        self._test_bind_routers_ha(has_binding=True)
    def test_bind_routers_ha_no_binding(self):
        self._test_bind_routers_ha(has_binding=False)
class L3SchedulerBaseMixin(object):
    """Shared fixtures: register fake L3 agents and build test routers."""

    def _register_l3_agent(self, host, agent_mode='legacy', plugin=None):
        """Report a fake L3 agent on *host* and return its DB record."""
        if not plugin:
            plugin = self.plugin
        agent = {
            'binary': 'neutron-l3-agent',
            'host': host,
            'topic': topics.L3_AGENT,
            'configurations': {'agent_mode': agent_mode},
            'agent_type': constants.AGENT_TYPE_L3,
            'start_flag': True
        }
        callback = agents_db.AgentExtRpcCallback()
        callback.report_state(self.adminContext,
                              agent_state={'agent_state': agent},
                              time=timeutils.strtime())
        agent_db = plugin.get_agents_db(self.adminContext,
                                        filters={'host': [agent['host']]})
        return agent_db[0]
    def _register_l3_agents(self, plugin=None):
        """Register two legacy agents and remember their ids."""
        self.agent1 = self._register_l3_agent('host_1', plugin=plugin)
        self.agent_id1 = self.agent1.id
        self.agent2 = self._register_l3_agent('host_2', plugin=plugin)
        self.agent_id2 = self.agent2.id
    def _register_l3_dvr_agents(self):
        """Register one 'dvr' and one 'dvr_snat' agent from the constants."""
        callback = agents_db.AgentExtRpcCallback()
        callback.report_state(self.adminContext,
                              agent_state={'agent_state': DVR_L3_AGENT},
                              time=timeutils.strtime())
        agent_db = self.plugin.get_agents_db(self.adminContext,
                                             filters={'host': [HOST_DVR]})
        self.l3_dvr_agent = agent_db[0]
        self.l3_dvr_agent_id = agent_db[0].id
        callback.report_state(self.adminContext,
                              agent_state={'agent_state': DVR_SNAT_L3_AGENT},
                              time=timeutils.strtime())
        agent_db = self.plugin.get_agents_db(self.adminContext,
                                             filters={'host': [HOST_DVR_SNAT]})
        self.l3_dvr_snat_id = agent_db[0].id
        self.l3_dvr_snat_agent = agent_db[0]
    def _set_l3_agent_admin_state(self, context, agent_id, state=True):
        """Enable/disable an agent via the plugin's update_agent API."""
        update = {'agent': {'admin_state_up': state}}
        self.plugin.update_agent(context, agent_id, update)
    def _set_l3_agent_dead(self, agent_id):
        # Back-date the heartbeat one hour so the agent counts as dead.
        update = {
            'agent': {
                'heartbeat_timestamp':
                timeutils.utcnow() - datetime.timedelta(hours=1)}}
        self.plugin.update_agent(self.adminContext, agent_id, update)
    @contextlib.contextmanager
    def router_with_ext_gw(self, name='router1', admin_state_up=True,
                           fmt=None, tenant_id=str(uuid.uuid4()),
                           external_gateway_info=None,
                           subnet=None, set_context=False,
                           **kwargs):
        """Yield a router attached to *subnet*'s external network.

        Tears the gateway down and deletes the router on exit.
        NOTE(review): the tenant_id default is evaluated once at import
        time, so every call that omits tenant_id shares the same tenant
        UUID — confirm that this sharing is intended.
        """
        router = self._make_router(fmt or self.fmt, tenant_id, name,
                                   admin_state_up, external_gateway_info,
                                   set_context, **kwargs)
        self._add_external_gateway_to_router(
            router['router']['id'],
            subnet['subnet']['network_id'])
        yield router
        self._remove_external_gateway_from_router(
            router['router']['id'], subnet['subnet']['network_id'])
        self._delete('routers', router['router']['id'])
class L3SchedulerTestBaseMixin(object):
    """Scheduler behaviour tests shared by the concrete scheduler cases.

    The mixing class is expected to also provide scheduler-db behaviour
    (add_router_to_l3_agent, get_l3_agent_candidates, ...) plus the
    fixtures from L3SchedulerBaseMixin.
    """

    def _test_add_router_to_l3_agent(self,
                                     distributed=False,
                                     already_scheduled=False,
                                     external_gw=None):
        """Bind a (possibly DVR) router to an agent and check the binding."""
        agent_id = self.agent_id1
        agent = self.agent1
        if distributed:
            self._register_l3_dvr_agents()
            agent_id = self.l3_dvr_snat_id
            agent = self.l3_dvr_snat_agent
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r1')
        router['router']['distributed'] = distributed
        router['router']['external_gateway_info'] = external_gw
        if already_scheduled:
            self._test_schedule_bind_router(agent, router)
        with contextlib.nested(
            mock.patch.object(self, "validate_agent_router_combination"),
            mock.patch.object(self, "create_router_to_agent_binding"),
            mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
                       return_value=router['router'])
        ) as (valid, auto_s, gr):
            self.add_router_to_l3_agent(self.adminContext, agent_id,
                                        router['router']['id'])
            # A binding is only created when the router was not already
            # scheduled.
            self.assertNotEqual(already_scheduled, auto_s.called)
    def _create_router_for_l3_agent_dvr_test(self,
                                             distributed=False,
                                             external_gw=None):
        """Build a router dict tweaked for the DVR scheduling tests."""
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r1')
        router['router']['distributed'] = distributed
        router['router']['external_gateway_info'] = external_gw
        return router
    def _prepare_l3_agent_dvr_move_exceptions(self,
                                              distributed=False,
                                              external_gw=None,
                                              agent_id=None,
                                              expected_exception=None):
        """Assert that an illegal router/agent combination raises."""
        router = self._create_router_for_l3_agent_dvr_test(
            distributed=distributed, external_gw=external_gw)
        with contextlib.nested(
            mock.patch.object(self, "create_router_to_agent_binding"),
            mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
                       return_value=router['router'])):
            self.assertRaises(expected_exception,
                              self.add_router_to_l3_agent,
                              self.adminContext, agent_id,
                              router['router']['id'])
    def test_add_router_to_l3_agent_mismatch_error_dvr_to_legacy(self):
        """A DVR router cannot be assigned to a legacy agent."""
        self._register_l3_agents()
        self._prepare_l3_agent_dvr_move_exceptions(
            distributed=True,
            agent_id=self.agent_id1,
            expected_exception=l3agent.RouterL3AgentMismatch)
    def test_add_router_to_l3_agent_mismatch_error_legacy_to_dvr(self):
        """A legacy router cannot be assigned to a DVR-mode agent."""
        self._register_l3_dvr_agents()
        self._prepare_l3_agent_dvr_move_exceptions(
            agent_id=self.l3_dvr_agent_id,
            expected_exception=l3agent.RouterL3AgentMismatch)
    def test_add_router_to_l3_agent_mismatch_error_dvr_to_dvr(self):
        """A DVR router cannot be manually assigned to a 'dvr' agent."""
        self._register_l3_dvr_agents()
        self._prepare_l3_agent_dvr_move_exceptions(
            distributed=True,
            agent_id=self.l3_dvr_agent_id,
            expected_exception=l3agent.DVRL3CannotAssignToDvrAgent)
    def test_add_router_to_l3_agent_dvr_to_snat(self):
        """A DVR router with a gateway binds to the dvr_snat agent."""
        external_gw_info = {
            "network_id": str(uuid.uuid4()),
            "enable_snat": True
        }
        self._register_l3_dvr_agents()
        agent_id = self.l3_dvr_snat_id
        agent = self.l3_dvr_snat_agent
        router = self._create_router_for_l3_agent_dvr_test(
            distributed=True,
            external_gw=external_gw_info)
        with contextlib.nested(
            mock.patch.object(self, "validate_agent_router_combination"),
            mock.patch.object(self, "create_router_to_agent_binding"),
            mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
                       return_value=router['router'])
        ) as (valid_agent_rtr, rtr_agent_binding, get_rtr):
            self.add_router_to_l3_agent(self.adminContext, agent_id,
                                        router['router']['id'])
            rtr_agent_binding.assert_called_once_with(
                self.adminContext, agent, router['router'])
    def test_add_router_to_l3_agent(self):
        self._test_add_router_to_l3_agent()
    def test_add_distributed_router_to_l3_agent(self):
        external_gw_info = {
            "network_id": str(uuid.uuid4()),
            "enable_snat": True
        }
        self._test_add_router_to_l3_agent(distributed=True,
                                          external_gw=external_gw_info)
    def test_add_router_to_l3_agent_already_scheduled(self):
        self._test_add_router_to_l3_agent(already_scheduled=True)
    def test_add_distributed_router_to_l3_agent_already_scheduled(self):
        external_gw_info = {
            "network_id": str(uuid.uuid4()),
            "enable_snat": True
        }
        self._test_add_router_to_l3_agent(distributed=True,
                                          already_scheduled=True,
                                          external_gw=external_gw_info)
    def _prepare_schedule_dvr_tests(self):
        """Return (scheduler, live agent, mocked plugin) for DVR tests."""
        scheduler = l3_agent_scheduler.ChanceScheduler()
        agent = agents_db.Agent()
        agent.admin_state_up = True
        agent.heartbeat_timestamp = timeutils.utcnow()
        plugin = mock.Mock()
        plugin.get_l3_agents_hosting_routers.return_value = []
        plugin.get_l3_agents.return_value = [agent]
        plugin.get_l3_agent_candidates.return_value = [agent]
        return scheduler, agent, plugin
    def test_schedule_dvr_router_without_snatbinding_and_no_gw(self):
        """DVR router, no SNAT binding, no gateway: plain scheduling path."""
        scheduler, agent, plugin = self._prepare_schedule_dvr_tests()
        sync_router = {
            'id': 'foo_router_id',
            'distributed': True
        }
        plugin.get_router.return_value = sync_router
        with contextlib.nested(
            mock.patch.object(scheduler, 'bind_router'),
            mock.patch.object(
                plugin, 'get_snat_bindings', return_value=False)
        ):
            scheduler._schedule_router(
                plugin, self.adminContext, 'foo_router_id', None)
        expected_calls = [
            mock.call.get_router(mock.ANY, 'foo_router_id'),
            mock.call.get_l3_agents_hosting_routers(
                mock.ANY, ['foo_router_id'], admin_state_up=True),
            mock.call.get_l3_agents(mock.ANY, active=True),
            mock.call.get_l3_agent_candidates(mock.ANY, sync_router, [agent]),
        ]
        plugin.assert_has_calls(expected_calls)
    def test_schedule_dvr_router_with_snatbinding_no_gw(self):
        """Gateway removed: the stale SNAT binding must be torn down."""
        scheduler, agent, plugin = self._prepare_schedule_dvr_tests()
        sync_router = {'id': 'foo_router_id',
                       'distributed': True}
        plugin.get_router.return_value = sync_router
        with contextlib.nested(
            mock.patch.object(scheduler, 'bind_router'),
            mock.patch.object(plugin, 'get_snat_bindings', return_value=True)):
            scheduler._schedule_router(
                plugin, self.adminContext, 'foo_router_id', None)
        expected_calls = [
            mock.call.get_router(mock.ANY, 'foo_router_id'),
            mock.call.unbind_snat_servicenode(mock.ANY, 'foo_router_id'),
            mock.call.get_l3_agents_hosting_routers(
                mock.ANY, ['foo_router_id'], admin_state_up=True),
            mock.call.get_l3_agents(mock.ANY, active=True),
            mock.call.get_l3_agent_candidates(mock.ANY, sync_router, [agent]),
        ]
        plugin.assert_has_calls(expected_calls)
    def test_schedule_router_distributed(self):
        """DVR router with a gateway triggers SNAT scheduling."""
        scheduler, agent, plugin = self._prepare_schedule_dvr_tests()
        sync_router = {
            'id': 'foo_router_id',
            'distributed': True,
            'external_gateway_info': {
                'network_id': str(uuid.uuid4()),
                'enable_snat': True
            }
        }
        plugin.get_router.return_value = sync_router
        with contextlib.nested(
            mock.patch.object(scheduler, 'bind_router'),
            mock.patch.object(
                plugin, 'get_snat_bindings', return_value=False)
        ):
            scheduler._schedule_router(
                plugin, self.adminContext, 'foo_router_id', None)
        expected_calls = [
            mock.call.get_router(mock.ANY, 'foo_router_id'),
            mock.call.schedule_snat_router(
                mock.ANY, 'foo_router_id', sync_router),
            mock.call.get_l3_agents_hosting_routers(
                mock.ANY, ['foo_router_id'], admin_state_up=True),
            mock.call.get_l3_agents(mock.ANY, active=True),
            mock.call.get_l3_agent_candidates(mock.ANY, sync_router, [agent]),
        ]
        plugin.assert_has_calls(expected_calls)
    def _test_schedule_bind_router(self, agent, router):
        """bind_router() must persist a RouterL3AgentBinding row."""
        ctx = self.adminContext
        session = ctx.session
        db = l3_agentschedulers_db.RouterL3AgentBinding
        scheduler = l3_agent_scheduler.ChanceScheduler()
        rid = router['router']['id']
        scheduler.bind_router(ctx, rid, agent)
        results = (session.query(db).filter_by(router_id=rid).all())
        self.assertTrue(len(results) > 0)
        self.assertIn(agent.id, [bind.l3_agent_id for bind in results])
    def test_bind_new_router(self):
        """First binding of a router logs an 'is scheduled' debug line."""
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r1')
        with mock.patch.object(l3_agent_scheduler.LOG, 'debug') as flog:
            self._test_schedule_bind_router(self.agent1, router)
            self.assertEqual(1, flog.call_count)
            args, kwargs = flog.call_args
            self.assertIn('is scheduled', args[0])
    def test_bind_absent_router(self):
        scheduler = l3_agent_scheduler.ChanceScheduler()
        # checking that bind_router() is not throwing
        # when supplied with router_id of non-existing router
        scheduler.bind_router(self.adminContext, "dummyID", self.agent1)
    def test_bind_existing_router(self):
        """Re-binding logs 'has already been scheduled' instead."""
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r2')
        self._test_schedule_bind_router(self.agent1, router)
        with mock.patch.object(l3_agent_scheduler.LOG, 'debug') as flog:
            self._test_schedule_bind_router(self.agent1, router)
            self.assertEqual(1, flog.call_count)
            args, kwargs = flog.call_args
            self.assertIn('has already been scheduled', args[0])
    def _check_get_l3_agent_candidates(
            self, router, agent_list, exp_host, count=1):
        """Expect *count* candidates, the first hosted on *exp_host*."""
        candidates = self.get_l3_agent_candidates(self.adminContext,
                                                  router, agent_list)
        self.assertEqual(len(candidates), count)
        if count:
            self.assertEqual(candidates[0]['host'], exp_host)
    def test_get_l3_agent_candidates_legacy(self):
        self._register_l3_dvr_agents()
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r2')
        router['external_gateway_info'] = None
        router['id'] = str(uuid.uuid4())
        agent_list = [self.agent1, self.l3_dvr_agent]
        # test legacy agent_mode case: only legacy agent should be candidate
        router['distributed'] = False
        exp_host = 'host_1'
        self._check_get_l3_agent_candidates(router, agent_list, exp_host)
    def test_get_l3_agent_candidates_dvr(self):
        self._register_l3_dvr_agents()
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r2')
        router['external_gateway_info'] = None
        router['id'] = str(uuid.uuid4())
        agent_list = [self.agent1, self.l3_dvr_agent]
        # test dvr agent_mode case only dvr agent should be candidate
        router['distributed'] = True
        exp_host = DVR_L3_AGENT.get('host')
        self.check_ports_exist_on_l3agent = mock.Mock(return_value=True)
        self._check_get_l3_agent_candidates(router, agent_list, exp_host)
    def test_get_l3_agent_candidates_dvr_no_vms(self):
        self._register_l3_dvr_agents()
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r2')
        router['external_gateway_info'] = None
        router['id'] = str(uuid.uuid4())
        agent_list = [self.agent1, self.l3_dvr_agent]
        exp_host = DVR_L3_AGENT.get('host')
        router['distributed'] = True
        # Test no VMs present case
        self.check_ports_exist_on_l3agent = mock.Mock(return_value=False)
        self._check_get_l3_agent_candidates(
            router, agent_list, exp_host, count=0)
    def test_get_l3_agent_candidates_dvr_snat(self):
        self._register_l3_dvr_agents()
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r2')
        router['external_gateway_info'] = None
        router['id'] = str(uuid.uuid4())
        router['distributed'] = True
        agent_list = [self.l3_dvr_snat_agent]
        exp_host = DVR_SNAT_L3_AGENT.get('host')
        self.check_ports_exist_on_l3agent = mock.Mock(return_value=True)
        self._check_get_l3_agent_candidates(router, agent_list, exp_host)
    def test_get_l3_agent_candidates_dvr_snat_no_vms(self):
        self._register_l3_dvr_agents()
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r2')
        router['external_gateway_info'] = None
        router['id'] = str(uuid.uuid4())
        router['distributed'] = True
        agent_list = [self.l3_dvr_snat_agent]
        exp_host = DVR_SNAT_L3_AGENT.get('host')
        self.check_ports_exist_on_l3agent = mock.Mock(return_value=False)
        # Test no VMs present case
        # NOTE(review): the next line re-assigns the return_value already
        # set above — redundant but harmless.
        self.check_ports_exist_on_l3agent.return_value = False
        self._check_get_l3_agent_candidates(
            router, agent_list, exp_host, count=0)
    def test_get_l3_agent_candidates_centralized(self):
        self._register_l3_dvr_agents()
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r2')
        router['external_gateway_info'] = None
        router['id'] = str(uuid.uuid4())
        # check centralized test case
        router['distributed'] = False
        exp_host = DVR_SNAT_L3_AGENT.get('host')
        agent_list = [self.l3_dvr_snat_agent]
        self._check_get_l3_agent_candidates(router, agent_list, exp_host)
    def _prepare_check_ports_exist_tests(self):
        """Return (agent, router) with port/subnet lookups stubbed empty."""
        l3_agent = agents_db.Agent()
        l3_agent.admin_state_up = True
        l3_agent.host = 'host_1'
        router = self._make_router(self.fmt,
                                   tenant_id=str(uuid.uuid4()),
                                   name='r2')
        router['external_gateway_info'] = None
        router['id'] = str(uuid.uuid4())
        self.plugin.get_ports = mock.Mock(return_value=[])
        self.get_subnet_ids_on_router = mock.Mock(return_value=[])
        return l3_agent, router
    def test_check_ports_exist_on_l3agent_no_subnets(self):
        l3_agent, router = self._prepare_check_ports_exist_tests()
        with mock.patch.object(manager.NeutronManager,
                               'get_plugin') as getp:
            getp.return_value = self.plugin
            # no subnets
            val = self.check_ports_exist_on_l3agent(self.adminContext,
                                                    l3_agent, router['id'])
            self.assertFalse(val)
    def test_check_ports_exist_on_l3agent_no_subnet_match(self):
        l3_agent, router = self._prepare_check_ports_exist_tests()
        with mock.patch.object(manager.NeutronManager,
                               'get_plugin') as getp:
            getp.return_value = self.plugin
            # no matching subnet
            self.get_subnet_ids_on_router.return_value = [str(uuid.uuid4())]
            val = self.check_ports_exist_on_l3agent(self.adminContext,
                                                    l3_agent, router['id'])
            self.assertFalse(val)
    def test_check_ports_exist_on_l3agent_subnet_match(self):
        l3_agent, router = self._prepare_check_ports_exist_tests()
        with mock.patch.object(manager.NeutronManager,
                               'get_plugin') as getp:
            getp.return_value = self.plugin
            # matching subnet
            port = {'subnet_id': str(uuid.uuid4()),
                    'binding:host_id': 'host_1',
                    'device_owner': 'compute:',
                    'id': 1234}
            self.plugin.get_ports.return_value = [port]
            self.plugin.get_subnet_ids_on_router = mock.Mock(
                return_value=[port['subnet_id']])
            val = self.check_ports_exist_on_l3agent(self.adminContext,
                                                    l3_agent, router['id'])
            self.assertTrue(val)
class L3SchedulerTestCase(l3_agentschedulers_db.L3AgentSchedulerDbMixin,
                          l3_db.L3_NAT_db_mixin,
                          common_db_mixin.CommonDbMixin,
                          test_db_plugin.NeutronDbPluginV2TestCase,
                          test_l3_plugin.L3NatTestCaseMixin,
                          L3SchedulerBaseMixin,
                          L3SchedulerTestBaseMixin):
    """Concrete scheduler test case running against a real plugin + DB.

    Wires the scheduler-db mixins to the L3 NAT test plugin, installs the
    ChanceScheduler, and registers the two legacy agents the mixed-in
    tests rely on.
    """

    def setUp(self):
        self.mock_rescheduling = False
        ext_mgr = test_l3_plugin.L3TestExtensionManager()
        plugin_str = ('neutron.tests.unit.test_l3_plugin.'
                      'TestL3NatIntAgentSchedulingPlugin')
        super(L3SchedulerTestCase, self).setUp(plugin=plugin_str,
                                               ext_mgr=ext_mgr)
        self.adminContext = q_context.get_admin_context()
        self.plugin = manager.NeutronManager.get_plugin()
        self.plugin.router_scheduler = importutils.import_object(
            'neutron.scheduler.l3_agent_scheduler.ChanceScheduler'
        )
        self._register_l3_agents()
class L3AgentChanceSchedulerTestCase(L3SchedulerTestCase):
    """Functional tests for the ChanceScheduler (random agent choice)."""

    def test_random_scheduling(self):
        """Each router with an external gateway lands on one random agent."""
        # Use mock.patch as a context manager instead of start()/stop():
        # the original called stop() only at the end of the body, so a
        # failing assertion leaked the random.choice patch into all
        # subsequent tests.
        with mock.patch('random.choice') as random_mock:
            def side_effect(seq):
                # Deterministic stand-in for random.choice.
                return seq[0]
            random_mock.side_effect = side_effect
            with self.subnet() as subnet:
                self._set_net_external(subnet['subnet']['network_id'])
                with self.router_with_ext_gw(name='r1', subnet=subnet) as r1:
                    agents = self.get_l3_agents_hosting_routers(
                        self.adminContext, [r1['router']['id']],
                        admin_state_up=True)
                    self.assertEqual(len(agents), 1)
                    self.assertEqual(random_mock.call_count, 1)
                    with self.router_with_ext_gw(name='r2',
                                                 subnet=subnet) as r2:
                        agents = self.get_l3_agents_hosting_routers(
                            self.adminContext, [r2['router']['id']],
                            admin_state_up=True)
                        self.assertEqual(len(agents), 1)
                        self.assertEqual(random_mock.call_count, 2)

    def test_scheduler_auto_schedule_when_agent_added(self):
        """An unhosted router is picked up once an agent is re-enabled."""
        self._set_l3_agent_admin_state(self.adminContext,
                                       self.agent_id1, False)
        self._set_l3_agent_admin_state(self.adminContext,
                                       self.agent_id2, False)
        with self.subnet() as subnet:
            self._set_net_external(subnet['subnet']['network_id'])
            with self.router_with_ext_gw(name='r1', subnet=subnet) as r1:
                # Both agents disabled: the router must stay unhosted.
                agents = self.get_l3_agents_hosting_routers(
                    self.adminContext, [r1['router']['id']],
                    admin_state_up=True)
                self.assertEqual(0, len(agents))
                # Re-enable agent 1 and auto-schedule on its host.
                self._set_l3_agent_admin_state(self.adminContext,
                                               self.agent_id1, True)
                self.plugin.auto_schedule_routers(self.adminContext,
                                                  'host_1',
                                                  [r1['router']['id']])
                agents = self.get_l3_agents_hosting_routers(
                    self.adminContext, [r1['router']['id']],
                    admin_state_up=True)
                self.assertEqual('host_1', agents[0]['host'])
class L3AgentLeastRoutersSchedulerTestCase(L3SchedulerTestCase):
    """Functional tests for the LeastRoutersScheduler."""

    def setUp(self):
        super(L3AgentLeastRoutersSchedulerTestCase, self).setUp()
        # Replace the ChanceScheduler installed by the parent setUp.
        self.plugin.router_scheduler = importutils.import_object(
            'neutron.scheduler.l3_agent_scheduler.LeastRoutersScheduler'
        )
    def test_scheduler(self):
        """Routers go to the agent currently hosting the fewest routers."""
        # disable one agent to force the scheduling to the only one.
        self._set_l3_agent_admin_state(self.adminContext,
                                       self.agent_id2, False)
        with self.subnet() as subnet:
            self._set_net_external(subnet['subnet']['network_id'])
            with self.router_with_ext_gw(name='r1', subnet=subnet) as r1:
                agents = self.get_l3_agents_hosting_routers(
                    self.adminContext, [r1['router']['id']],
                    admin_state_up=True)
                self.assertEqual(len(agents), 1)
                agent_id1 = agents[0]['id']
                with self.router_with_ext_gw(name='r2', subnet=subnet) as r2:
                    agents = self.get_l3_agents_hosting_routers(
                        self.adminContext, [r2['router']['id']],
                        admin_state_up=True)
                    self.assertEqual(len(agents), 1)
                    agent_id2 = agents[0]['id']
                    # Only one agent is enabled, so r1 and r2 share it.
                    self.assertEqual(agent_id1, agent_id2)
                    # re-enable the second agent to see whether the next router
                    # spawned will be on this one.
                    self._set_l3_agent_admin_state(self.adminContext,
                                                   self.agent_id2, True)
                    with self.router_with_ext_gw(name='r3',
                                                 subnet=subnet) as r3:
                        agents = self.get_l3_agents_hosting_routers(
                            self.adminContext, [r3['router']['id']],
                            admin_state_up=True)
                        self.assertEqual(len(agents), 1)
                        agent_id3 = agents[0]['id']
                        self.assertNotEqual(agent_id1, agent_id3)
class L3DvrScheduler(l3_db.L3_NAT_db_mixin,
                     l3_dvrscheduler_db.L3_DVRsch_db_mixin):
    """Minimal device-under-test combining the L3 NAT and DVR scheduler
    DB mixins; adds no behaviour of its own."""
class L3DvrSchedulerTestCase(testlib_api.SqlTestCase,
testlib_plugin.PluginSetupHelper):
def setUp(self):
plugin = 'neutron.plugins.ml2.plugin.Ml2Plugin'
self.setup_coreplugin(plugin)
super(L3DvrSchedulerTestCase, self).setUp()
self.adminContext = q_context.get_admin_context()
self.dut = L3DvrScheduler()
def test_dvr_update_router_addvm(self):
port = {
'device_id': 'abcd',
'device_owner': 'compute:nova',
'fixed_ips': [
{
'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
'ip_address': '10.10.10.3'
}
]
}
dvr_port = {
'id': 'dvr_port1',
'device_id': 'r1',
'device_owner': 'network:router_interface_distributed',
'fixed_ips': [
{
'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
'ip_address': '10.10.10.1'
}
]
}
r1 = {
'id': 'r1',
'distributed': True,
}
with contextlib.nested(
mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2'
'.get_ports', return_value=[dvr_port]),
mock.patch('neutron.manager.NeutronManager.get_service_plugins',
return_value=mock.Mock()),
mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
return_value=r1),
mock.patch('neutron.api.rpc.agentnotifiers.l3_rpc_agent_api'
'.L3AgentNotifyAPI')):
self.dut.dvr_update_router_addvm(self.adminContext, port)
def test_get_dvr_routers_by_portid(self):
dvr_port = {
'id': 'dvr_port1',
'device_id': 'r1',
'device_owner': 'network:router_interface_distributed',
'fixed_ips': [
{
'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
'ip_address': '10.10.10.1'
}
]
}
r1 = {
'id': 'r1',
'distributed': True,
}
with contextlib.nested(
mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2'
'.get_port', return_value=dvr_port),
mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2'
'.get_ports', return_value=[dvr_port])):
router_id = self.dut.get_dvr_routers_by_portid(self.adminContext,
dvr_port['id'])
self.assertEqual(router_id.pop(), r1['id'])
def test_get_subnet_ids_on_router(self):
dvr_port = {
'id': 'dvr_port1',
'device_id': 'r1',
'device_owner': 'network:router_interface_distributed',
'fixed_ips': [
{
'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
'ip_address': '10.10.10.1'
}
]
}
r1 = {
'id': 'r1',
'distributed': True,
}
with contextlib.nested(
mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2'
'.get_ports', return_value=[dvr_port])):
sub_ids = self.dut.get_subnet_ids_on_router(self.adminContext,
r1['id'])
self.assertEqual(sub_ids.pop(),
dvr_port.get('fixed_ips').pop(0).get('subnet_id'))
def test_check_ports_active_on_host_and_subnet(self):
dvr_port = {
'id': 'dvr_port1',
'device_id': 'r1',
'status': 'ACTIVE',
'binding:host_id': 'thisHost',
'device_owner': 'compute:nova',
'fixed_ips': [
{
'subnet_id': '80947d4a-fbc8-484b-9f92-623a6bfcf3e0',
'ip_address': '10.10.10.1'
}
]
}
r1 = {
'id': 'r1',
'distributed': True,
}
with contextlib.nested(
mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2'
'.get_ports', return_value=[dvr_port]),
mock.patch('neutron.manager.NeutronManager.get_service_plugins',
return_value=mock.Mock()),
mock.patch('neutron.db.l3_db.L3_NAT_db_mixin.get_router',
return_value=r1),
mock.patch('neutron.api.rpc.agentnotifiers.l3_rpc_agent_api'
'.L3AgentNotifyAPI')):
sub_ids = self.dut.get_subnet_ids_on_router(self.adminContext,
r1['id'])
result = self.dut.check_ports_active_on_host_and_subnet(
self.adminContext,
'thisHost', 'dvr_port1',
sub_ids)
self.assertFalse(result)
def _test_dvr_serviced_port_exists_on_subnet(self, port):
with mock.patch('neutron.db.db_base_plugin_v2.NeutronDbPluginV2.'
'get_ports', return_value=[port]):
result = self.dut.check_ports_active_on_host_and_subnet(
self.adminContext,
'thisHost',
'dvr1-intf-id',
'my-subnet-id')
self.assertTrue(result)
def test_dvr_serviced_vip_port_exists_on_subnet(self):
vip_port = {
'id': 'lbaas-vip-port1',
'device_id': 'vip-pool-id',
'status': 'ACTIVE',
'binding:host_id': 'thisHost',
'device_owner': constants.DEVICE_OWNER_LOADBALANCER,
'fixed_ips': [
{
'subnet_id': 'my-subnet-id',
'ip_address': '10.10.10.1'
}
]
}
self._test_dvr_serviced_port_exists_on_subnet(port=vip_port)
def test_dvr_serviced_dhcp_port_exists_on_subnet(self):
dhcp_port = {
'id': 'dhcp-port1',
'device_id': 'dhcp-net-id',
'status': 'ACTIVE',
'binding:host_id': 'thisHost',
'device_owner': constants.DEVICE_OWNER_DHCP,
'fixed_ips': [
{
'subnet_id': 'my-subnet-id',
'ip_address': '10.10.10.2'
}
]
}
self._test_dvr_serviced_port_exists_on_subnet(port=dhcp_port)
def _prepare_schedule_snat_tests(self):
agent = agents_db.Agent()
agent.admin_state_up = True
agent.heartbeat_timestamp = timeutils.utcnow()
router = {
'id': 'foo_router_id',
'distributed': True,
'external_gateway_info': {
'network_id': str(uuid.uuid4()),
'enable_snat': True
}
}
return agent, router
def test_schedule_snat_router_duplicate_entry(self):
self._prepare_schedule_snat_tests()
with contextlib.nested(
mock.patch.object(self.dut, 'get_l3_agents'),
mock.patch.object(self.dut, 'get_snat_candidates'),
mock.patch.object(self.dut, 'bind_snat_servicenode',
side_effect=db_exc.DBDuplicateEntry()),
mock.patch.object(self.dut, 'bind_dvr_router_servicenode')
) as (mock_gl3, mock_snat_canidates, mock_bind_snat, mock_bind_dvr):
self.dut.schedule_snat_router(self.adminContext, 'foo', 'bar')
self.assertTrue(mock_bind_snat.called)
self.assertFalse(mock_bind_dvr.called)
def test_schedule_router_unbind_snat_servicenode_negativetest(self):
router = {
'id': 'foo_router_id',
'distributed': True
}
with contextlib.nested(
mock.patch.object(self.dut, 'get_router'),
mock.patch.object(self.dut, 'get_snat_bindings'),
mock.patch.object(self.dut, 'unbind_snat_servicenode')
) as (mock_rd, mock_snat_bind, mock_unbind):
mock_rd.return_value = router
mock_snat_bind.return_value = False
self.dut.schedule_snat_router(
self.adminContext, 'foo_router_id', router)
self.assertFalse(mock_unbind.called)
def test_schedule_snat_router_with_snat_candidates(self):
agent, router = self._prepare_schedule_snat_tests()
with contextlib.nested(
mock.patch.object(query.Query, 'first'),
mock.patch.object(self.dut, 'get_l3_agents'),
mock.patch.object(self.dut, 'get_snat_candidates'),
mock.patch.object(self.dut, 'get_router'),
mock.patch.object(self.dut, 'bind_dvr_router_servicenode'),
mock.patch.object(self.dut, 'bind_snat_servicenode')) as (
mock_query, mock_agents,
mock_candidates, mock_rd, mock_dvr, mock_bind):
mock_rd.return_value = router
mock_query.return_value = []
mock_agents.return_value = [agent]
mock_candidates.return_value = [agent]
self.dut.schedule_snat_router(
self.adminContext, 'foo_router_id', mock.ANY)
mock_bind.assert_called_once_with(
self.adminContext, 'foo_router_id', [agent])
    def test_unbind_snat_servicenode(self):
        """Unbinding SNAT removes the binding row and notifies the plugins.

        Both the core plugin (for port lookups) and the L3 RPC notifier
        must be touched exactly once during the unbind.
        """
        router_id = 'foo_router_id'
        core_plugin = mock.PropertyMock()
        type(self.dut)._core_plugin = core_plugin
        # No ports remain on the host's subnet, so the unbind can proceed.
        (self.dut._core_plugin.get_ports_on_host_by_subnet.
         return_value) = []
        core_plugin.reset_mock()
        l3_notifier = mock.PropertyMock()
        type(self.dut).l3_rpc_notifier = l3_notifier
        binding = l3_dvrscheduler_db.CentralizedSnatL3AgentBinding(
            router_id=router_id, l3_agent_id='foo_l3_agent_id',
            l3_agent=agents_db.Agent())
        with contextlib.nested(
            mock.patch.object(query.Query, 'one'),
            mock.patch.object(self.adminContext.session, 'delete'),
            mock.patch.object(query.Query, 'delete'),
            mock.patch.object(self.dut, 'get_subnet_ids_on_router')) as (
                mock_query, mock_session, mock_delete, mock_get_subnets):
            mock_query.return_value = binding
            mock_get_subnets.return_value = ['foo_subnet_id']
            self.dut.unbind_snat_servicenode(self.adminContext, router_id)
            mock_get_subnets.assert_called_with(self.adminContext, router_id)
            # The binding row was deleted from the session and the query.
            self.assertTrue(mock_session.call_count)
            self.assertTrue(mock_delete.call_count)
        core_plugin.assert_called_once_with()
        l3_notifier.assert_called_once_with()
class L3HAPlugin(db_v2.NeutronDbPluginV2,
                 l3_hamode_db.L3_HA_NAT_db_mixin,
                 l3_hascheduler_db.L3_HA_scheduler_db_mixin):
    """Minimal plugin exposing the l3-ha extension for the HA scheduler tests."""

    supported_extension_aliases = ["l3-ha"]
class L3HATestCaseMixin(testlib_api.SqlTestCase,
                        L3SchedulerBaseMixin,
                        testlib_plugin.PluginSetupHelper):
    """Shared fixture for HA scheduler tests: plugin, agents and helpers."""

    def setUp(self):
        super(L3HATestCaseMixin, self).setUp()
        self.adminContext = q_context.get_admin_context()
        self.plugin = L3HAPlugin()

        self.setup_coreplugin('neutron.plugins.ml2.plugin.Ml2Plugin')
        # HA interface notifications would hit messaging; stub them out.
        mock.patch.object(l3_hamode_db.L3_HA_NAT_db_mixin,
                          '_notify_ha_interfaces_updated').start()

        # 0 means "no limit": HA routers may be scheduled on every agent.
        cfg.CONF.set_override('max_l3_agents_per_router', 0)
        self.plugin.router_scheduler = importutils.import_object(
            'neutron.scheduler.l3_agent_scheduler.ChanceScheduler'
        )

        self._register_l3_agents()

    def _create_ha_router(self, ha=True, tenant_id='tenant1'):
        """Create a router; ha=None omits the flag so the default applies."""
        self.adminContext.tenant_id = tenant_id
        router = {'name': 'router1', 'admin_state_up': True}
        if ha is not None:
            router['ha'] = ha
        return self.plugin.create_router(self.adminContext,
                                         {'router': router})
class L3_HA_scheduler_db_mixinTestCase(L3HATestCaseMixin):
    """Tests for the HA scheduler DB mixin query helpers."""

    def _register_l3_agents(self, plugin=None):
        """Register four L3 agents (two extra beyond the base mixin)."""
        super(L3_HA_scheduler_db_mixinTestCase,
              self)._register_l3_agents(plugin=plugin)

        self.agent3 = self._register_l3_agent('host_3', plugin=plugin)
        self.agent_id3 = self.agent3.id

        self.agent4 = self._register_l3_agent('host_4', plugin=plugin)
        self.agent_id4 = self.agent4.id

    def test_get_ha_routers_l3_agents_count(self):
        """Only HA routers appear in the per-router agent-count query."""
        router1 = self._create_ha_router()
        router2 = self._create_ha_router()
        router3 = self._create_ha_router(ha=False)
        self.plugin.schedule_router(self.adminContext, router1['id'])
        self.plugin.schedule_router(self.adminContext, router2['id'])
        self.plugin.schedule_router(self.adminContext, router3['id'])
        result = self.plugin.get_ha_routers_l3_agents_count(
            self.adminContext).all()

        # Both HA routers are hosted by all 4 agents; the non-HA router
        # must not be reported at all.
        self.assertEqual(2, len(result))
        self.assertIn((router1['id'], router1['tenant_id'], 4), result)
        self.assertIn((router2['id'], router2['tenant_id'], 4), result)
        self.assertNotIn((router3['id'], router3['tenant_id'], mock.ANY),
                         result)

    def test_get_ordered_l3_agents_by_num_routers(self):
        """Agents come back ordered by ascending hosted-router count."""
        router1 = self._create_ha_router()
        router2 = self._create_ha_router()
        router3 = self._create_ha_router(ha=False)
        router4 = self._create_ha_router(ha=False)

        # Agent 1 will host 0 routers, agent 2 will host 1, agent 3 will
        # host 2, and agent 4 will host 3.
        self.plugin.schedule_router(self.adminContext, router1['id'],
                                    candidates=[self.agent2, self.agent4])
        self.plugin.schedule_router(self.adminContext, router2['id'],
                                    candidates=[self.agent3, self.agent4])
        self.plugin.schedule_router(self.adminContext, router3['id'],
                                    candidates=[self.agent3])
        self.plugin.schedule_router(self.adminContext, router4['id'],
                                    candidates=[self.agent4])

        agent_ids = [self.agent_id1, self.agent_id2, self.agent_id3,
                     self.agent_id4]
        result = self.plugin.get_l3_agents_ordered_by_num_routers(
            self.adminContext, agent_ids)

        self.assertEqual(agent_ids, [record['id'] for record in result])
class L3AgentSchedulerDbMixinTestCase(L3HATestCaseMixin):
    """Rescheduling behaviour for HA routers hosted on dead agents."""

    def test_reschedule_ha_routers_from_down_agents(self):
        """HA routers must NOT be rescheduled away from a dead agent."""
        router = self._create_ha_router()
        self.plugin.schedule_router(self.adminContext, router['id'])
        agents = self.plugin.get_l3_agents_hosting_routers(
            self.adminContext, [router['id']],
            admin_state_up=True)
        self.assertEqual(2, len(agents))
        self._set_l3_agent_dead(self.agent_id1)
        with mock.patch.object(self.plugin, 'reschedule_router') as reschedule:
            self.plugin.reschedule_routers_from_down_agents()
            # HA failover is handled by VRRP, not by rescheduling.
            self.assertFalse(reschedule.called)
class L3HAChanceSchedulerTestCase(L3HATestCaseMixin):
    """HA router scheduling with the ChanceScheduler."""

    def test_scheduler_with_ha_enabled(self):
        """An HA router lands on both agents, each with HA sync data."""
        router = self._create_ha_router()
        self.plugin.schedule_router(self.adminContext, router['id'])
        agents = self.plugin.get_l3_agents_hosting_routers(
            self.adminContext, [router['id']],
            admin_state_up=True)
        self.assertEqual(2, len(agents))

        for agent in agents:
            sync_data = self.plugin.get_ha_sync_data_for_host(
                self.adminContext, router_ids=[router['id']],
                host=agent.host)
            self.assertEqual(1, len(sync_data))
            interface = sync_data[0][constants.HA_INTERFACE_KEY]
            self.assertIsNotNone(interface)

    def test_auto_schedule(self):
        """Auto-scheduling on each agent's host binds the router to both."""
        router = self._create_ha_router()
        self.plugin.auto_schedule_routers(
            self.adminContext, self.agent1.host, None)
        self.plugin.auto_schedule_routers(
            self.adminContext, self.agent2.host, None)
        agents = self.plugin.get_l3_agents_hosting_routers(
            self.adminContext, [router['id']])
        self.assertEqual(2, len(agents))

    def test_auto_schedule_specific_router_when_agent_added(self):
        self._auto_schedule_when_agent_added(True)

    def test_auto_schedule_all_routers_when_agent_added(self):
        self._auto_schedule_when_agent_added(False)

    def _auto_schedule_when_agent_added(self, specific_router):
        """A newly added agent picks up the HA router exactly once."""
        router = self._create_ha_router()
        self.plugin.schedule_router(self.adminContext, router['id'])
        agents = self.plugin.get_l3_agents_hosting_routers(
            self.adminContext, [router['id']],
            admin_state_up=True)
        self.assertEqual(2, len(agents))
        agent_ids = [agent['id'] for agent in agents]
        self.assertIn(self.agent_id1, agent_ids)
        self.assertIn(self.agent_id2, agent_ids)

        agent = self._register_l3_agent('host_3')
        self.agent_id3 = agent.id
        routers_to_auto_schedule = [router['id']] if specific_router else []
        self.plugin.auto_schedule_routers(self.adminContext,
                                          'host_3',
                                          routers_to_auto_schedule)

        agents = self.plugin.get_l3_agents_hosting_routers(
            self.adminContext, [router['id']],
            admin_state_up=True)
        self.assertEqual(3, len(agents))

        # Simulate agent restart to make sure we don't try to re-bind
        self.plugin.auto_schedule_routers(self.adminContext,
                                          'host_3',
                                          routers_to_auto_schedule)

    def test_scheduler_with_ha_enabled_not_enough_agent(self):
        """With only one admin-up agent, an HA router cannot be scheduled."""
        r1 = self._create_ha_router()
        self.plugin.schedule_router(self.adminContext, r1['id'])
        agents = self.plugin.get_l3_agents_hosting_routers(
            self.adminContext, [r1['id']],
            admin_state_up=True)
        self.assertEqual(2, len(agents))

        self._set_l3_agent_admin_state(self.adminContext,
                                       self.agent_id2, False)
        r2 = self._create_ha_router()
        self.plugin.schedule_router(self.adminContext, r2['id'])
        agents = self.plugin.get_l3_agents_hosting_routers(
            self.adminContext, [r2['id']],
            admin_state_up=True)
        self.assertEqual(0, len(agents))
        self._set_l3_agent_admin_state(self.adminContext,
                                       self.agent_id2, True)
class L3HALeastRoutersSchedulerTestCase(L3HATestCaseMixin):
    """HA router scheduling with the LeastRoutersScheduler."""

    def _register_l3_agents(self, plugin=None):
        """Register four L3 agents (two extra beyond the base mixin)."""
        super(L3HALeastRoutersSchedulerTestCase,
              self)._register_l3_agents(plugin=plugin)

        agent = self._register_l3_agent('host_3', plugin=plugin)
        self.agent_id3 = agent.id

        agent = self._register_l3_agent('host_4', plugin=plugin)
        self.agent_id4 = agent.id

    def setUp(self):
        super(L3HALeastRoutersSchedulerTestCase, self).setUp()
        self.plugin.router_scheduler = importutils.import_object(
            'neutron.scheduler.l3_agent_scheduler.LeastRoutersScheduler'
        )

    def test_scheduler(self):
        """Routers land on the least-loaded admin-up agents."""
        cfg.CONF.set_override('max_l3_agents_per_router', 2)

        # disable the third agent to be sure that the router will
        # be scheduled of the two firsts
        self._set_l3_agent_admin_state(self.adminContext,
                                       self.agent_id3, False)
        self._set_l3_agent_admin_state(self.adminContext,
                                       self.agent_id4, False)
        r1 = self._create_ha_router()
        self.plugin.schedule_router(self.adminContext, r1['id'])
        agents = self.plugin.get_l3_agents_hosting_routers(
            self.adminContext, [r1['id']],
            admin_state_up=True)
        self.assertEqual(2, len(agents))
        agent_ids = [agent['id'] for agent in agents]
        self.assertIn(self.agent_id1, agent_ids)
        self.assertIn(self.agent_id2, agent_ids)

        # Re-enable agents 3 and 4: they are now the least loaded and
        # must receive the second router.
        self._set_l3_agent_admin_state(self.adminContext,
                                       self.agent_id3, True)
        self._set_l3_agent_admin_state(self.adminContext,
                                       self.agent_id4, True)

        r2 = self._create_ha_router()
        self.plugin.schedule_router(self.adminContext, r2['id'])
        agents = self.plugin.get_l3_agents_hosting_routers(
            self.adminContext, [r2['id']],
            admin_state_up=True)
        self.assertEqual(2, len(agents))
        agent_ids = [agent['id'] for agent in agents]
        self.assertIn(self.agent_id3, agent_ids)
        self.assertIn(self.agent_id4, agent_ids)
class TestGetL3AgentsWithAgentModeFilter(testlib_api.SqlTestCase,
                                         testlib_plugin.PluginSetupHelper,
                                         L3SchedulerBaseMixin):
    """Test cases to test get_l3_agents.

    This class tests the L3AgentSchedulerDbMixin.get_l3_agents()
    for the 'agent_mode' filter with various values.

    5 l3 agents are registered in the order - legacy, dvr_snat, dvr, fake_mode
    and legacy
    """

    # Each scenario names the 'agent_modes' filter to apply and the agent
    # modes expected back, in registration order.
    scenarios = [
        ('no filter',
         dict(agent_modes=[],
              expected_agent_modes=['legacy', 'dvr_snat', 'dvr',
                                    'fake_mode', 'legacy'])),

        ('legacy',
         dict(agent_modes=['legacy'],
              expected_agent_modes=['legacy', 'legacy'])),

        ('dvr_snat',
         dict(agent_modes=['dvr_snat'],
              expected_agent_modes=['dvr_snat'])),

        ('dvr ',
         dict(agent_modes=['dvr'],
              expected_agent_modes=['dvr'])),

        ('legacy and dvr snat',
         dict(agent_modes=['legacy', 'dvr_snat', 'legacy'],
              expected_agent_modes=['legacy', 'dvr_snat', 'legacy'])),

        ('legacy and dvr',
         dict(agent_modes=['legacy', 'dvr'],
              expected_agent_modes=['legacy', 'dvr', 'legacy'])),

        ('dvr_snat and dvr',
         dict(agent_modes=['dvr_snat', 'dvr'],
              expected_agent_modes=['dvr_snat', 'dvr'])),

        ('legacy, dvr_snat and dvr',
         dict(agent_modes=['legacy', 'dvr_snat', 'dvr'],
              expected_agent_modes=['legacy', 'dvr_snat', 'dvr',
                                    'legacy'])),

        ('invalid',
         dict(agent_modes=['invalid'],
              expected_agent_modes=[])),
    ]

    def setUp(self):
        super(TestGetL3AgentsWithAgentModeFilter, self).setUp()
        self.plugin = L3HAPlugin()
        self.setup_coreplugin('neutron.plugins.ml2.plugin.Ml2Plugin')
        self.adminContext = q_context.get_admin_context()
        hosts = ['host_1', 'host_2', 'host_3', 'host_4', 'host_5']
        agent_modes = ['legacy', 'dvr_snat', 'dvr', 'fake_mode', 'legacy']
        for host, agent_mode in zip(hosts, agent_modes):
            self._register_l3_agent(host, agent_mode, self.plugin)

    def _get_agent_mode(self, agent):
        """Extract the agent_mode value from an agent's configuration dict."""
        agent_conf = self.plugin.get_configuration_dict(agent)
        return agent_conf.get('agent_mode', 'None')
def test_get_l3_agents(self):
l3_agents = self.plugin.get_l3_agents(
self.adminContext, filters={'agent_modes': self.agent_modes})
self.assertEqual(len(self.expected_agent_modes), len(l3_agents))
returned_agent_modes = [self._get_agent_mode(agent)
for agent in l3_agents]
self.assertEqual(self.expected_agent_modes, returned_agent_modes)<|fim▁end|> | 'topic': topics.L3_AGENT, |
<|file_name|>SOAPEnvelopeBlockImpl.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
*
*/
package org.apache.axis2.jaxws.message.databinding.impl;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMOutputFormat;
import org.apache.axis2.jaxws.ExceptionFactory;
import org.apache.axis2.jaxws.message.Message;
import org.apache.axis2.jaxws.message.databinding.SOAPEnvelopeBlock;
import org.apache.axis2.jaxws.message.factory.BlockFactory;
import org.apache.axis2.jaxws.message.factory.MessageFactory;
import org.apache.axis2.jaxws.message.impl.BlockImpl;
import org.apache.axis2.jaxws.message.util.SOAPElementReader;
import org.apache.axis2.jaxws.registry.FactoryRegistry;
import javax.xml.namespace.QName;
import javax.xml.soap.SOAPElement;
import javax.xml.soap.SOAPEnvelope;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
import javax.xml.ws.WebServiceException;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
/**
 * Block implementation whose business object is a SAAJ
 * {@link javax.xml.soap.SOAPEnvelope}. Bridges between the JAX-WS Block
 * abstraction and the SAAJ/StAX representations of a whole SOAP envelope.
 */
public class SOAPEnvelopeBlockImpl extends BlockImpl implements SOAPEnvelopeBlock {

    /**
     * Called by SOAPEnvelopeBlockFactory
     *
     * @param busObject  the SOAPEnvelope business object
     * @param busContext opaque context passed through by the factory (may be null)
     * @param qName      envelope QName; computed from busObject when null
     * @param factory    the BlockFactory creating this block
     */
    public SOAPEnvelopeBlockImpl(Object busObject, Object busContext,
                                 QName qName, BlockFactory factory) {
        super(busObject,
              busContext,
              (qName == null) ? getQName((SOAPEnvelope)busObject) : qName,
              factory);
    }

    /**
     * Called by SOAPEnvelopeBlockFactory
     *
     * @param omElement  OM representation of the envelope
     * @param busContext opaque context passed through by the factory (may be null)
     * @param qName      envelope QName
     * @param factory    the BlockFactory creating this block
     */
    public SOAPEnvelopeBlockImpl(OMElement omElement, Object busContext,
                                 QName qName, BlockFactory factory) {
        super(omElement, busContext, qName, factory);
    }

    /* (non-Javadoc)
     * @see org.apache.axis2.jaxws.message.impl.BlockImpl#_getBOFromReader(javax.xml.stream.XMLStreamReader, java.lang.Object)
     */
    @Override
    protected Object _getBOFromReader(XMLStreamReader reader, Object busContext)
            throws XMLStreamException, WebServiceException {
        // Rebuild a full Message from the stream and hand back its envelope,
        // updating this block's QName to match the parsed envelope.
        MessageFactory mf = (MessageFactory)FactoryRegistry.getFactory(MessageFactory.class);
        Message message = mf.createFrom(reader, null);
        SOAPEnvelope env = message.getAsSOAPEnvelope();
        this.setQName(getQName(env));
        return env;
    }

    /* (non-Javadoc)
     * @see org.apache.axis2.jaxws.message.impl.BlockImpl#_getReaderFromBO(java.lang.Object, java.lang.Object)
     */
    @Override
    protected XMLStreamReader _getReaderFromBO(Object busObj, Object busContext)
            throws XMLStreamException, WebServiceException {
        // Expose the SAAJ tree as a StAX reader without copying it.
        return new SOAPElementReader((SOAPElement)busObj);
    }

    /* (non-Javadoc)
     * @see org.apache.axis2.jaxws.message.impl.BlockImpl#_outputFromBO(java.lang.Object, java.lang.Object, javax.xml.stream.XMLStreamWriter)
     */
    @Override
    protected void _outputFromBO(Object busObject, Object busContext,
                                 XMLStreamWriter writer)
            throws XMLStreamException, WebServiceException {
        XMLStreamReader reader = _getReaderFromBO(busObject, busContext);
        _outputFromReader(reader, writer);
    }

    /**
     * Get the QName of the envelope
     *
     * @param env the envelope to inspect
     * @return QName built from the envelope's namespace, local name and prefix
     */
    private static QName getQName(SOAPEnvelope env) {
        return new QName(env.getNamespaceURI(), env.getLocalName(), env.getPrefix());
    }

    // An envelope is always element data.
    public boolean isElementData() {
        return true;
    }

    public void close() {
        return; // Nothing to close
    }

    // Serializes the envelope and wraps the bytes in an in-memory stream.
    public InputStream getXMLInputStream(String encoding) throws UnsupportedEncodingException {
        byte[] bytes = getXMLBytes(encoding);
        return new ByteArrayInputStream(bytes);
    }

<|fim▁hole|>    public Object getObject() {
        try {
            return getBusinessObject(false);
        } catch (XMLStreamException e) {
            throw ExceptionFactory.makeWebServiceException(e);
        }
    }

    // The envelope can be read and written repeatedly without consuming it.
    public boolean isDestructiveRead() {
        return false;
    }

    public boolean isDestructiveWrite() {
        return false;
    }

    // Serializes the envelope into a byte array using the requested encoding.
    public byte[] getXMLBytes(String encoding) throws UnsupportedEncodingException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        OMOutputFormat format = new OMOutputFormat();
        format.setCharSetEncoding(encoding);
        try {
            serialize(baos, format);
            baos.flush();
            return baos.toByteArray();
        } catch (XMLStreamException e) {
            throw ExceptionFactory.makeWebServiceException(e);
        } catch (IOException e) {
            throw ExceptionFactory.makeWebServiceException(e);
        }
    }
}<|fim▁end|>
<|file_name|>feature_extraction.py<|end_file_name|><|fim▁begin|>import librosa
import numpy as np
import help_functions
def extract_mfccdd(fpath, n_mfcc=13, winsize=0.25, sampling_rate=16000):
'''
Compute MFCCs, first and second derivatives
:param fpath: the file path
:param n_mfcc: the number of MFCC coefficients. Default = 13 coefficients
:param winsize: the time length of the window for MFCC extraction. Default 0.25s (250ms)
:param sampling_rate: the sampling rate. The file is loaded and converted to the specified sampling rate.
:return: a 2D numpy matrix (frames * MFCCdd)
'''
help_functions.check_existence(fpath)
data, sr = librosa.load(fpath, sr=sampling_rate, mono=True)
winlen = int(2 * winsize * sr)
winstep = int(winlen / 2.0)
mfccs = librosa.feature.mfcc(y=data, sr=sr, n_mfcc=n_mfcc, n_fft=winlen, hop_length=winstep)
deltas = librosa.feature.delta(mfccs)
deltadeltas = librosa.feature.delta(deltas)<|fim▁hole|>def extract_multiple_features(fpath, n_mfcc=13, sampling_rate=16000):
chroma_feature = librosa.feature.chroma_stft(fpath, sampling_rate) # 12
mfcc_feature = librosa.feature.mfcc(fpath, sampling_rate, n_mfcc=n_mfcc) # default = 20
rmse_feature = librosa.feature.rmse(fpath) # 1
spectral_centroid_feature = librosa.feature.spectral_centroid(fpath, sampling_rate) #1
spectral_bandwidth_feature = librosa.feature.spectral_bandwidth(fpath, sampling_rate) #1
#spectral_contrast_feature = librosa.feature.spectral_contrast(data,rate) #7
spectral_rolloff_feature = librosa.feature.spectral_rolloff(fpath, sampling_rate) #1
poly_features = librosa.feature.poly_features(fpath, sampling_rate) #2
#tonnetz_feature = librosa.feature.tonnetz(data,rate) #6
zero_crossing_rate_feature = librosa.feature.zero_crossing_rate(fpath, sampling_rate) #1
l = len(chroma_feature[0])
chroma_feature = np.reshape(chroma_feature,[l ,len(chroma_feature)])
mfcc_feature = np.reshape(mfcc_feature,[l ,len(mfcc_feature)])
rmse_feature = np.reshape(rmse_feature,[l ,len(rmse_feature)])
spectral_centroid_feature = np.reshape(spectral_centroid_feature,[l ,len(spectral_centroid_feature)])
spectral_bandwidth_feature = np.reshape(spectral_bandwidth_feature,[l ,len(spectral_bandwidth_feature)])
#spectral_contrast_feature = np.reshape(spectral_contrast_feature,[l ,len(spectral_contrast_feature)])
spectral_rolloff_feature = np.reshape(spectral_rolloff_feature,[l ,len(spectral_rolloff_feature)])
poly_features = np.reshape(poly_features,[l ,len(poly_features)])
#tonnetz_feature = np.reshape(tonnetz_feature,[l ,len(tonnetz_feature)])
zero_crossing_rate_feature = np.reshape(zero_crossing_rate_feature,[l ,len(zero_crossing_rate_feature)])
# Concatenate all features to a feature vector (length = 32)
features = np.concatenate((chroma_feature,mfcc_feature,rmse_feature,
spectral_centroid_feature,spectral_bandwidth_feature,
spectral_rolloff_feature, poly_features,
zero_crossing_rate_feature),axis=1)
return features<|fim▁end|> | mfccdd = np.concatenate((mfccs, deltas, deltadeltas), axis=1)
return mfccdd
|
<|file_name|>block-arg.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::vec;
// Check usage and precedence of block arguments in expressions:
// NOTE(review): this uses pre-1.0 Rust syntax (`do` closure arguments,
// `~[...]` owned vectors, the `float` type); kept verbatim as a syntax test.
pub fn main() {
    let v = ~[-1f, 0f, 1f, 2f, 3f];

    // Statement form does not require parentheses:
    for v.iter().advance |i| {
        info!("%?", *i);
    }

    // Usable at all:
    let mut any_negative = do v.iter().any_ |e| { e.is_negative() };
    assert!(any_negative);

    // Higher precedence than assignments:
    any_negative = do v.iter().any_ |e| { e.is_negative() };
    assert!(any_negative);

    // Higher precedence than unary operations:
    let abs_v = do v.iter().transform |e| { e.abs() }.collect::<~[float]>();
    assert!(do abs_v.iter().all |e| { e.is_positive() });
    assert!(!do abs_v.iter().any_ |e| { e.is_negative() });

    // Usable in funny statement-like forms:
    if !do v.iter().any_ |e| { e.is_positive() } {
        assert!(false);
    }
    match do v.iter().all |e| { e.is_negative() } {
        true => { fail!("incorrect answer."); }
        false => { }
    }
    match 3 {
        _ if do v.iter().any_ |e| { e.is_negative() } => {
        }
        _ => {
            fail!("wrong answer.");
        }
    }

    // Lower precedence than binary operations:
    let w = do v.iter().fold(0f) |x, y| { x + *y } + 10f;
    let y = do v.iter().fold(0f) |x, y| { x + *y } + 10f;
    let z = 10f + do v.iter().fold(0f) |x, y| { x + *y };
    assert_eq!(w, y);
    assert_eq!(y, z);

    // In the tail of a block
    let w =
        if true { do abs_v.iter().any_ |e| { e.is_positive() } }
        else { false };
    assert!(w);
}<|fim▁end|>
<|file_name|>manager-dialog.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, OnDestroy } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { Response } from '@angular/http';
import { Observable } from 'rxjs/Observable';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { JhiEventManager } from 'ng-jhipster';
import { Manager } from './manager.model';
import { ManagerPopupService } from './manager-popup.service';
import { ManagerService } from './manager.service';
@Component({
selector: 'jhi-manager-dialog',
templateUrl: './manager-dialog.component.html'<|fim▁hole|>export class ManagerDialogComponent implements OnInit {
manager: Manager;
isSaving: boolean;
    /**
     * @param activeModal handle on the hosting modal, used to dismiss it
     * @param managerService CRUD service for Manager entities
     * @param eventManager broadcasts entity-change events to interested listeners
     */
    constructor(
        public activeModal: NgbActiveModal,
        private managerService: ManagerService,
        private eventManager: JhiEventManager
    ) {
    }
    /** Reset the saving flag each time the dialog is initialised. */
    ngOnInit() {
        this.isSaving = false;
    }
    /** Close the dialog without saving. */
    clear() {
        this.activeModal.dismiss('cancel');
    }
    /**
     * Persist the manager: update when it already has an id, create otherwise.
     * The saving flag guards the UI against double submission.
     */
    save() {
        this.isSaving = true;
        if (this.manager.id !== undefined) {
            this.subscribeToSaveResponse(
                this.managerService.update(this.manager));
        } else {
            this.subscribeToSaveResponse(
                this.managerService.create(this.manager));
        }
    }
    /** Route the save observable to the success/error handlers. */
    private subscribeToSaveResponse(result: Observable<Manager>) {
        result.subscribe((res: Manager) =>
            this.onSaveSuccess(res), (res: Response) => this.onSaveError());
    }
    /** Broadcast the list-modification event and close the dialog with the result. */
    private onSaveSuccess(result: Manager) {
        this.eventManager.broadcast({ name: 'managerListModification', content: 'OK'});
        this.isSaving = false;
        this.activeModal.dismiss(result);
    }
    /** Re-enable the form so the user can retry after a failed save. */
    private onSaveError() {
        this.isSaving = false;
    }
}
@Component({
    selector: 'jhi-manager-popup',
    template: ''
})
export class ManagerPopupComponent implements OnInit, OnDestroy {
    // Route-params subscription; retained so it can be torn down in ngOnDestroy.
    routeSub: any;

    constructor(
        private route: ActivatedRoute,
        private managerPopupService: ManagerPopupService
    ) {}

    /** Open the edit dialog when an id is present in the route, else the create dialog. */
    ngOnInit() {
        this.routeSub = this.route.params.subscribe((params) => {
            if ( params['id'] ) {
                this.managerPopupService
                    .open(ManagerDialogComponent as Component, params['id']);
            } else {
                this.managerPopupService
                    .open(ManagerDialogComponent as Component);
            }
        });
    }

    /** Unsubscribe to avoid leaking the params subscription. */
    ngOnDestroy() {
        this.routeSub.unsubscribe();
    }
}<|fim▁end|>
<|file_name|>IC7443.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <headingcell level=2>
# Usage of IC 7443
# <codecell>
from __future__ import print_function
from BinPy import *

# <codecell>

# Usage of IC 7443:
ic = IC_7443()
print(ic.__doc__)

# <codecell>

# The Pin configuration is:
inp = {8: 0, 12: 0, 13: 1, 14: 0, 15: 1, 16: 1}

# Pin initialization
# Powering up the IC - using -- ic.setIC({14: 1, 7: 0})
ic.setIC({14: 1, 7: 0})

# Setting the inputs of the ic
ic.setIC(inp)

# Draw the IC with the current configuration
ic.drawIC()

<|fim▁hole|>
# Run the IC with the current configuration using -- print ic.run() --
# Note that the ic.run() returns a dict of pin configuration similar to inp
print (ic.run())

# <codecell>

# Setting the outputs to the current IC configuration using --
# ic.setIC(ic.run()) --
ic.setIC(ic.run())
# Draw the final configuration
ic.drawIC()

# <codecell>

# Setting the outputs to the current IC configuration using --
# ic.setIC(ic.run()) --
ic.setIC(ic.run())
# Draw the final configuration
ic.drawIC()
# Run the IC
print (ic.run())

# <codecell>

# Connector Outputs
c = Connector()
# Set the output connector to a particular pin of the ic
ic.setOutput(1, c)
print(c)<|fim▁end|>
|
<|file_name|>reacttree.go<|end_file_name|><|fim▁begin|>package flux
import (
"errors"
"fmt"
"reflect"
)
// Reactive data components that are able to react to changes within the given
// fields they have so an action can be initiated.

// ReactorType is the reflect.TypeOf value of the Reactor interface; it is used
// when checking arbitrary values for Reactor conformance via reflection.
var ReactorType = reflect.TypeOf((*Reactor)(nil)).Elem()

// DataTreeRegister provides an interface that defines a registering method for datatrees.
type DataTreeRegister interface {
	registerObserver(string, Reactor)
}

// DataTrees defines the behaviour of a reactive data tree: it is itself a
// Reactor, accepts child-observer registration, and can report what it tracks.
type DataTrees interface {
	Reactor
	DataTreeRegister
	Track(string) (Reactor, error)
	Tracking(string) bool
	HasTracks() bool
}

// DataTree represents a base struct for reactivity which other structs compose
// to gain reactive data behaviour.
type DataTree struct {
	// Reactor for the tree that emits itself every time a child Reactor changes.
	Reactor `yaml:"-" json:"-"`
	// trackers maps observed field names to the child Reactors being watched.
	trackers map[string]Reactor
	// ro sync.RWMutex
}
// NewDataTree constructs a DataTree backed by an identity Reactor and an
// empty tracker table.
func NewDataTree() *DataTree {
	return &DataTree{
		Reactor:  ReactIdentity(),
		trackers: map[string]Reactor{},
	}
}
// Track looks up the reactor registered under the given field name. It
// returns ErrNotReactor when nothing is tracked under that name.
func (b *DataTree) Track(attr string) (Reactor, error) {
	if reactor, ok := b.trackers[attr]; ok {
		return reactor, nil
	}
	return nil, ErrNotReactor
}
// Tracking reports whether a field with the given name is currently observed.
func (b *DataTree) Tracking(attr string) bool {
	_, ok := b.trackers[attr]
	return ok
}
// HasTracks reports whether the tree is observing any fields at all.
func (b *DataTree) HasTracks() bool {
	return len(b.trackers) > 0
}
// registerObserver wires a child reactor into the tree under the given name
// for change notifications. Errors from the child are forwarded via
// SendError; any successful change makes the tree re-emit itself so parents
// can react in turn.
func (b *DataTree) registerObserver(name string, ob Reactor) {
	// A tree never observes itself.
	if b == ob {
		return
	}
	// First registration wins; duplicate names are silently ignored.
	if _, ok := b.trackers[name]; ok {
		return
	}
	b.trackers[name] = ob
	ob.React(func(r Reactor, err error, _ interface{}) {
		if err != nil {
			b.SendError(err)
			return
		}
		// Emit the tree itself (not the child) so observers see tree-level change.
		b.Send(b)
	}, true)
}
// ErrSelfRegister is returned when a tree tries to register itself
var ErrSelfRegister = errors.New("DataTree can not register self")
// ErrNotReactor is returned when a interface is not a reactor
var ErrNotReactor = errors.New("interface is not Reactor type")
// RegisterReflectWith registers the value wrapped by rot under the given name
// if that value implements Reactor.
//
// It returns ErrSelfRegister when rot wraps the tree itself and ErrNotReactor
// when the underlying value does not implement Reactor.
func RegisterReflectWith(tree DataTreeRegister, name string, rot reflect.Value) error {
	if rot.Interface() == tree {
		return ErrSelfRegister
	}
	// A comma-ok type assertion on the underlying value handles pointer,
	// interface and plain values alike without panicking. The previous
	// code dereferenced pointers and then called Elem() a second time,
	// which panicked on non-interface values and rejected types that
	// implement Reactor via pointer receivers.
	rcfl, ok := rot.Interface().(Reactor)
	if !ok {
		return ErrNotReactor
	}
	tree.registerObserver(name, rcfl)
	return nil
}
// RegisterStructObservers takes a struct (or pointer to struct) and registers
// every field whose value implements Reactor with the DataTreeRegister, keyed
// by the field name, enabling self-reactivity in the tree. Fields that cannot
// hold a Reactor are skipped.
func RegisterStructObservers(tree DataTreeRegister, treeable interface{}) error {
	if tree == treeable {
		return ErrSelfRegister
	}
	rot := reflect.ValueOf(treeable)
	if rot.Kind() == reflect.Ptr {
		rot = rot.Elem()
	}
	if rot.Kind() != reflect.Struct {
		// Nothing to walk; previously this panicked inside NumField.
		return ErrNotReactor
	}
	rotto := rot.Type()
	for i := 0; i < rot.NumField(); i++ {
		// fl is the field value, flo its static description (for the name).
		fl := rot.Field(i)
		flo := rotto.Field(i)
		// Only interface or pointer fields can carry a Reactor; calling
		// Elem() on any other kind panics (the original did exactly that
		// for plain int/string/struct fields).
		if fl.Kind() != reflect.Interface && fl.Kind() != reflect.Ptr {
			continue
		}
		if fl.IsNil() {
			continue
		}
		if fl.Elem().Interface() == tree {
			continue
		}
		if !fl.Type().Implements(ReactorType) {
			continue
		}
		rcfl, ok := fl.Elem().Interface().(Reactor)
		if !ok {
			// Pointer fields implement Reactor via the pointer type itself.
			rcfl, ok = fl.Interface().(Reactor)
		}
		if !ok {
			continue
		}
		tree.registerObserver(flo.Name, rcfl)
	}
	return nil
}
// RegisterListObservers registers a slice/array elements where the elements are Reactors with a DataTree,all indexes are stringed,so if you want 1 do "1"
func RegisterListObservers(tree DataTreeRegister, list []interface{}) error {
for id, target := range list {
if target == tree {
continue
}<|fim▁hole|>
if !ok {
continue
}
tree.registerObserver(fmt.Sprintf("%d", id), fl)
}
return nil
}
// RegisterMapObservers registers a slice/array elements where the elements are Reactors with a DataTree
func RegisterMapObservers(tree DataTreeRegister, dlist map[string]interface{}) error {
for id, target := range dlist {
if target == tree {
continue
}
fl, ok := target.(Reactor)
if !ok {
continue
}
tree.registerObserver(id, fl)
}
return nil
}<|fim▁end|> |
fl, ok := target.(Reactor) |
<|file_name|>mongodbcatalog_test.go<|end_file_name|><|fim▁begin|>package mongodbcatalog
import (
"fmt"
"strconv"
"strings"
"testing"
"github.com/cloudstax/firecamp/api/catalog"
"github.com/cloudstax/firecamp/api/common"
"github.com/cloudstax/firecamp/pkg/dns"
)
func TestMongoDBReplicaConfig(t *testing.T) {
region := "reg1"
platform := "ecs"
cluster := "t1"
domain := "t1-firecamp.com"
manageurl := "mgt." + domain
service := "s1"
member := "m1"
az := "az1"
azs := []string{"az1", "az2", "az3"}
maxMemMB := int64(256)
replSetName := getConfigServerName(service)
role := configRole
cfg := genReplicaConfig(platform, domain, member, replSetName, role, az)
if cfg.Zone != az || cfg.MemberName != member || len(cfg.Configs) != 1 {
t.Fatalf("expect zone %s member name %s 1 configs, get %s %s %d", az, member, cfg.Zone, cfg.MemberName, len(cfg.Configs))
}
if !strings.Contains(cfg.Configs[0].Content, configRole) {
t.Fatalf("expect configsvr role for config server, get %s", cfg.Configs[0].Content)
}
replSetName = getShardName(service, 0)
role = shardRole
cfg = genReplicaConfig(platform, domain, member, replSetName, role, az)
if cfg.Zone != az || cfg.MemberName != member || len(cfg.Configs) != 1 {
t.Fatalf("expect zone %s member name %s 1 configs, get %s %s %d", az, member, cfg.Zone, cfg.MemberName, len(cfg.Configs))
}
if !strings.Contains(cfg.Configs[0].Content, shardRole) {
t.Fatalf("expect shardsvr role for shard, get %s", cfg.Configs[0].Content)
}
replSetName = service
role = emptyRole
cfg = genReplicaConfig(platform, domain, member, replSetName, role, az)
if cfg.Zone != az || cfg.MemberName != member || len(cfg.Configs) != 1 {
t.Fatalf("expect zone %s member name %s 1 configs, get %s %s %d", az, member, cfg.Zone, cfg.MemberName, len(cfg.Configs))
}
if strings.Contains(cfg.Configs[0].Content, "clusterRole:") {
t.Fatalf("not expect cluster role for replica set, get %s", cfg.Configs[0].Content)
}
// test service configs
opts := &catalog.CatalogMongoDBOptions{
Shards: 1,
ReplicasPerShard: 1,
ReplicaSetOnly: true,
}
keyfileContent, err := GenKeyfileContent()
if err != nil {
t.Fatalf("genKeyfileContent error %s", err)
}
serviceCfgs := genServiceConfigs(platform, maxMemMB, opts, keyfileContent)
if !strings.Contains(serviceCfgs[0].Content, "SHARDS=1") {
t.Fatalf("expect 1 shards, get %s", cfg.Configs[0].Content)
}
// test replica configs
member = service + "-0"
replcfgs := genReplicaConfigs(platform, azs, cluster, service, maxMemMB, opts)
if replcfgs[0].Zone != azs[0] || replcfgs[0].MemberName != member || len(replcfgs) != 1 || len(replcfgs[0].Configs) != 1 {
t.Fatalf("expect zone %s member name %s 1 replcfg 3 configs, get %s %s %d %d", azs[0], member, replcfgs[0].Zone, replcfgs[0].MemberName, len(replcfgs), len(replcfgs[0].Configs))
}
if strings.Contains(replcfgs[0].Configs[0].Content, "clusterRole:") {
t.Fatalf("not expect cluster role for replica set, get %s", replcfgs[0].Configs[0].Content)
}
members := dns.GenDNSName(service+"-0", domain)
kvs := GenInitTaskEnvKVPairs(region, cluster, service, manageurl, opts)
if len(kvs) != 12 || kvs[11].Name != common.ENV_SERVICE_MEMBERS || kvs[11].Value != members {
t.Fatalf("expect 12 init kvs get %d, expect name %s value %s, get %s", len(kvs), common.ENV_SERVICE_MEMBERS, members, kvs[11])
}
opts.ReplicasPerShard = 3
member = service + "-0"
replcfgs = genReplicaConfigs(platform, azs, cluster, service, maxMemMB, opts)
if replcfgs[0].Zone != azs[0] || replcfgs[0].MemberName != member || len(replcfgs) != 3 || len(replcfgs[0].Configs) != 1 {
t.Fatalf("expect zone %s member name %s 3 replcfg 3 configs, get %s %s %d", az, member, replcfgs[0].Zone, replcfgs[0].MemberName, len(replcfgs), len(replcfgs[0].Configs))
}
if strings.Contains(replcfgs[1].Configs[0].Content, "clusterRole:") {
t.Fatalf("not expect cluster role for replica set, get %s", replcfgs[0].Configs[0].Content)
}
members = dns.GenDNSName(service+"-0", domain) + "," + dns.GenDNSName(service+"-1", domain) + "," + dns.GenDNSName(service+"-2", domain)
kvs = GenInitTaskEnvKVPairs(region, cluster, service, manageurl, opts)
if len(kvs) != 12 || kvs[11].Name != common.ENV_SERVICE_MEMBERS || kvs[11].Value != members {
t.Fatalf("expect 12 init kvs get %d, expect name %s value %s, get %s", common.ENV_SERVICE_MEMBERS, members, kvs[11])
}
opts.ReplicasPerShard = 1
opts.ReplicaSetOnly = false
opts.ConfigServers = 1
member = service + "-config-0"
replcfgs = genReplicaConfigs(platform, azs, cluster, service, maxMemMB, opts)
if replcfgs[0].Zone != azs[0] || replcfgs[0].MemberName != member || len(replcfgs) != 2 || len(replcfgs[0].Configs) != 1 {
t.Fatalf("expect zone %s member name %s 2 replcfg 3 configs, get %s %s %d", az, member, replcfgs[0].Zone, replcfgs[0].MemberName, len(replcfgs), len(replcfgs[0].Configs))
}
if strings.Contains(replcfgs[0].Configs[0].Content, "clusterrole: configsvr") {
t.Fatalf("expect cluster role configsvr for replica set, get %s", replcfgs[0].Configs[0].Content)
}
if strings.Contains(replcfgs[1].Configs[0].Content, "clusterrole:") {
t.Fatalf("not expect cluster role for replica set, get %s", replcfgs[1].Configs[0].Content)
}
kvs = GenInitTaskEnvKVPairs(region, cluster, service, manageurl, opts)
members = dns.GenDNSName(service+"-config-0", domain)
if len(kvs) != 13 || kvs[11].Name != envConfigServerMembers || kvs[11].Value != members {
t.Fatalf("expect 13 init kvs get %d, expect name %s value %s, get %s", envConfigServerMembers, members, kvs[11])
}
members = dns.GenDNSName(service+"-shard0-0", domain)
if kvs[12].Name != envShardMembers || kvs[12].Value != members {
t.Fatalf("expect name %s value %s, get %s", envShardMembers, members, kvs[12])
}
opts.ReplicasPerShard = 3
opts.ReplicaSetOnly = false
opts.ConfigServers = 1
member = service + "-config-0"
replcfgs = genReplicaConfigs(platform, azs, cluster, service, maxMemMB, opts)
if replcfgs[0].Zone != azs[0] || replcfgs[0].MemberName != member || len(replcfgs) != 4 || len(replcfgs[0].Configs) != 1 {
t.Fatalf("expect zone %s member name %s 4 replcfg 3 configs, get %s %s %d", az, member, replcfgs[0].Zone, replcfgs[0].MemberName, len(replcfgs), len(replcfgs[0].Configs))
}
if strings.Contains(replcfgs[0].Configs[0].Content, "clusterrole: configsvr") {
t.Fatalf("expect cluster role configsvr for replica set, get %s", replcfgs[0].Configs[0].Content)
}
if strings.Contains(replcfgs[1].Configs[0].Content, "clusterrole:") {
t.Fatalf("not expect cluster role for replica set, get %s", replcfgs[1].Configs[0].Content)
}
kvs = GenInitTaskEnvKVPairs(region, cluster, service, manageurl, opts)
members = dns.GenDNSName(service+"-config-0", domain)
if len(kvs) != 13 || kvs[11].Name != envConfigServerMembers || kvs[11].Value != members {
t.Fatalf("expect 13 init kvs get %d, expect name %s value %s, get %s", envConfigServerMembers, members, kvs[11])
}
members = dns.GenDNSName(service+"-shard0-0", domain) + "," + dns.GenDNSName(service+"-shard0-1", domain) + "," + dns.GenDNSName(service+"-shard0-2", domain)
if kvs[12].Name != envShardMembers || kvs[12].Value != members {
t.Fatalf("expect name %s value %s, get %s", envShardMembers, members, kvs[12])
}
opts.ReplicasPerShard = 3
opts.ReplicaSetOnly = false
opts.ConfigServers = 3
member = service + "-config-0"
replcfgs = genReplicaConfigs(platform, azs, cluster, service, maxMemMB, opts)
if replcfgs[0].Zone != azs[0] || replcfgs[0].MemberName != member || len(replcfgs) != 6 || len(replcfgs[0].Configs) != 1 {
t.Fatalf("expect zone %s member name %s 6 replcfg 3 configs, get %s %s %d", az, member, replcfgs[0].Zone, replcfgs[0].MemberName, len(replcfgs), len(replcfgs[0].Configs))
}
if strings.Contains(replcfgs[0].Configs[0].Content, "clusterrole: configsvr") {
t.Fatalf("expect cluster role configsvr for replica set, get %s", replcfgs[0].Configs[0].Content)
}
if strings.Contains(replcfgs[3].Configs[0].Content, "clusterrole:") {
t.Fatalf("not expect cluster role for replica set, get %s", replcfgs[3].Configs[0].Content)
}
kvs = GenInitTaskEnvKVPairs(region, cluster, service, manageurl, opts)
members = dns.GenDNSName(service+"-config-0", domain) + "," + dns.GenDNSName(service+"-config-1", domain) + "," + dns.GenDNSName(service+"-config-2", domain)
if len(kvs) != 13 || kvs[11].Name != envConfigServerMembers || kvs[11].Value != members {
t.Fatalf("expect 13 init kvs get %d, expect name %s value %s, get %s", envConfigServerMembers, members, kvs[11])
}
members = dns.GenDNSName(service+"-shard0-0", domain) + "," + dns.GenDNSName(service+"-shard0-1", domain) + "," + dns.GenDNSName(service+"-shard0-2", domain)
if kvs[12].Name != envShardMembers || kvs[12].Value != members {
t.Fatalf("expect name %s value %s, get %s", envShardMembers, members, kvs[12])
}
opts.Shards = 2
opts.ReplicasPerShard = 3
opts.ReplicaSetOnly = false
opts.ConfigServers = 3
member = service + "-config-0"
replcfgs = genReplicaConfigs(platform, azs, cluster, service, maxMemMB, opts)
if replcfgs[0].Zone != azs[0] || replcfgs[0].MemberName != member || len(replcfgs) != 9 || len(replcfgs[0].Configs) != 1 {
t.Fatalf("expect zone %s member name %s 9 replcfg 3 configs, get %s %s %d", az, member, replcfgs[0].Zone, replcfgs[0].MemberName, len(replcfgs), len(replcfgs[0].Configs))
}
// 3 config servers
for i := int64(0); i < opts.ConfigServers; i++ {
idx := int(i) % len(azs)
if replcfgs[i].Zone != azs[idx] {
t.Fatalf("expect zone %s for replica config %d, get zone %s", azs[idx], i, replcfgs[i].Zone)
}
member = service + "-config-" + strconv.FormatInt(i, 10)
if replcfgs[i].MemberName != member {
t.Fatalf("expect config member name %s for %d, get %s", member, i, replcfgs[i].MemberName)
}
}
// shards
for shard := int64(0); shard < opts.Shards; shard++ {
for i := int64(0); i < opts.ReplicasPerShard; i++ {
idx := int(shard+i) % len(azs)
replIdx := opts.ConfigServers + shard*opts.ReplicasPerShard + i
if replcfgs[replIdx].Zone != azs[idx] {
t.Fatalf("expect zone %s for replica config %d, get zone %s", azs[idx], i, replcfgs[replIdx].Zone)
}
if shard == 0 {
member = service + "-shard0-" + strconv.FormatInt(i, 10)
if replcfgs[replIdx].MemberName != member {
t.Fatalf("expect config member name %s for %d, get %s", member, i, replcfgs[replIdx].MemberName)
}
} else {
member = service + "-shard1-" + strconv.FormatInt(i, 10)
if replcfgs[replIdx].MemberName != member {
t.Fatalf("expect config member name %s for %d, get %s", member, i, replcfgs[replIdx].MemberName)
}
}
}
}
if strings.Contains(replcfgs[0].Configs[0].Content, "clusterrole: configsvr") {
t.Fatalf("expect cluster role configsvr for replica set, get %s", replcfgs[0].Configs[0].Content)<|fim▁hole|> t.Fatalf("not expect cluster role for replica set, get %s", replcfgs[3].Configs[0].Content)
}
kvs = GenInitTaskEnvKVPairs(region, cluster, service, manageurl, opts)
members = dns.GenDNSName(service+"-config-0", domain) + "," + dns.GenDNSName(service+"-config-1", domain) + "," + dns.GenDNSName(service+"-config-2", domain)
if len(kvs) != 13 || kvs[11].Name != envConfigServerMembers || kvs[11].Value != members {
t.Fatalf("expect 13 init kvs get %d, expect name %s value %s, get %s", envConfigServerMembers, members, kvs[11])
}
members = dns.GenDNSName(service+"-shard0-0", domain) + "," + dns.GenDNSName(service+"-shard0-1", domain) + "," + dns.GenDNSName(service+"-shard0-2", domain)
members += ";" + dns.GenDNSName(service+"-shard1-0", domain) + "," + dns.GenDNSName(service+"-shard1-1", domain) + "," + dns.GenDNSName(service+"-shard1-2", domain)
if kvs[12].Name != envShardMembers || kvs[12].Value != members {
t.Fatalf("expect name %s value %s, get %s", envShardMembers, members, kvs[12])
}
}
func TestParseServiceConfigs(t *testing.T) {
shards := int64(2)
replPerShard := int64(3)
replSetOnly := false
configServers := int64(3)
admin := "admin"
pass := "pass"
content := fmt.Sprintf(servicefileContent, "ecs", shards, replPerShard,
strconv.FormatBool(replSetOnly), configServers, admin, pass)
opts, err := ParseServiceConfigs(content)
if err != nil {
t.Fatalf("ParseServiceConfigs expect success, get error %s", err)
}
if opts.Shards != shards || opts.ReplicasPerShard != replPerShard ||
opts.ReplicaSetOnly != replSetOnly || opts.ConfigServers != configServers ||
opts.Admin != admin || opts.AdminPasswd != pass {
t.Fatalf("config mismatch, get %s", opts)
}
}<|fim▁end|> | }
if strings.Contains(replcfgs[3].Configs[0].Content, "clusterrole:") { |
<|file_name|>formService.js<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* A service for maintaining form-related metadata and linking that data to
* corresponding controllers and templates.
*/
angular.module('form').provider('formService', function formServiceProvider() {
/**
* Reference to the provider itself.
*
* @type formServiceProvider
*/
var provider = this;
/**
* Map of all registered field type definitions by name.
*
* @type Object.<String, FieldType>
*/
this.fieldTypes = {
/**
* Text field type.
*
* @see {@link Field.Type.TEXT}
* @type FieldType
*/
'TEXT' : {
module : 'form',
controller : 'textFieldController',
templateUrl : 'app/form/templates/textField.html'
},
/**
* Email address field type.
*
* @see {@link Field.Type.EMAIL}
* @type FieldType
*/
'EMAIL' : {
templateUrl : 'app/form/templates/emailField.html'
},
/**
* Numeric field type.
*
* @see {@link Field.Type.NUMERIC}
* @type FieldType
*/
'NUMERIC' : {
module : 'form',
controller : 'numberFieldController',
templateUrl : 'app/form/templates/numberField.html'
},
/**
* Boolean field type.
*
* @see {@link Field.Type.BOOLEAN}
* @type FieldType
*/
'BOOLEAN' : {
module : 'form',
controller : 'checkboxFieldController',
templateUrl : 'app/form/templates/checkboxField.html'
},
/**
* Username field type. Identical in principle to a text field, but may
* have different semantics.
*
* @see {@link Field.Type.USERNAME}
* @type FieldType
*/
'USERNAME' : {
templateUrl : 'app/form/templates/textField.html'
},
/**
* Password field type. Similar to a text field, but the contents of
* the field are masked.
*
* @see {@link Field.Type.PASSWORD}
* @type FieldType
*/
'PASSWORD' : {
module : 'form',
controller : 'passwordFieldController',
templateUrl : 'app/form/templates/passwordField.html'
},
/**
* Enumerated field type. The user is presented a finite list of values
* to choose from.
*
* @see {@link Field.Type.ENUM}
* @type FieldType
*/
'ENUM' : {
module : 'form',
controller : 'selectFieldController',
templateUrl : 'app/form/templates/selectField.html'
},
/**
* Multiline field type. The user may enter multiple lines of text.
*
* @see {@link Field.Type.MULTILINE}
* @type FieldType
*/
'MULTILINE' : {
templateUrl : 'app/form/templates/textAreaField.html'
},
/**
* Field type which allows selection of languages. The languages
* displayed are the set of languages supported by the Guacamole web
* application. Legal values are valid language IDs, as dictated by
* the filenames of Guacamole's available translations.
*
* @see {@link Field.Type.LANGUAGE}
* @type FieldType
*/
'LANGUAGE' : {
module : 'form',
controller : 'languageFieldController',
templateUrl : 'app/form/templates/languageField.html'
},
/**
* Field type which allows selection of time zones.
*
* @see {@link Field.Type.TIMEZONE}
* @type FieldType
*/
'TIMEZONE' : {
module : 'form',
controller : 'timeZoneFieldController',
templateUrl : 'app/form/templates/timeZoneField.html'
},
/**
* Field type which allows selection of individual dates.
*
* @see {@link Field.Type.DATE}
* @type FieldType
*/
'DATE' : {
module : 'form',
controller : 'dateFieldController',
templateUrl : 'app/form/templates/dateField.html'
},
/**
* Field type which allows selection of times of day.
*
* @see {@link Field.Type.TIME}
* @type FieldType
*/
'TIME' : {
module : 'form',
controller : 'timeFieldController',
templateUrl : 'app/form/templates/timeField.html'
},
/**
* Field type which allows selection of color schemes accepted by the
* Guacamole server terminal emulator and protocols which leverage it.
*
* @see {@link Field.Type.TERMINAL_COLOR_SCHEME}
* @type FieldType
*/
'TERMINAL_COLOR_SCHEME' : {
module : 'form',
controller : 'terminalColorSchemeFieldController',
templateUrl : 'app/form/templates/terminalColorSchemeField.html'
}
};
/**
* Registers a new field type under the given name.
*
* @param {String} fieldTypeName
* The name which uniquely identifies the field type being registered.
*
* @param {FieldType} fieldType
* The field type definition to associate with the given name.
*/
this.registerFieldType = function registerFieldType(fieldTypeName, fieldType) {
// Store field type
provider.fieldTypes[fieldTypeName] = fieldType;
};
// Factory method required by provider
this.$get = ['$injector', function formServiceFactory($injector) {
// Required services
var $compile = $injector.get('$compile');
var $q = $injector.get('$q');
var $templateRequest = $injector.get('$templateRequest');
var service = {};
service.fieldTypes = provider.fieldTypes;
/**
* Compiles and links the field associated with the given name to the given
* scope, producing a distinct and independent DOM Element which functions
* as an instance of that field. The scope object provided must include at
* least the following properties:
*
* namespace:
* A String which defines the unique namespace associated the
* translation strings used by the form using a field of this type.
*
* fieldId:
* A String value which is reasonably likely to be unique and may
* be used to associate the main element of the field with its
* label.
*
* field:
* The Field object that is being rendered, representing a field of
* this type.
*
* model:
* The current String value of the field, if any.
*
* disabled:
* A boolean value which is true if the field should be disabled.
* If false or undefined, the field should be enabled.
*
* @param {Element} fieldContainer
* The DOM Element whose contents should be replaced with the
* compiled field template.
*
* @param {String} fieldTypeName
* The name of the field type defining the nature of the element to be
* created.
*
* @param {Object} scope
* The scope to which the new element will be linked.
*
* @return {Promise.<Element>}
* A Promise which resolves to the compiled Element. If an error occurs
* while retrieving the field type, this Promise will be rejected.
*/
service.insertFieldElement = function insertFieldElement(fieldContainer,
fieldTypeName, scope) {
// Ensure field type is defined
var fieldType = provider.fieldTypes[fieldTypeName];
if (!fieldType)
return $q.reject();
var templateRequest;
// Use raw HTML template if provided
if (fieldType.template) {
var deferredTemplate = $q.defer();
deferredTemplate.resolve(fieldType.template);
templateRequest = deferredTemplate.promise;
}<|fim▁hole|> // If no raw HTML template is provided, retrieve template from URL
else if (fieldType.templateUrl)
templateRequest = $templateRequest(fieldType.templateUrl);
// Otherwise, use empty template
else {
var emptyTemplate= $q.defer();
emptyTemplate.resolve('');
templateRequest = emptyTemplate.promise;
}
// Defer compilation of template pending successful retrieval
var compiledTemplate = $q.defer();
// Resolve with compiled HTML upon success
templateRequest.then(function templateRetrieved(html) {
// Insert template into DOM
fieldContainer.innerHTML = html;
// Populate scope using defined controller
if (fieldType.module && fieldType.controller) {
var $controller = angular.injector(['ng', fieldType.module]).get('$controller');
$controller(fieldType.controller, {
'$scope' : scope,
'$element' : angular.element(fieldContainer.childNodes)
});
}
// Compile DOM with populated scope
compiledTemplate.resolve($compile(fieldContainer.childNodes)(scope));
})
// Reject on failure
['catch'](function templateError() {
compiledTemplate.reject();
});
// Return promise which resolves to the compiled template
return compiledTemplate.promise;
};
return service;
}];
});<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from 'react';
import { Card } from 'bm-kit';
import './_pillar.schedule.source.scss';
const friday = [
{
start: '6:00 PM',
name: '📋 Check in begins'
},
{
start: '8:00 PM',
name: '🎤 Opening Ceremonies'
},
{
start: '9:00 PM',
name: '🤝 Team assembly'
},
{
start: '9:30 PM',
name: '🌮 Dinner'
},
{
start: '10:00 PM',
name: '💻 Hacking Begins'
},
{
start: '10:00 PM',
name: '🤖 Fundamentals of AI with Intel'
},
{
start: '12:00 AM',
name: '🥋 Ninja'
}
];
let saturday = [
{
start: '3:00 AM',
name: '🍿 Late Night Snack'
},
{
start: '8:00 AM',
name: '🥓 Breakfast'
},
{
start: '9:00 AM',
name: '🏗 Workshop'
},
{
start: '12:30 PM',
name: '🍝 Lunch'
},
{
start: '1:00 PM',
name: '👪 Facebook Tech Talk'
},
{
start: '2:00 PM',
name: '🐶 Doggos/Woofers'
},
{
start: '2:30 PM',
name: '✈️ Rockwell Collins Talk'
},
{
start: '3:00 PM',
name: '🍿 Snack'
},
{
start: '3:00 PM',
name: '🚣🏽 Activity'
},
{
start: '4:00 PM',
name: '📈 Startups with T.A. MaCann'
},
{
start: '6:00 PM',
name: '🍕 Dinner'
},
{
start: '9:00 PM',<|fim▁hole|> },
{
start: '10:00 PM',
name: '🍩 Donuts and Kona Ice'
},
{
start: '10:00 PM',
name: '🏗️ Jenga'
}
];
let sunday = [
{
start: '1:00 AM',
name: '🍿 Late Night Snack'
},
{
start: '8:00 AM',
name: '🍳 Breakfast'
},
{
start: '9:30 AM',
name: '🛑 Hacking Ends'
},
{
start: '10:00 AM',
name: '📔 Expo Begins'
},
{
start: '11:30 AM',
name: '🍞 Lunch'
},
{
start: '1:00 PM',
name: '🎭 Closing Ceremonies'
},
{
start: '2:30 PM',
name: '🚌 Buses Depart'
}
];
const ScheduleDay = ({ dayData, title }) => (
<Card className="p-schedule__day">
<h3 className="text-center">{title}</h3>
{dayData.map(item => (
<div className="p-schedule__item" key={item.name + item.start}>
<div className="p-schedule__item_about">
<span className="p-schedule__item_time">{item.start}</span>
<span className="p-schedule__item_title">{item.name}</span>
</div>
<div className="p-schedule__item_info">{item.info}</div>
</div>
))}
</Card>
);
const Schedule = ({ small }) => (
<div className="p-schedule">
{small ? <h3 style={{ marginTop: 0 }}>Schedule</h3> : <h1>Schedule</h1>}
<div className="p-schedule__days">
<ScheduleDay dayData={friday} title="Friday (10/19)" />
<ScheduleDay dayData={saturday} title="Saturday (10/20)" />
<ScheduleDay dayData={sunday} title="Sunday (10/21)" />
</div>
</div>
);
export default Schedule;<|fim▁end|> | name: '🥤 Cup stacking with MLH' |
<|file_name|>CircularLinkedList.java<|end_file_name|><|fim▁begin|>/**
* CircularLinkedList implementation
* @author Tyler Smith
* @version 1.0
*/
public class CircularLinkedList<T> implements LinkedListInterface<T> {
private Node<T> head = null, tail = head;
private int size = 0;
@Override
public void addAtIndex(int index, T data) {
if (index < 0 || index > this.size()) {
throw new IndexOutOfBoundsException();
}
if (index == 0) {
this.addToFront(data);
} else if (index == this.size()) {
this.addToBack(data);
} else {
Node<T> current = head;
if (index == 1) {
current.setNext(new Node<T>(data, current.getNext()));
} else {
for (int i = 0; i < index - 1; i++) {
current = current.getNext();
}
Node<T> temp = current;
current = new Node<T>(data, temp);
}
size++;
}
}
@Override
public T get(int index) {
if (index < 0 || index >= size) {
throw new IndexOutOfBoundsException();
}
Node<T> current = head;
for (int i = 0; i < index; i++) {
current = current.getNext();
}
return current.getData();
}
@Override
public T removeAtIndex(int index) {
if (index < 0 || index >= this.size()) {
throw new IndexOutOfBoundsException();
}
if (index == 0) {
T data = head.getData();
head = head.getNext();
tail.setNext(head);
size--;
return data;
} else {
Node<T> before = tail;
Node<T> current = head;
for (int i = 0; i < index; i++) {
before = current;
current = current.getNext();
}
T data = current.getData();
before.setNext(current.getNext());
size--;
return data;
}
}
@Override
public void addToFront(T t) {
if (this.isEmpty()) {
head = new Node<T>(t, tail);
tail = head;
tail.setNext(head);
size++;
return;
}
Node<T> node = new Node<T>(t, head);
head = node;
tail.setNext(head);
size++;
}
@Override
public void addToBack(T t) {
if (this.isEmpty()) {
tail = new Node<T>(t);
head = tail;
tail.setNext(head);
head.setNext(tail);
size++;
} else {
Node<T> temp = tail;
tail = new Node<T>(t, head);
temp.setNext(tail);
size++;
}
}
@Override
public T removeFromFront() {
if (this.isEmpty()) {
return null;
}
Node<T> ret = head;
head = head.getNext();
tail.setNext(head);
size--;
return ret.getData();
}
@Override
public T removeFromBack() {
if (this.isEmpty()) {
return null;
}
Node<T> iterate = head;
while (iterate.getNext() != tail) {
iterate = iterate.getNext();
}
iterate.setNext(head);
Node<T> ret = tail;
tail = iterate;
size--;
return ret.getData();
}
@SuppressWarnings("unchecked")
@Override
public T[] toList() {
Object[] list = new Object[this.size()];
int i = 0;
Node<T> current = head;
while (i < this.size()) {
list[i] = current.getData();
current = current.getNext();
i++;
}
return ((T[]) list);
}
@Override
public boolean isEmpty() {
return (this.size() == 0);
}
@Override
public int size() {
return size;
}
@Override
public void clear() {
head = null;
tail = null;
size = 0;
}
/**
* Reference to the head node of the linked list.
* Normally, you would not do this, but we need it<|fim▁hole|> public Node<T> getHead() {
return head;
}
/**
* Reference to the tail node of the linked list.
* Normally, you would not do this, but we need it
* for grading your work.
*
* @return Node representing the tail of the linked list
*/
public Node<T> getTail() {
return tail;
}
/**
* This method is for your testing purposes.
* You may choose to implement it if you wish.
*/
@Override
public String toString() {
return "";
}
}<|fim▁end|> | * for grading your work.
*
* @return Node representing the head of the linked list
*/ |
<|file_name|>services.py<|end_file_name|><|fim▁begin|>import re
from copy import copy
from random import randint
class Server(object):
def __init__(self, ip, port, hostname):
self.ip = ip
self.port = port
self.hostname = hostname
self.weight = 500
self.maxconn = None
def __cmp__(self, other):
if not isinstance(other, Server):
return -1
return cmp((self.ip, self.port, self.weight, self.maxconn), (other.ip, other.port, other.weight, other.maxconn))
def __hash__(self):
return hash((self.ip, self.port, self.weight, self.maxconn))
def __str__(self):
extra = []
if self.weight != 500:
extra.append("weight=%d" % self.weight)
if self.maxconn:
extra.append("maxconn=%d" % self.maxconn)
result = '%s:%s' % (self.ip, self.port)
if extra:
result += '(%s)' % ','.join(extra)
return result
def __repr__(self):
return 'Server(%s, %s, %s, %s)' % (repr(self.ip), repr(self.port), repr(self.weight), repr(self.maxconn))
def clone(self):
return copy(self)
def setWeight(self, weight):
clone = self.clone()
clone.weight = weight
return clone
def setMaxconn(self, maxconn):
clone = self.clone()
clone.maxconn = maxconn
return clone
class Service(object):
def __init__(self, name, source, port, protocol, application='binary', healthcheck=False, healthcheckurl='/', timeoutclient=None, timeoutserver=None):
self.name = name
self.source = source
self.port = port
self.protocol = protocol
self.application = application
self.healthcheck = healthcheck
self.healthcheckurl = healthcheckurl
self.timeoutclient = timeoutclient
self.timeoutserver = timeoutserver
self.servers = set()
self.slots = []
# Check if there's a port override
match = re.search('.@(\d+)$', self.name)
if match:
self.name = self.name[0:-(len(match.group(1))+1)]
self.port = int(match.group(1))
def clone(self):
clone = Service(self.name, self.source, self.port, self.protocol, self.application, self.healthcheck, self.healthcheckurl, self.timeoutclient,
self.timeoutserver)
clone.servers = set(self.servers)
clone.slots = list(self.slots)
return clone
def __str__(self):
# Represent misc. service attributes as k=v pairs, but only if their value is not None
service_attributes = ['timeoutclient', 'timeoutserver']
service_options = ['%s=%s' % (attr, getattr(self, attr)) for attr in service_attributes if getattr(self, attr) is not None]
# Only use healthcheckurl if healtcheck has a meaningful value
if self.healthcheck:
service_options.append('healtcheck=%s' % self.healthcheck)
service_options.append('healthcheckurl=%s' % self.healthcheckurl)
return '%s:%s/%s%s -> [%s]' % (
self.name, self.port, self.application if self.application != 'binary' else self.protocol,
'(%s)' % ','.join(service_options) if service_options else '',
', '.join([str(s) for s in sorted(self.servers)]))
def __repr__(self):
return 'Service(%s, %s, %s, %s, %s)' % (repr(self.name), repr(self.port), repr(self.protocol), repr(self.application), repr(sorted(self.servers)))
def __cmp__(self, other):
if not isinstance(other, Service):
return -1
return cmp((self.name, self.port, self.protocol, self.servers), (other.name, other.port, other.protocol, other.servers))
def __hash__(self):
return hash((self.name, self.port, self.protocol, self.servers))
@property
def portname(self):
return re.sub('[^a-zA-Z0-9]', '_', str(self.port))
@property
def marathonpath(self):
ret = ''
for s in self.name.split('.'):
if ret is not '':
ret = s + '.' + ret
else:
ret = s
return ret
def update(self, other):
"""
Returns an new updated Service object
"""
clone = self.clone()
clone.name = other.name
clone.source = other.source
clone.port = other.port
clone.protocol = other.protocol
clone.timeoutclient = other.timeoutclient
clone.timeoutserver = other.timeoutserver
for server in clone.servers - other.servers:
clone._remove(server)
for server in other.servers - clone.servers:
clone._add(server)
return clone
def addServer(self, server):
clone = self.clone()
clone._add(server)
return clone
def setApplication(self, application):
clone = self.clone()
clone.application = application
return clone
def _add(self, server):
self.servers.add(server)
# Keep servers in the same index when they're added
for i in range(len(self.slots)):
if not self.slots[i]:
self.slots[i] = server
return
# Not present in list, just insert randomly
self.slots.insert(randint(0, len(self.slots)), server)
def _remove(self, server):
self.servers.remove(server)
# Set the server slot to None
for i in range(len(self.slots)):<|fim▁hole|>
raise KeyError(str(server))<|fim▁end|> | if self.slots[i] == server:
del self.slots[i]
return |
<|file_name|>jinja2.py<|end_file_name|><|fim▁begin|>from django.contrib.staticfiles.storage import staticfiles_storage
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from jinja2 import Environment
from albums.models import Album, Artist, RecordLabel
def get_spotify_search_url(term):
return 'https://open.spotify.com/search/results/'+term
def get_entity_url(watson_obj):
content_type = ContentType.objects.get(app_label=watson_obj.content_type.app_label,
model=watson_obj.content_type.model)
obj_class = content_type.model_class()
url = ''
if obj_class == Album:
url = reverse('albums:album-detail', args=[watson_obj.object_id_int])
elif obj_class == Artist:
url = reverse('albums:albums-by-artist', args=[watson_obj.object_id_int])
elif obj_class == RecordLabel:
url = reverse('albums:albums-by-label', args=[watson_obj.object_id_int])
return url
ENTITY_LABELS = {
Album: 'Album',
RecordLabel: 'Label',
Artist: 'Artist',
}
def get_entity_type_label(watson_obj):
content_type = ContentType.objects.get(app_label=watson_obj.content_type.app_label,
model=watson_obj.content_type.model)
obj_class = content_type.model_class()
return ENTITY_LABELS[obj_class]
def environment(**options):
env = Environment(**options)
env.globals.update({<|fim▁hole|> 'get_spotify_search_url': get_spotify_search_url,
'get_entity_url': get_entity_url,
'get_entity_type_label': get_entity_type_label,
})
return env<|fim▁end|> | 'static': staticfiles_storage.url,
'url': reverse, |
<|file_name|>get_vgg16.py<|end_file_name|><|fim▁begin|>from tensorflow.keras.applications.vgg16 import VGG16<|fim▁hole|>tfjs.converters.save_keras_model(model, 'vgg16_tfjs')<|fim▁end|> | import tensorflowjs as tfjs
model = VGG16(weights='imagenet')
|
<|file_name|>BaseExporter.js<|end_file_name|><|fim▁begin|>BaseExporter = function () {};
BaseExporter.prototype = {};
function niceWrite (writeStream, toWrite) {
// NOTE: might need to break toWrite into multiple strings
if (toWrite.length > 10000) {
console.log("break toWrite into multiple strings");
}
var keepWriting = writeStream.write(toWrite);
if (keepWriting) {
// return a promise that has already been resolved
// any .then()s connected to this will fire immidiately
return Q();
}
// waits until the stream has drained, then resolves
return new Q.Promise(function (resolve) {
writeStream.once("drain", resolve);
});
}
BaseExporter.prototype.run = function (destination, options) {
var self = this;
return new Q.Promise(function (resolve, reject) {
// make sure it's being called with new keyword
if (!self.init) {
console.log("not called with new keyword");<|fim▁hole|> self.init.call(self, options);
self.writeStream = createWriteStream(destination);
var lineNumber = 1; // careful: starts at 1
function writeNextLine () {
var chunks = [];
var lineMaybePromise = self.getLine.call(self, function (chunk) {
chunks.push(chunk);
}, lineNumber);
Q(lineMaybePromise)
.then(function () {
if (chunks.length) {
lineNumber++;
// write all chunks, wait for drain (if necessary), call itself
niceWrite(self.writeStream, chunks.join("") + "\n")
.then(writeNextLine)
.catch(reject);
} else {
// end the write stream, close the file
self.writeStream.end(resolve);
}
});
}
writeNextLine(); // kick off the writing
});
};
BaseExporter.prototype.init = function (write, lineNumber) {
throw new Error("init not overridden");
};
BaseExporter.prototype.getLine = function (write, lineNumber) {
throw new Error("getLine not overridden");
};<|fim▁end|> | throw new Error("not called with new keyword");
}
|
<|file_name|>WrapBootstrapActivity.java<|end_file_name|><|fim▁begin|>package org.develnext.jphp.android.ext.classes.app;
import android.os.Bundle;
import org.develnext.jphp.android.AndroidStandaloneLoader;
import org.develnext.jphp.android.ext.AndroidExtension;
import php.runtime.annotation.Reflection;
@Reflection.Name(AndroidExtension.NAMESPACE + "app\\BootstrapActivity")
public class WrapBootstrapActivity extends WrapActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreateClearly(savedInstanceState);<|fim▁hole|> AndroidStandaloneLoader.INSTANCE.run(this);
getEnvironment().invokeMethodNoThrow(this, "onCreate");
}
}<|fim▁end|> | |
<|file_name|>TestJsonReader.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.vector.complex.writer;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import java.util.zip.GZIPOutputStream;
import org.apache.drill.BaseTestQuery;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.util.FileUtils;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.proto.UserBitShared;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.vector.IntVector;
import org.apache.drill.exec.vector.RepeatedBigIntVector;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
import org.junit.rules.TemporaryFolder;
public class TestJsonReader extends BaseTestQuery {
// private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestJsonReader.class);
private static final boolean VERBOSE_DEBUG = false;
@Rule
public TemporaryFolder folder = new TemporaryFolder();
@Test
public void schemaChange() throws Exception {
test("select b from dfs.`${WORKING_PATH}/src/test/resources/vector/complex/writer/schemaChange/`");
}
@Test
@Ignore("DRILL-1824")
public void schemaChangeValidate() throws Exception {
testBuilder() //
.sqlQuery("select b from dfs.`${WORKING_PATH}/src/test/resources/vector/complex/writer/schemaChange/`") //
.unOrdered() //
.jsonBaselineFile("/vector/complex/writer/expected.json") //
.build()
.run();
}
public void runTestsOnFile(String filename, UserBitShared.QueryType queryType, String[] queries, long[] rowCounts) throws Exception {
if (VERBOSE_DEBUG) {
System.out.println("===================");
System.out.println("source data in json");
System.out.println("===================");
System.out.println(Files.toString(FileUtils.getResourceAsFile(filename), Charsets.UTF_8));
}
int i = 0;
for (String query : queries) {
if (VERBOSE_DEBUG) {
System.out.println("=====");
System.out.println("query");
System.out.println("=====");
System.out.println(query);
System.out.println("======");
System.out.println("result");
System.out.println("======");
}
int rowCount = testRunAndPrint(queryType, query);
assertEquals(rowCounts[i], rowCount);
System.out.println();
i++;
}
}
@Test
public void testReadCompressed() throws Exception {
String filepath = "compressed_json.json";
File f = folder.newFile(filepath);
PrintWriter out = new PrintWriter(f);
out.println("{\"a\" :5}");
out.close();
gzipIt(f);
testBuilder()
.sqlQuery("select * from dfs.`" + f.getPath() + ".gz" + "`")
.unOrdered()
.baselineColumns("a")
.baselineValues(5l)
.build().run();
// test reading the uncompressed version as well
testBuilder()
.sqlQuery("select * from dfs.`" + f.getPath() + "`")
.unOrdered()
.baselineColumns("a")
.baselineValues(5l)
.build().run();
}
public void gzipIt(File sourceFile) throws IOException {
// modified from: http://www.mkyong.com/java/how-to-compress-a-file-in-gzip-format/
byte[] buffer = new byte[1024];
GZIPOutputStream gzos =
new GZIPOutputStream(new FileOutputStream(sourceFile.getPath() + ".gz"));
FileInputStream in =
new FileInputStream(sourceFile);
int len;
while ((len = in.read(buffer)) > 0) {
gzos.write(buffer, 0, len);
}
in.close();
gzos.finish();
gzos.close();
}
@Test
public void testDrill_1419() throws Exception {
String[] queries = {"select t.trans_id, t.trans_info.prod_id[0],t.trans_info.prod_id[1] from cp.`/store/json/clicks.json` t limit 5"};
long[] rowCounts = {5};
String filename = "/store/json/clicks.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
}
@Test
public void testRepeatedCount() throws Exception {
test("select repeated_count(str_list) from cp.`/store/json/json_basic_repeated_varchar.json`");
test("select repeated_count(INT_col) from cp.`/parquet/alltypes_repeated.json`");
test("select repeated_count(FLOAT4_col) from cp.`/parquet/alltypes_repeated.json`");
test("select repeated_count(VARCHAR_col) from cp.`/parquet/alltypes_repeated.json`");
test("select repeated_count(BIT_col) from cp.`/parquet/alltypes_repeated.json`");
}
@Test
public void testRepeatedContains() throws Exception {
test("select repeated_contains(str_list, 'asdf') from cp.`/store/json/json_basic_repeated_varchar.json`");
test("select repeated_contains(INT_col, -2147483648) from cp.`/parquet/alltypes_repeated.json`");
test("select repeated_contains(FLOAT4_col, -1000000000000.0) from cp.`/parquet/alltypes_repeated.json`");
test("select repeated_contains(VARCHAR_col, 'qwerty' ) from cp.`/parquet/alltypes_repeated.json`");
test("select repeated_contains(BIT_col, true) from cp.`/parquet/alltypes_repeated.json`");
test("select repeated_contains(BIT_col, false) from cp.`/parquet/alltypes_repeated.json`");
}
@Test
public void testSingleColumnRead_vector_fill_bug() throws Exception {
String[] queries = {"select * from cp.`/store/json/single_column_long_file.json`"};
long[] rowCounts = {13512};
String filename = "/store/json/single_column_long_file.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
}
@Test
public void testNonExistentColumnReadAlone() throws Exception {
String[] queries = {"select non_existent_column from cp.`/store/json/single_column_long_file.json`"};
long[] rowCounts = {13512};
String filename = "/store/json/single_column_long_file.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
}
@Test
public void testAllTextMode() throws Exception {
test("alter system set `store.json.all_text_mode` = true");
String[] queries = {"select * from cp.`/store/json/schema_change_int_to_string.json`"};
long[] rowCounts = {3};
String filename = "/store/json/schema_change_int_to_string.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
test("alter system set `store.json.all_text_mode` = false");
}
@Test
public void readComplexWithStar() throws Exception {
List<QueryDataBatch> results = testSqlWithResults("select * from cp.`/store/json/test_complex_read_with_star.json`");
assertEquals(1, results.size());
RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
QueryDataBatch batch = results.get(0);
assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
assertEquals(3, batchLoader.getSchema().getFieldCount());
testExistentColumns(batchLoader);
batch.release();
batchLoader.clear();
}
@Test
public void testNullWhereListExpected() throws Exception {
test("alter system set `store.json.all_text_mode` = true");
String[] queries = {"select * from cp.`/store/json/null_where_list_expected.json`"};
long[] rowCounts = {3};
String filename = "/store/json/null_where_list_expected.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
test("alter system set `store.json.all_text_mode` = false");
}
@Test
public void testNullWhereMapExpected() throws Exception {
test("alter system set `store.json.all_text_mode` = true");
String[] queries = {"select * from cp.`/store/json/null_where_map_expected.json`"};
long[] rowCounts = {3};
String filename = "/store/json/null_where_map_expected.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
test("alter system set `store.json.all_text_mode` = false");
}
@Test
public void ensureProjectionPushdown() throws Exception {
// Tests to make sure that we are correctly eliminating schema changing columns. If completes, means that the projection pushdown was successful.
test("alter system set `store.json.all_text_mode` = false; "
+ "select t.field_1, t.field_3.inner_1, t.field_3.inner_2, t.field_4.inner_1 "
+ "from cp.`store/json/schema_change_int_to_string.json` t");
}
// The project pushdown rule is correctly adding the projected columns to the scan, however it is not removing
// the redundant project operator after the scan, this tests runs a physical plan generated from one of the tests to
// ensure that the project is filtering out the correct data in the scan alone
@Test
public void testProjectPushdown() throws Exception {
String[] queries = {Files.toString(FileUtils.getResourceAsFile("/store/json/project_pushdown_json_physical_plan.json"), Charsets.UTF_8)};
long[] rowCounts = {3};
String filename = "/store/json/schema_change_int_to_string.json";
test("alter system set `store.json.all_text_mode` = false");
runTestsOnFile(filename, UserBitShared.QueryType.PHYSICAL, queries, rowCounts);
List<QueryDataBatch> results = testPhysicalWithResults(queries[0]);
assertEquals(1, results.size());
// "`field_1`", "`field_3`.`inner_1`", "`field_3`.`inner_2`", "`field_4`.`inner_1`"
RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
QueryDataBatch batch = results.get(0);
assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
// this used to be five. It is now three. This is because the plan doesn't have a project.
// Scanners are not responsible for projecting non-existent columns (as long as they project one column)
assertEquals(3, batchLoader.getSchema().getFieldCount());
testExistentColumns(batchLoader);
batch.release();
batchLoader.clear();
}
private void testExistentColumns(RecordBatchLoader batchLoader) throws SchemaChangeException {
VectorWrapper<?> vw = batchLoader.getValueAccessorById(
RepeatedBigIntVector.class, //
batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_1")).getFieldIds() //
);
assertEquals("[1]", vw.getValueVector().getAccessor().getObject(0).toString());
assertEquals("[5]", vw.getValueVector().getAccessor().getObject(1).toString());
assertEquals("[5,10,15]", vw.getValueVector().getAccessor().getObject(2).toString());
vw = batchLoader.getValueAccessorById(
IntVector.class, //
batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_3", "inner_1")).getFieldIds() //
);
assertNull(vw.getValueVector().getAccessor().getObject(0));
assertEquals(2l, vw.getValueVector().getAccessor().getObject(1));
assertEquals(5l, vw.getValueVector().getAccessor().getObject(2));
vw = batchLoader.getValueAccessorById(
IntVector.class, //
batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_3", "inner_2")).getFieldIds() //
);
assertNull(vw.getValueVector().getAccessor().getObject(0));
assertNull(vw.getValueVector().getAccessor().getObject(1));
assertEquals(3l, vw.getValueVector().getAccessor().getObject(2));
vw = batchLoader.getValueAccessorById(
RepeatedBigIntVector.class, //
batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_4", "inner_1")).getFieldIds() //
);<|fim▁hole|> assertEquals("[4,5,6]", vw.getValueVector().getAccessor().getObject(2).toString());
}
}<|fim▁end|> | assertEquals("[]", vw.getValueVector().getAccessor().getObject(0).toString());
assertEquals("[1,2,3]", vw.getValueVector().getAccessor().getObject(1).toString()); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from flask import Flask
app = Flask(__name__)
<|fim▁hole|><|fim▁end|> | import views |
<|file_name|>TaskIconCache.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.quickstep;
import static com.android.launcher3.FastBitmapDrawable.newIcon;
import static com.android.launcher3.uioverrides.QuickstepLauncher.GO_LOW_RAM_RECENTS_ENABLED;
import static com.android.launcher3.util.Executors.MAIN_EXECUTOR;
import android.app.ActivityManager.TaskDescription;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Handler;
import android.os.Looper;
import android.os.UserHandle;
import android.util.SparseArray;
import android.view.accessibility.AccessibilityManager;
import androidx.annotation.WorkerThread;
import com.android.launcher3.FastBitmapDrawable;
import com.android.launcher3.R;
import com.android.launcher3.Utilities;
import com.android.launcher3.icons.BitmapInfo;
import com.android.launcher3.icons.IconProvider;
import com.android.launcher3.icons.LauncherIcons;
import com.android.launcher3.icons.cache.HandlerRunnable;
import com.android.launcher3.util.Preconditions;
import com.android.quickstep.util.TaskKeyLruCache;
import com.android.systemui.shared.recents.model.Task;
import com.android.systemui.shared.recents.model.Task.TaskKey;
import com.android.systemui.shared.system.ActivityManagerWrapper;
import com.android.systemui.shared.system.PackageManagerWrapper;
import com.android.systemui.shared.system.TaskDescriptionCompat;
import java.util.function.Consumer;
/**
* Manages the caching of task icons and related data.
*/
public class TaskIconCache {
private final Handler mBackgroundHandler;
private final AccessibilityManager mAccessibilityManager;
private final Context mContext;
private final TaskKeyLruCache<TaskCacheEntry> mIconCache;
private final SparseArray<BitmapInfo> mDefaultIcons = new SparseArray<>();
private final IconProvider mIconProvider;
public TaskIconCache(Context context, Looper backgroundLooper) {
mContext = context;
mBackgroundHandler = new Handler(backgroundLooper);
mAccessibilityManager = context.getSystemService(AccessibilityManager.class);
Resources res = context.getResources();
int cacheSize = res.getInteger(R.integer.recentsIconCacheSize);
mIconCache = new TaskKeyLruCache<>(cacheSize);
mIconProvider = new IconProvider(context);
}
/**
* Asynchronously fetches the icon and other task data.
*
* @param task The task to fetch the data for
* @param callback The callback to receive the task after its data has been populated.
* @return A cancelable handle to the request
*/
public IconLoadRequest updateIconInBackground(Task task, Consumer<Task> callback) {
Preconditions.assertUIThread();
if (task.icon != null) {<|fim▁hole|> }
IconLoadRequest request = new IconLoadRequest(mBackgroundHandler) {
@Override
public void run() {
TaskCacheEntry entry = getCacheEntry(task);
if (isCanceled()) {
// We don't call back to the provided callback in this case
return;
}
MAIN_EXECUTOR.execute(() -> {
task.icon = entry.icon;
task.titleDescription = entry.contentDescription;
callback.accept(task);
onEnd();
});
}
};
Utilities.postAsyncCallback(mBackgroundHandler, request);
return request;
}
public void clear() {
mIconCache.evictAll();
}
void onTaskRemoved(TaskKey taskKey) {
mIconCache.remove(taskKey);
}
void invalidateCacheEntries(String pkg, UserHandle handle) {
Utilities.postAsyncCallback(mBackgroundHandler,
() -> mIconCache.removeAll(key ->
pkg.equals(key.getPackageName()) && handle.getIdentifier() == key.userId));
}
@WorkerThread
private TaskCacheEntry getCacheEntry(Task task) {
TaskCacheEntry entry = mIconCache.getAndInvalidateIfModified(task.key);
if (entry != null) {
return entry;
}
TaskDescription desc = task.taskDescription;
TaskKey key = task.key;
ActivityInfo activityInfo = null;
// Create new cache entry
entry = new TaskCacheEntry();
// Load icon
// TODO: Load icon resource (b/143363444)
Bitmap icon = TaskDescriptionCompat.getIcon(desc, key.userId);
if (icon != null) {
entry.icon = new FastBitmapDrawable(getBitmapInfo(
new BitmapDrawable(mContext.getResources(), icon),
key.userId,
desc.getPrimaryColor(),
false /* isInstantApp */));
} else {
activityInfo = PackageManagerWrapper.getInstance().getActivityInfo(
key.getComponent(), key.userId);
if (activityInfo != null) {
BitmapInfo bitmapInfo = getBitmapInfo(
mIconProvider.getIcon(activityInfo, UserHandle.of(key.userId)),
key.userId,
desc.getPrimaryColor(),
activityInfo.applicationInfo.isInstantApp());
entry.icon = newIcon(mContext, bitmapInfo);
} else {
entry.icon = getDefaultIcon(key.userId);
}
}
// Loading content descriptions if accessibility or low RAM recents is enabled.
if (GO_LOW_RAM_RECENTS_ENABLED || mAccessibilityManager.isEnabled()) {
// Skip loading the content description if the activity no longer exists
if (activityInfo == null) {
activityInfo = PackageManagerWrapper.getInstance().getActivityInfo(
key.getComponent(), key.userId);
}
if (activityInfo != null) {
entry.contentDescription = ActivityManagerWrapper.getInstance()
.getBadgedContentDescription(activityInfo, task.key.userId,
task.taskDescription);
}
}
mIconCache.put(task.key, entry);
return entry;
}
@WorkerThread
private Drawable getDefaultIcon(int userId) {
synchronized (mDefaultIcons) {
BitmapInfo info = mDefaultIcons.get(userId);
if (info == null) {
try (LauncherIcons la = LauncherIcons.obtain(mContext)) {
info = la.makeDefaultIcon(UserHandle.of(userId));
}
mDefaultIcons.put(userId, info);
}
return new FastBitmapDrawable(info);
}
}
@WorkerThread
private BitmapInfo getBitmapInfo(Drawable drawable, int userId,
int primaryColor, boolean isInstantApp) {
try (LauncherIcons la = LauncherIcons.obtain(mContext)) {
la.disableColorExtraction();
la.setWrapperBackgroundColor(primaryColor);
// User version code O, so that the icon is always wrapped in an adaptive icon container
return la.createBadgedIconBitmap(drawable, UserHandle.of(userId),
Build.VERSION_CODES.O, isInstantApp);
}
}
public static abstract class IconLoadRequest extends HandlerRunnable {
IconLoadRequest(Handler handler) {
super(handler, null);
}
}
private static class TaskCacheEntry {
public Drawable icon;
public String contentDescription = "";
}
}<|fim▁end|> | // Nothing to load, the icon is already loaded
callback.accept(task);
return null; |
<|file_name|>ui-col.d.ts<|end_file_name|><|fim▁begin|>export declare class UICol {
protected element: Element;<|fim▁hole|> minWidth: string;
align: "" | "top" | "middle" | "bottom" | "stretch";
constructor(element: Element);
readonly sizes: string;
readonly classes: string;
}<|fim▁end|> | size: "auto" | "fill" | string;
width: string;
maxWidth: string; |
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Joël Grand-Guillaume (Camptocamp)
# Copyright 2010-2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,<|fim▁hole|># but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Add "To Send" and "To Validate" states in Invoices',
'version': '8.0.1.0.1',
'category': 'Generic Modules/Invoicing',
'description':
'''
This module adds 2 states between draft and open state in invoices:
- To Validate: For invoices which need a validation
- To Send: For all invoices that need to be sent
''',
'author': "Camptocamp,Odoo Community Association (OCA)",
'website': 'http://camptocamp.com',
'license': 'AGPL-3',
'depends': ['account'],
'data': [
'invoice_wkf.xml',
'invoice_view.xml',
],
'demo': [],
'test': [],
'installable': True,
'auto_install': False,
'application': False
}<|fim▁end|> | |
<|file_name|>state_test.go<|end_file_name|><|fim▁begin|>/*
Copyright IBM Corp. 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
<|fim▁hole|>
"github.com/TarantulaTechnology/fabric/core/ledger/testutil"
)
func TestStateChanges(t *testing.T) {
stateTestWrapper, state := createFreshDBAndConstructState(t)
// add keys
state.TxBegin("txUuid")
state.Set("chaincode1", "key1", []byte("value1"))
state.Set("chaincode1", "key2", []byte("value2"))
state.TxFinish("txUuid", true)
//chehck in-memory
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", false), []byte("value1"))
testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key1", true))
delta := state.getStateDelta()
// save to db
stateTestWrapper.persistAndClearInMemoryChanges(0)
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", true), []byte("value1"))
testutil.AssertEquals(t, stateTestWrapper.fetchStateDeltaFromDB(0), delta)
// make changes when data is already in db
state.TxBegin("txUuid")
state.Set("chaincode1", "key1", []byte("new_value1"))
state.TxFinish("txUuid", true)
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", false), []byte("new_value1"))
state.TxBegin("txUuid")
state.Delete("chaincode1", "key2")
state.TxFinish("txUuid", true)
testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key2", false))
state.TxBegin("txUuid")
state.Set("chaincode2", "key3", []byte("value3"))
state.Set("chaincode2", "key4", []byte("value4"))
state.TxFinish("txUuid", true)
delta = state.getStateDelta()
stateTestWrapper.persistAndClearInMemoryChanges(1)
testutil.AssertEquals(t, stateTestWrapper.fetchStateDeltaFromDB(1), delta)
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", true), []byte("new_value1"))
testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key2", true))
testutil.AssertEquals(t, stateTestWrapper.get("chaincode2", "key3", true), []byte("value3"))
}
func TestStateTxBehavior(t *testing.T) {
stateTestWrapper, state := createFreshDBAndConstructState(t)
if state.txInProgress() {
t.Fatalf("No tx should be reported to be in progress")
}
// set state in a successful tx
state.TxBegin("txUuid")
state.Set("chaincode1", "key1", []byte("value1"))
state.Set("chaincode2", "key2", []byte("value2"))
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", false), []byte("value1"))
state.TxFinish("txUuid", true)
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", false), []byte("value1"))
// set state in a failed tx
state.TxBegin("txUuid1")
state.Set("chaincode1", "key1", []byte("value1_new"))
state.Set("chaincode2", "key2", []byte("value2_new"))
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", false), []byte("value1_new"))
state.TxFinish("txUuid1", false)
//older state should be available
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", false), []byte("value1"))
// delete state in a successful tx
state.TxBegin("txUuid2")
state.Delete("chaincode1", "key1")
testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key1", false))
state.TxFinish("txUuid2", true)
testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key1", false))
// // delete state in a failed tx
state.TxBegin("txUuid2")
state.Delete("chaincode2", "key2")
testutil.AssertNil(t, stateTestWrapper.get("chaincode2", "key2", false))
state.TxFinish("txUuid2", false)
testutil.AssertEquals(t, stateTestWrapper.get("chaincode2", "key2", false), []byte("value2"))
}
func TestStateTxWrongCallCausePanic_1(t *testing.T) {
_, state := createFreshDBAndConstructState(t)
defer testutil.AssertPanic(t, "A panic should occur when a set state is invoked with out calling a tx-begin")
state.Set("chaincodeID1", "key1", []byte("value1"))
}
func TestStateTxWrongCallCausePanic_2(t *testing.T) {
_, state := createFreshDBAndConstructState(t)
defer testutil.AssertPanic(t, "A panic should occur when a tx-begin is invoked before tx-finish for on-going tx")
state.TxBegin("txUuid")
state.TxBegin("anotherUuid")
}
func TestStateTxWrongCallCausePanic_3(t *testing.T) {
_, state := createFreshDBAndConstructState(t)
defer testutil.AssertPanic(t, "A panic should occur when Uuid for tx-begin and tx-finish ends")
state.TxBegin("txUuid")
state.TxFinish("anotherUuid", true)
}
func TestDeleteState(t *testing.T) {
stateTestWrapper, state := createFreshDBAndConstructState(t)
// Add keys
state.TxBegin("txUuid")
state.Set("chaincode1", "key1", []byte("value1"))
state.Set("chaincode1", "key2", []byte("value2"))
state.TxFinish("txUuid", true)
state.getStateDelta()
stateTestWrapper.persistAndClearInMemoryChanges(0)
// confirm keys are present
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", true), []byte("value1"))
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key2", true), []byte("value2"))
// Delete the State
err := state.DeleteState()
if err != nil {
t.Fatalf("Error deleting the state: %s", err)
}
// confirm the values are empty
testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key1", false))
testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key2", false))
testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key1", true))
testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key2", true))
// Confirm that we can now store new stuff in the state
state.TxBegin("txUuid")
state.Set("chaincode1", "key1", []byte("value1"))
state.Set("chaincode1", "key2", []byte("value2"))
state.TxFinish("txUuid", true)
state.getStateDelta()
stateTestWrapper.persistAndClearInMemoryChanges(1)
// confirm keys are present
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", true), []byte("value1"))
testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key2", true), []byte("value2"))
}
func TestStateDeltaSizeSetting(t *testing.T) {
_, state := createFreshDBAndConstructState(t)
if state.historyStateDeltaSize != 500 {
t.Fatalf("Error reading historyStateDeltaSize. Expected 500, but got %d", state.historyStateDeltaSize)
}
}<|fim▁end|> | package state
import (
"testing" |
<|file_name|>cufflink.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
process.env.FORCE_COLOR = true;
const program = require('commander');
const package = require('../package.json');
/**<|fim▁hole|> */
program
.version((package.name) + '@' + (package.version));
/**
* Command for creating and seeding
*/
program
.command('create [dataObject]', 'Generate seed data').alias('c')
.command('teardown', 'Tear down seed data').alias('t')
.parse(process.argv);<|fim▁end|> | * CLI Commands
* |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
def home(request):
return render(request, 'home.html', {'context_var': 'expected'})
def withUrlFields(request, value):<|fim▁hole|>@login_required
def requiresLogin(request):
return HttpResponse('logged in')
def httpStatus(request, status):
return HttpResponse(status=int(status))<|fim▁end|> | return HttpResponse(value)
|
<|file_name|>multi_threading.py<|end_file_name|><|fim▁begin|># coding:utf-8
# 测试多线程
import threading
import time
from utils import fn_timer
from multiprocessing.dummy import Pool
import requests
from utils import urls
# 耗时任务:听音乐
def music(name):
print 'I am listening to music {0}'.format(name)
time.sleep(1)
# 耗时任务:看电影
def movie(name):
print 'I am watching movie {0}'.format(name)
time.sleep(5)
# 单线程操作:顺序执行听10首音乐,看2部电影
@fn_timer
def single_thread():
for i in range(10):
music(i)
for i in range(2):
movie(i)
# 多线程执行:听10首音乐,看2部电影
@fn_timer
def multi_thread():
# 线程列表
threads = []
for i in range(10):
# 创建一个线程,target参数为任务处理函数,args为任务处理函数所需的参数元组
threads.append(threading.Thread(target = music,args = (i,)))
for i in range(2):
threads.append(threading.Thread(target = movie,args = (i,)))
for t in threads:
# 设为守护线程
t.setDaemon(True)
# 开始线程
t.start()
for t in threads:
t.join()
# 使用线程池执行:听10首音乐,看2部电影
@fn_timer
def use_pool():
# 设置线程池大小为20,如果不设置,默认值是CPU核心数
pool = Pool(20)
pool.map(movie,range(2))
pool.map(music,range(10))
pool.close()
pool.join()
# 应用:使用单线程下载多个网页的内容
@fn_timer
def download_using_single_thread(urls):
resps = []
for url in urls:
resp = requests.get(url)
resps.append(resp)
return resps
# 应用:使用多线程下载多个网页的内容
@fn_timer
def download_using_multi_thread(urls):
threads = []
for url in urls:
threads.append(threading.Thread(target = requests.get,args = (url,)))
for t in threads:
t.setDaemon(True)
t.start()
for t in threads:
t.join()
# 应用:使用线程池下载多个网页的内容
@fn_timer
def download_using_pool(urls):
pool = Pool(20)
# 第一个参数为函数名,第二个参数一个可迭代对象,为函数所需的参数列表
resps = pool.map(requests.get,urls)
pool.close()
pool.join()
return resps
def main():
# 测试单线程
# single_thread()
# 输出:
'''
I am listening to music 0
<|fim▁hole|> I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:single_thread in 20.14s]
'''
# 测试多线程
# multi_thread()
# 输出:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:multi_thread in 5.02s]
'''
# 测试线程池
# use_pool()
# 输出:
'''
I am listening to music 0
I am listening to music 1
I am listening to music 2
I am listening to music 3
I am listening to music 4
I am listening to music 5
I am listening to music 6
I am listening to music 7
I am listening to music 8
I am listening to music 9
I am watching movie 0
I am watching movie 1
[finished function:use_pool in 6.12s]
'''
# 1.使用单线程
# resps = download_using_single_thread(urls)
# print len(resps)
# 输出:
'''
[finished function:download_using_single_thread in 6.18s]
20
'''
# 2. 使用多线程
# download_using_multi_thread(urls)
# 输出:
'''
[finished function:download_using_multi_thread in 0.73s]
'''
# 3.使用线程池
resps = download_using_pool(urls)
print len(resps)
# 输出:
'''
[finished function:download_using_pool in 0.84s]
20
'''
if __name__ == '__main__':
main()<|fim▁end|> | I am listening to music 1
I am listening to music 2
|
<|file_name|>test_artificial_32_Quantization_PolyTrend_12_12_0.py<|end_file_name|><|fim▁begin|>import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
<|fim▁hole|>
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 12, transform = "Quantization", sigma = 0.0, exog_count = 0, ar_order = 12);<|fim▁end|> | |
<|file_name|>DomainDiagnostics.ts<|end_file_name|><|fim▁begin|>/* --------------------------------------------------------------------------------------------
* Copyright (c) Jan Dolejsi. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* ------------------------------------------------------------------------------------------ */
'use strict';
import { FileInfo, Variable, DomainInfo } from 'pddl-workspace';
import { CodePddlWorkspace } from "../workspace/CodePddlWorkspace";
import { PddlWorkspace } from 'pddl-workspace';
import { languages, DiagnosticCollection, Diagnostic, DiagnosticSeverity, DiagnosticTag } from "vscode";
import { PDDL } from 'pddl-workspace';
import { toRange, toUri } from "../utils";
export const UNUSED = 'unused';
/**
* Domain file diagnostics.
*/
export class DomainDiagnostics {
diagnosticCollection: DiagnosticCollection;
constructor(codePddlWorkspace: CodePddlWorkspace) {
this.diagnosticCollection = languages.createDiagnosticCollection(PDDL+'2');
codePddlWorkspace.pddlWorkspace.on(PddlWorkspace.UPDATED, (fileInfo: FileInfo) => {
if (fileInfo.isDomain()) {
this.validateDomain(fileInfo as DomainInfo);
}
});
}
validateDomain(domainInfo: DomainInfo): void {
const predicateDiagnostic = domainInfo.getPredicates()
.map(p => this.toUnusedDiagnostic(domainInfo, p, 'predicate'))
.filter(diagnostic => !!diagnostic)
.map(diagnostics => diagnostics!);
const functionDiagnostic = domainInfo.getFunctions()
.map(p => this.toUnusedDiagnostic(domainInfo, p, 'function'))
.filter(diagnostic => !!diagnostic)
.map(diagnostics => diagnostics!);
const diagnostics = predicateDiagnostic.concat(functionDiagnostic);<|fim▁hole|> }
toUnusedDiagnostic(domainInfo: DomainInfo, variable: Variable, variableType: string): Diagnostic | undefined {
const references = domainInfo.getVariableReferences(variable);
if (references.length === 1) {
const diagnostic = new Diagnostic(toRange(references[0]), `Unused ${variableType} (${variable.declaredName})`, DiagnosticSeverity.Hint);
diagnostic.tags = [DiagnosticTag.Unnecessary];
diagnostic.code = UNUSED;
return diagnostic;
}
return undefined;
}
}<|fim▁end|> |
this.diagnosticCollection.set(toUri(domainInfo.fileUri), diagnostics); |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import asyncio
try:
from unittest.mock import Mock, create_autospec
except ImportError:
from mock import Mock, create_autospec
from uuid import uuid4
from functools import wraps
from copy import copy
from unittest import TestCase as unittestTestCase
from zeroservices.exceptions import ServiceUnavailable
from zeroservices.resources import (ResourceCollection, Resource,
is_callable, ResourceService)
from zeroservices.medium import BaseMedium
from zeroservices.medium.memory import MemoryMedium
from zeroservices.discovery.memory import MemoryDiscoveryMedium
from zeroservices.memory import MemoryCollection, MemoryResource
from zeroservices import BaseService
from zeroservices.query import match
class TestCase(unittestTestCase):
    """unittest.TestCase augmented with cross-version comparison helpers."""

    def assertItemsEqual(self, *args):
        # Python 3 renamed assertItemsEqual to assertCountEqual; delegate to
        # whichever the running interpreter provides.
        if hasattr(self, 'assertCountEqual'):
            return self.assertCountEqual(*args)
        return super(TestCase, self).assertItemsEqual(*args)

    def assertDictIsSubset(self, subset, superset):
        """Assert every key/value pair of *subset* also appears in *superset*."""
        superset_items = superset.items()
        for pair in subset.items():
            self.assertIn(pair, superset_items)
def test_medium():
    """Return a strictly-specced mock standing in for a BaseMedium."""
    medium = Mock(spec_set=BaseMedium)
    return medium
class TestResource(MemoryResource):
    """MemoryResource with one extra callable action for tests.

    Fixes: the varargs parameter was misspelled ``*arhs``; renamed to the
    conventional ``*args`` (callers pass positionally, so this is
    interface-compatible).  A stray FIM marker token preceding the class was
    removed.
    """

    @is_callable
    def custom_action(self, *args, **kwargs):
        # Sentinel value asserted by the test-suite.
        return 42
class TestCollection(MemoryCollection):
    # Resources created through this collection are TestResource instances.
    resource_class = TestResource

    @is_callable
    def custom_action(self, *args, **kwargs):
        """Collection-level callable action; always returns the sentinel 42."""
        return 42
def sample_collection(sample_resource_name):
    """Build a TestCollection for the given resource name."""
    collection = TestCollection(sample_resource_name)
    return collection
class TestService(BaseService):
    """BaseService test double that records on_message/on_event calls.

    Fix: ``@asyncio.coroutine`` was removed in Python 3.11; the handlers are
    now native ``async def`` coroutines with the same call contract (calling
    them returns an awaitable whose result is the mock's return value).
    """

    def __init__(self, *args, node_infos=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.on_message_mock = Mock()
        self.on_event_mock = Mock()
        self.node_infos = node_infos or {}

    def service_info(self):
        # Merge the extra node infos with the base info; on key collisions the
        # base implementation's values win (update() overwrites the copy).
        base_infos = copy(self.node_infos)
        base_infos.update(super().service_info())
        return base_infos

    async def on_message(self, *args, **kwargs):
        return self.on_message_mock(*args, **kwargs)

    async def on_event(self, *args, **kwargs):
        return self.on_event_mock(*args, **kwargs)
def _create_test_service(name, node_infos, loop):
    """Wire a TestService to a fresh in-memory medium running on *loop*."""
    in_memory_medium = MemoryMedium(loop, MemoryDiscoveryMedium)
    return TestService(name, in_memory_medium, node_infos=node_infos)
class TestResourceService(ResourceService):
    """ResourceService test double that records on_event calls.

    Fix: ``@asyncio.coroutine`` was removed in Python 3.11; ``on_event`` is
    now a native coroutine with the same call contract.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.on_event_mock = Mock()

    async def on_event(self, *args, **kwargs):
        return self.on_event_mock(*args, **kwargs)
def _create_test_resource_service(name, loop):
    """Wire a TestResourceService to a fresh in-memory medium on *loop*."""
    in_memory_medium = MemoryMedium(loop, MemoryDiscoveryMedium)
    return TestResourceService(name, in_memory_medium)
def _async_test(f):
@wraps(f)
def wrapper(self, *args, **kwargs):
if not self.loop.is_running():
coro = asyncio.coroutine(f)
future = coro(self, *args, **kwargs)
self.loop.run_until_complete(asyncio.wait_for(future, 2, loop=self.loop))
else:
return f(self, *args, **kwargs)
return wrapper<|fim▁end|> | |
<|file_name|>CofenseTriageThreatEnrichment.py<|end_file_name|><|fim▁begin|>from CommonServerPython import *
''' STANDALONE FUNCTION '''
<|fim▁hole|>
def get_threat_indicator_list(args: Dict[str, Any]) -> list:
    """
    Executes cofense-threat-indicator-list command for given arguments.

    :type args: ``Dict[str, Any]``
    :param args: The script arguments provided by the user.

    :return: List of responses.
    :rtype: ``list``
    """
    threat_value = args.get('threat_value')
    # cofense-threat-indicator-list enriches the indicator matching the value.
    return execute_command('cofense-threat-indicator-list',
                           {'threat_value': f"{threat_value}"},
                           extract_contents=False)
''' MAIN FUNCTION '''
def main():
    """Entry point: enrich the requested threat value and report the result."""
    try:
        results = get_threat_indicator_list(demisto.args())
        return_results(results)
    except Exception as e:
        # Log the full traceback before surfacing a user-facing error.
        demisto.error(traceback.format_exc())
        return_error(f'Failed to execute CofenseTriageThreatEnrichment. Error: {str(e)}')
''' ENTRY POINT '''
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()<|fim▁end|> | |
<|file_name|>feedbackStatusbarItem.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { IDisposable } from 'vs/base/common/lifecycle';
import { IStatusbarItem } from 'vs/workbench/browser/parts/statusbar/statusbar';
import { FeedbackDropdown, IFeedback, IFeedbackService } from './feedback';
import { IContextViewService } from 'vs/platform/contextview/browser/contextView';
import { IInstantiationService } from 'vs/platform/instantiation/common/instantiation';
import product from 'vs/platform/node/product';
import { Themable, STATUS_BAR_FOREGROUND, STATUS_BAR_NO_FOLDER_FOREGROUND } from 'vs/workbench/common/theme';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { IWorkspaceContextService, WorkbenchState } from 'vs/platform/workspace/common/workspace';
/** Sends feedback by opening a pre-filled tweet in the browser. */
class TwitterFeedbackService implements IFeedbackService {
	private static TWITTER_URL: string = 'https://twitter.com/intent/tweet';
	private static VIA_NAME: string = 'code';
	private static HASHTAGS: string[] = ['HappyCoding'];

	private combineHashTagsAsString(): string {
		return TwitterFeedbackService.HASHTAGS.join(',');
	}

	public submitFeedback(feedback: IFeedback): void {
		const queryString = `?${feedback.sentiment === 1 ? `hashtags=${this.combineHashTagsAsString()}&` : null}ref_src=twsrc%5Etfw&related=twitterapi%2Ctwitter&text=${feedback.feedback}&tw_p=tweetbutton&via=${TwitterFeedbackService.VIA_NAME}`;
		window.open(TwitterFeedbackService.TWITTER_URL + queryString);
	}

	public getCharacterLimit(sentiment: number): number {
		// Budget consumed by hashtags (only attached to happy feedback, each
		// costing "#tag " i.e. length + 2) plus the " via @code" suffix.
		let used = 0;
		if (sentiment === 1) {
			used = TwitterFeedbackService.HASHTAGS.reduce((total, tag) => total + tag.length + 2, 0);
		}
		if (TwitterFeedbackService.VIA_NAME) {
			used += ` via @${TwitterFeedbackService.VIA_NAME}`.length;
		}
		return 140 - used;
	}
}
export class FeedbackStatusbarItem extends Themable implements IStatusbarItem {
private dropdown: FeedbackDropdown;
constructor(
@IInstantiationService private instantiationService: IInstantiationService,
@IContextViewService private contextViewService: IContextViewService,
@IWorkspaceContextService private contextService: IWorkspaceContextService,
@IThemeService themeService: IThemeService
) {
super(themeService);
this.registerListeners();
}
private registerListeners(): void {
this.toUnbind.push(this.contextService.onDidChangeWorkbenchState(() => this.updateStyles()));
}
protected updateStyles(): void {
super.updateStyles();
if (this.dropdown) {
this.dropdown.label.style('background-color', this.getColor(this.contextService.getWorkbenchState() !== WorkbenchState.EMPTY ? STATUS_BAR_FOREGROUND : STATUS_BAR_NO_FOLDER_FOREGROUND));
}
}
public render(element: HTMLElement): IDisposable {
if (product.sendASmile) {
this.dropdown = this.instantiationService.createInstance(FeedbackDropdown, element, {
contextViewProvider: this.contextViewService,
feedbackService: this.instantiationService.createInstance(TwitterFeedbackService)
});
this.updateStyles();
return this.dropdown;
}<|fim▁hole|>
return null;
}
}<|fim▁end|> | |
<|file_name|>pkg.installspace.context.pc.py<|end_file_name|><|fim▁begin|># generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []<|fim▁hole|>PROJECT_VERSION = "0.0.0"<|fim▁end|> | PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "r2_description"
PROJECT_SPACE_DIR = "/home/mkhuthir/learnROS/src/chessbot/install" |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
from django.utils.translation import ugettext_lazy as _
class Contact(models.Model):
    """A named e-mail contact choice.

    Fix: the FIM hole had relocated ``__str__``'s body and interleaved the
    ``Meta`` options; the class is reassembled in its original order.
    """

    name = models.CharField(max_length=255, verbose_name=_("namn"))
    email = models.EmailField(verbose_name=_("e-post"))

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = _("Kontaktval")
        verbose_name_plural = _("Kontaktval")
<|file_name|>build_errortext.py<|end_file_name|><|fim▁begin|>import sys
import os.path
import setuptools # Fix distutils issues
from cffi import FFI
ffi = FFI()
mod_name = 'instrumental.drivers.cameras._pixelfly.errortext'<|fim▁hole|> #define PCO_ERR_H_CREATE_OBJECT
#define PCO_ERRT_H_CREATE_OBJECT
#include <windows.h>
#include "PCO_errt.h"
""", include_dirs=[os.path.dirname(__file__)])
ffi.cdef("void PCO_GetErrorText(DWORD dwerr, char* pbuf, DWORD dwlen);")
else:
ffi.set_source(mod_name, '')
if __name__ == '__main__':
ffi.compile()<|fim▁end|> |
if sys.platform.startswith('win'):
ffi.set_source(mod_name, """ |
<|file_name|>HeroicStrike.java<|end_file_name|><|fim▁begin|>package com.hearthsim.card.basic.spell;
import com.hearthsim.card.spellcard.SpellTargetableCard;
import com.hearthsim.event.effect.EffectCharacter;
import com.hearthsim.event.effect.EffectCharacterBuffTemp;
import com.hearthsim.event.filter.FilterCharacter;
import com.hearthsim.event.filter.FilterCharacterTargetedSpell;
public class HeroicStrike extends SpellTargetableCard {
private final static EffectCharacter effect = new EffectCharacterBuffTemp(4);
/**
* Constructor
*
* Defaults to hasBeenUsed = false
*/
public HeroicStrike() {
super();
}
@Override
public FilterCharacter getTargetableFilter() {
return FilterCharacterTargetedSpell.SELF;
}
/**
* Heroic Strike
*
* Gives the hero +4 attack this turn
*<|fim▁hole|> * @param side
* @param boardState The BoardState before this card has performed its action. It will be manipulated and returned.
*
* @return The boardState is manipulated and returned
*/
@Override
public EffectCharacter getTargetableEffect() {
return HeroicStrike.effect;
}
}<|fim▁end|> | *
* |
<|file_name|>contentClientLibraries.ts<|end_file_name|><|fim▁begin|>// Constants
import {CLIENT_LIBS} from 'src/shared/constants/routes'
// Types
import {WriteDataItem, WriteDataSection} from 'src/writeData/constants'
// Markdown
import ArduinoMarkdown from 'src/writeData/components/clientLibraries/Arduino.md'
import CSharpMarkdown from 'src/writeData/components/clientLibraries/CSharp.md'
import GoMarkdown from 'src/writeData/components/clientLibraries/Go.md'
import JavaMarkdown from 'src/writeData/components/clientLibraries/Java.md'
import NodeMarkdown from 'src/writeData/components/clientLibraries/Node.md'
import PythonMarkdown from 'src/writeData/components/clientLibraries/Python.md'
import RubyMarkdown from 'src/writeData/components/clientLibraries/Ruby.md'
import PHPMarkdown from 'src/writeData/components/clientLibraries/PHP.md'
import KotlinMarkdown from 'src/writeData/components/clientLibraries/Kotlin.md'
import ScalaMarkdown from 'src/writeData/components/clientLibraries/Scala.md'
// Graphics
import arduinoLogo from 'src/writeData/graphics/arduinoLogo.svg'
import csharpLogo from 'src/writeData/graphics/csharpLogo.svg'
import goLogo from 'src/writeData/graphics/goLogo.svg'
import javaLogo from 'src/writeData/graphics/javaLogo.svg'
import nodeLogo from 'src/writeData/graphics/nodeLogo.svg'
import pythonLogo from 'src/writeData/graphics/pythonLogo.svg'
import rubyLogo from 'src/writeData/graphics/rubyLogo.svg'
import phpLogo from 'src/writeData/graphics/phpLogo.svg'
import kotlinLogo from 'src/writeData/graphics/kotlinLogo.svg'
import scalaLogo from 'src/writeData/graphics/scalaLogo.svg'
export const WRITE_DATA_CLIENT_LIBRARIES: WriteDataItem[] = [
{
id: 'arduino',
name: 'Arduino',
url: `${CLIENT_LIBS}/arduino`,
image: arduinoLogo,
markdown: ArduinoMarkdown,
},
{
id: 'csharp',
name: 'C#',
url: `${CLIENT_LIBS}/csharp`,
image: csharpLogo,
markdown: CSharpMarkdown,
},
{
id: 'go',
name: 'GO',
url: `${CLIENT_LIBS}/go`,
image: goLogo,
markdown: GoMarkdown,
},
{
id: 'java',
name: 'Java',
url: `${CLIENT_LIBS}/java`,
image: javaLogo,
markdown: JavaMarkdown,
},
{
id: 'javascript-node',
name: 'JavaScript/Node.js',
url: `${CLIENT_LIBS}/javascript-node`,
image: nodeLogo,
markdown: NodeMarkdown,
},
{
id: 'python',
name: 'Python',
url: `${CLIENT_LIBS}/python`,
image: pythonLogo,
markdown: PythonMarkdown,
},
{
id: 'ruby',
name: 'Ruby',
url: `${CLIENT_LIBS}/ruby`,
image: rubyLogo,
markdown: RubyMarkdown,
},
{
id: 'php',
name: 'PHP',
url: `${CLIENT_LIBS}/php`,
image: phpLogo,
markdown: PHPMarkdown,
},
{
id: 'kotlin',
name: 'Kotlin',
url: `${CLIENT_LIBS}/kotlin`,
image: kotlinLogo,
markdown: KotlinMarkdown,
},
{
id: 'scala',
name: 'Scala',
url: `${CLIENT_LIBS}/scala`,
image: scalaLogo,
markdown: ScalaMarkdown,
},
]
const WRITE_DATA_CLIENT_LIBRARIES_SECTION: WriteDataSection = {
id: CLIENT_LIBS,
name: 'Client Libraries',<|fim▁hole|> items: WRITE_DATA_CLIENT_LIBRARIES,
featureFlag: 'load-data-client-libraries',
}
export default WRITE_DATA_CLIENT_LIBRARIES_SECTION<|fim▁end|> | description: 'Back-end, front-end, and mobile applications', |
<|file_name|>ActivityBasedTimeoutPolicyRequest.java<|end_file_name|><|fim▁begin|>// Template Source: BaseEntityRequest.java.tt
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
package com.microsoft.graph.requests;
import com.microsoft.graph.http.IRequestBuilder;
import com.microsoft.graph.core.ClientException;
import com.microsoft.graph.models.ActivityBasedTimeoutPolicy;
import com.microsoft.graph.models.DirectoryObject;
import com.microsoft.graph.models.ExtensionProperty;
import java.util.Arrays;
import java.util.EnumSet;
import javax.annotation.Nullable;
import javax.annotation.Nonnull;
import com.microsoft.graph.core.IBaseClient;
import com.microsoft.graph.http.BaseRequest;
import com.microsoft.graph.http.HttpMethod;
// **NOTE** This file was generated by a tool and any changes will be overwritten.
/**
* The class for the Activity Based Timeout Policy Request.
*/
public class ActivityBasedTimeoutPolicyRequest extends BaseRequest<ActivityBasedTimeoutPolicy> {
/**
* The request for the ActivityBasedTimeoutPolicy
*
* @param requestUrl the request URL
* @param client the service client
* @param requestOptions the options for this request
*/
public ActivityBasedTimeoutPolicyRequest(@Nonnull final String requestUrl, @Nonnull final IBaseClient<?> client, @Nullable final java.util.List<? extends com.microsoft.graph.options.Option> requestOptions) {
super(requestUrl, client, requestOptions, ActivityBasedTimeoutPolicy.class);
}
/**
* Gets the ActivityBasedTimeoutPolicy from the service
*
* @return a future with the result
*/
@Nonnull
public java.util.concurrent.CompletableFuture<ActivityBasedTimeoutPolicy> getAsync() {
return sendAsync(HttpMethod.GET, null);
}
/**
* Gets the ActivityBasedTimeoutPolicy from the service
*
* @return the ActivityBasedTimeoutPolicy from the request
* @throws ClientException this exception occurs if the request was unable to complete for any reason
*/
@Nullable
public ActivityBasedTimeoutPolicy get() throws ClientException {
return send(HttpMethod.GET, null);
}
/**
* Delete this item from the service
*
* @return a future with the deletion result
*/<|fim▁hole|> }
/**
* Delete this item from the service
* @return the resulting response if the service returns anything on deletion
*
* @throws ClientException if there was an exception during the delete operation
*/
@Nullable
public ActivityBasedTimeoutPolicy delete() throws ClientException {
return send(HttpMethod.DELETE, null);
}
/**
* Patches this ActivityBasedTimeoutPolicy with a source
*
* @param sourceActivityBasedTimeoutPolicy the source object with updates
* @return a future with the result
*/
@Nonnull
public java.util.concurrent.CompletableFuture<ActivityBasedTimeoutPolicy> patchAsync(@Nonnull final ActivityBasedTimeoutPolicy sourceActivityBasedTimeoutPolicy) {
return sendAsync(HttpMethod.PATCH, sourceActivityBasedTimeoutPolicy);
}
/**
* Patches this ActivityBasedTimeoutPolicy with a source
*
* @param sourceActivityBasedTimeoutPolicy the source object with updates
* @return the updated ActivityBasedTimeoutPolicy
* @throws ClientException this exception occurs if the request was unable to complete for any reason
*/
@Nullable
public ActivityBasedTimeoutPolicy patch(@Nonnull final ActivityBasedTimeoutPolicy sourceActivityBasedTimeoutPolicy) throws ClientException {
return send(HttpMethod.PATCH, sourceActivityBasedTimeoutPolicy);
}
/**
* Creates a ActivityBasedTimeoutPolicy with a new object
*
* @param newActivityBasedTimeoutPolicy the new object to create
* @return a future with the result
*/
@Nonnull
public java.util.concurrent.CompletableFuture<ActivityBasedTimeoutPolicy> postAsync(@Nonnull final ActivityBasedTimeoutPolicy newActivityBasedTimeoutPolicy) {
return sendAsync(HttpMethod.POST, newActivityBasedTimeoutPolicy);
}
/**
* Creates a ActivityBasedTimeoutPolicy with a new object
*
* @param newActivityBasedTimeoutPolicy the new object to create
* @return the created ActivityBasedTimeoutPolicy
* @throws ClientException this exception occurs if the request was unable to complete for any reason
*/
@Nullable
public ActivityBasedTimeoutPolicy post(@Nonnull final ActivityBasedTimeoutPolicy newActivityBasedTimeoutPolicy) throws ClientException {
return send(HttpMethod.POST, newActivityBasedTimeoutPolicy);
}
/**
* Creates a ActivityBasedTimeoutPolicy with a new object
*
* @param newActivityBasedTimeoutPolicy the object to create/update
* @return a future with the result
*/
@Nonnull
public java.util.concurrent.CompletableFuture<ActivityBasedTimeoutPolicy> putAsync(@Nonnull final ActivityBasedTimeoutPolicy newActivityBasedTimeoutPolicy) {
return sendAsync(HttpMethod.PUT, newActivityBasedTimeoutPolicy);
}
/**
* Creates a ActivityBasedTimeoutPolicy with a new object
*
* @param newActivityBasedTimeoutPolicy the object to create/update
* @return the created ActivityBasedTimeoutPolicy
* @throws ClientException this exception occurs if the request was unable to complete for any reason
*/
@Nullable
public ActivityBasedTimeoutPolicy put(@Nonnull final ActivityBasedTimeoutPolicy newActivityBasedTimeoutPolicy) throws ClientException {
return send(HttpMethod.PUT, newActivityBasedTimeoutPolicy);
}
/**
* Sets the select clause for the request
*
* @param value the select clause
* @return the updated request
*/
@Nonnull
public ActivityBasedTimeoutPolicyRequest select(@Nonnull final String value) {
addSelectOption(value);
return this;
}
/**
* Sets the expand clause for the request
*
* @param value the expand clause
* @return the updated request
*/
@Nonnull
public ActivityBasedTimeoutPolicyRequest expand(@Nonnull final String value) {
addExpandOption(value);
return this;
}
}<|fim▁end|> | @Nonnull
public java.util.concurrent.CompletableFuture<ActivityBasedTimeoutPolicy> deleteAsync() {
return sendAsync(HttpMethod.DELETE, null); |
<|file_name|>uhd_rx_cfile.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2011 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
"""
Read samples from a UHD device and write to file formatted as binary
outputs single precision complex float values or complex short values
(interleaved 16 bit signed short integers).
"""
from gnuradio import gr, eng_notation
from gnuradio import uhd
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import sys
n2s = eng_notation.num_to_str
class rx_cfile_block(gr.top_block):
def __init__(self, options, filename):
gr.top_block.__init__(self)
# Create a UHD device source
if options.output_shorts:
self._u = uhd.usrp_source(device_addr=options.address,
io_type=uhd.io_type.COMPLEX_INT16,
num_channels=1)
self._sink = gr.file_sink(gr.sizeof_short*2, filename)
else:
self._u = uhd.usrp_source(device_addr=options.address,
io_type=uhd.io_type.COMPLEX_FLOAT32,
num_channels=1)
self._sink = gr.file_sink(gr.sizeof_gr_complex, filename)
# Set receiver sample rate
self._u.set_samp_rate(options.samp_rate)
# Set receive daughterboard gain
if options.gain is None:
g = self._u.get_gain_range()
options.gain = float(g.start()+g.stop())/2
print "Using mid-point gain of", options.gain, "(", g.start(), "-", g.stop(), ")"
self._u.set_gain(options.gain)
# Set the antenna
if(options.antenna):
self._u.set_antenna(options.antenna, 0)
# Set frequency (tune request takes lo_offset)
if(options.lo_offset is not None):
treq = uhd.tune_request(options.freq, options.lo_offset)
else:
treq = uhd.tune_request(options.freq)
tr = self._u.set_center_freq(treq)
if tr == None:
sys.stderr.write('Failed to set center frequency\n')
raise SystemExit, 1
# Create head block if needed and wire it up
if options.nsamples is None:<|fim▁hole|> self._head = gr.head(gr.sizeof_short*2, int(options.nsamples))
else:
self._head = gr.head(gr.sizeof_gr_complex, int(options.nsamples))
self.connect(self._u, self._head, self._sink)
input_rate = self._u.get_samp_rate()
if options.verbose:
print "Address:", options.address
print "Rx gain:", options.gain
print "Rx baseband frequency:", n2s(tr.actual_rf_freq)
print "Rx DDC frequency:", n2s(tr.actual_dsp_freq)
print "Rx Sample Rate:", n2s(input_rate)
if options.nsamples is None:
print "Receiving samples until Ctrl-C"
else:
print "Receving", n2s(options.nsamples), "samples"
if options.output_shorts:
print "Writing 16-bit complex shorts"
else:
print "Writing 32-bit complex floats"
print "Output filename:", filename
def get_options():
usage="%prog: [options] output_filename"
parser = OptionParser(option_class=eng_option, usage=usage)
parser.add_option("-a", "--address", type="string", default="addr=192.168.10.2",
help="Address of UHD device, [default=%default]")
parser.add_option("-A", "--antenna", type="string", default=None,
help="select Rx Antenna where appropriate")
parser.add_option("", "--samp-rate", type="eng_float", default=1e6,
help="set sample rate (bandwidth) [default=%default]")
parser.add_option("-f", "--freq", type="eng_float", default=None,
help="set frequency to FREQ", metavar="FREQ")
parser.add_option("-g", "--gain", type="eng_float", default=None,
help="set gain in dB (default is midpoint)")
parser.add_option( "-s","--output-shorts", action="store_true", default=False,
help="output interleaved shorts instead of complex floats")
parser.add_option("-N", "--nsamples", type="eng_float", default=None,
help="number of samples to collect [default=+inf]")
parser.add_option("-v", "--verbose", action="store_true", default=False,
help="verbose output")
parser.add_option("", "--lo-offset", type="eng_float", default=None,
help="set daughterboard LO offset to OFFSET [default=hw default]")
(options, args) = parser.parse_args ()
if len(args) != 1:
parser.print_help()
raise SystemExit, 1
if options.freq is None:
parser.print_help()
sys.stderr.write('You must specify the frequency with -f FREQ\n');
raise SystemExit, 1
return (options, args[0])
if __name__ == '__main__':
(options, filename) = get_options()
tb = rx_cfile_block(options, filename)
try:
tb.run()
except KeyboardInterrupt:
pass<|fim▁end|> | self.connect(self._u, self._sink)
else:
if options.output_shorts: |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
from os.path import join, dirname
setup(
name="fant_sizer",
version="0.7",
author="Rypiuk Oleksandr",
author_email="[email protected]",
description="fant_sizer command-line file-information",
url="https://github.com/ripiuk/fant_sizer",
keywords="file command-line information size tool recursively",
license="MIT",
classifiers=[
'Topic :: Utilities',
'Environment :: Console',
'Natural Language :: English',<|fim▁hole|> 'Intended Audience :: Information Technology',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3.6'
],
packages=find_packages(),
long_description=open(join(dirname(__file__), "README.rst")).read(),
entry_points={
"console_scripts":
['fant_sizer = fant_sizer.fant_sizer:_main'],
},
)<|fim▁end|> | 'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers', |
<|file_name|>NanoTrasenBot.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This script is shared under the
# Creative Commons Attribution-ShareAlike 3.0 license (CC BY-SA 3.0)
# Added clause to Attribution:
# - You may not remove or hide the '<Bot_name> who created you?' functionality
# and you may not modify the name given in the response.
#CREDITS
# Author: Skibiliano
# "Foreign" Modules:
# Psyco 2.0 / Psyco 1.6
################# DEBUG STUFF #####################
import sys
import CORE_DATA
import urllib2
import socket
import irchat
################## END OF DEBUG STUFF ##############
#
# PSYCO
write_to_a_file = False #Only affects psyco
write_youtube_to_file = True #True = YTCV4 will load, false = YTCV3 will load
try:
import psyco
except ImportError:
print 'Psyco not installed, the program will just run slower'
psyco_exists = False
if write_to_a_file:
try:
tiedosto = open("psycodownload.txt","r")
except:
tiedosto = open("psycodownload.txt","w")
tiedosto.write("http://www.voidspace.org.uk/python/modules.shtml#psyco")
tiedosto.write("\nhttp://psyco.sourceforge.net/download.html")
tiedosto.close()
print "Check psycodownload.txt for a link"
else:
print "For god's sake, open psycodownload.txt"
tiedosto.close()
else:
print "WINDOWS: http://www.voidspace.org.uk/python/modules.shtml#psyco"
print "LINUX: http://psyco.sourceforge.net/download.html"
else:
psyco_exists = True
# </PSYCO>
import C_rtd # rtd
import C_srtd # srtd
import C_makequote
import C_maths
import C_eightball #eightball
import C_sarcasticball
import C_heaortai # heaortai
import C_rot13 # rot13
import D_help # everything
import pickle
import Timeconverter
import xkcdparser
import time
import re
import Marakov_Chain
import Namecheck # Namecheck
import Weather
#SLOWER THAN RANDOM.CHOICE
import thread
import random
import Shortname # shortname
import subprocess
import some_but_not_all_2 #sbna2 (sbna)
#import YTCv3 # YTCV2 OUTDATED
import os
import save_load # save, load
from some_but_not_all_2 import sbna2 as sbna
from time import sleep
from random import choice as fsample
from C_rtd import rtd
from C_heaortai import heaortai
from C_srtd import srtd
if write_youtube_to_file:
from YTCv4 import YTCV4 as YTCV2
else:
from YTCv3 import YTCV2 #Downgraded version supports Cache disabling, but is slower
from save_load import save,load
# Bind the most frequently-called helpers with Psyco and report each
# binding.  NOTE(review): the list contains duplicates (fsample and
# C_eightball.eightball appear twice) -- harmless but redundant.
if psyco_exists:
    def psyco_bond(func):
        # Psyco-compile *func* and return a short confirmation string.
        psyco.bind(func)
        return func.__name__+" Psycofied"
    for a in [rtd,srtd,C_heaortai.heaortai,sbna,YTCV2,fsample,C_rot13.rot13,C_eightball.eightball,fsample,
              C_eightball.eightball,C_sarcasticball.sarcasticball,Marakov_Chain.form_sentence,Marakov_Chain.give_data]:
        print psyco_bond(a)
# NOTE(review): `global` statements at module scope are no-ops in Python.
# These lines only document which names the functions below treat as
# shared global state; they have no runtime effect.
global dictionary
global Name,SName
global allow_callnames,offline_messages,hasnotasked,shortform
## For autoRecv()
global disconnects,channel,conn
## For stop()
global operators
## For replace()
global usable,fixing,curtime
## For target()
global CALL_OFF,logbans
## For check()
global influx
######
# --- mutable runtime state (updated by the main loop / helper threads) ---
autodiscusscurtime = 0
conn = 0
curtime = -999
dance_flood_time = 10
disconnects = 0
responsiveness_delay = 0.5 #500 millisecond delay if no message
trackdance = 0
discard_combo_messages_time = 1 #They are discarded after 1 second.
uptime_start = time.time()
# - - - - -
####
aggressive_pinging = True # Bring the hammer on ping timeouts
aggressive_pinging_delay = 150 # How often to send a ping
aggressive_pinging_refresh = 2.5 # How long is the sleep between checks
####
# --- feature switches ---
allow_callnames = True #Disables NT, call if the variable is False
automatic_youtube_reveal = True
birthday_announced = 0 #Will be the year when it was announced
call_to_action = False
call_me_max_length = 20
CALL_OFF = False
connected = False
dance_enabled = True
comboer = ""
comboer_time = 0
directories = ["fmlquotes","Marakov","memos","suggestions",
               "userquotes","banlog","YTCache","xkcdcache"] #These will be created if they do not exist
debug = True
duplicate_notify = False
enabled = True
fixing = False
fml_usable = True
hasnotasked = True
highlights = False
logbans = True
maths_usable = True
marakov = True
nudgeable = True
offensive_mode = False
offline_messages = True
offline_message_limit = 5 # per user
optimize_fml = True # -CPU usage +Memory usage when enabled.
optimize_greeting = True # +Startup time +Memory usage -CPU usage when enabled
heavy_psyco = True # +Memory +Startup time -CPU usage -CPU time
cache_youtube_links = True
personality_greeter = True
respond_of_course = True #Responds with "Of course!"
respond_khan = False #KHAAAAAAAAN!
silent_duplicate_takedown = True
showquotemakers = False
shortform = True
usable = True
use_sname = True
parse_xkcd = True
# - - - - -
# --- identity / connection settings, all sourced from CORE_DATA ---
Name = CORE_DATA.Name
SName = CORE_DATA.SName
origname = Name # Do not edit!
lowname = Name.lower()
greeting = CORE_DATA.greeting
targetdirectory = CORE_DATA.directory
version = CORE_DATA.version
Network = CORE_DATA.Network
channel = CORE_DATA.channel
prefix = CORE_DATA.prefix
Port = CORE_DATA.Port
# - - - - -
# --- mutable collections filled in at runtime ---
pregen = CORE_DATA.version
influx = ""
users = []
translateable = []
targetlist = []
operators = []
halfoperators = []
items = []
tell_list = {}
# - - - - - Logical changes to variables
if CORE_DATA.DISABLE_ALL_NON_MANDATORY_SOCKET_CONNECTIONS:
    nudgeable = False
try:
tiedosto = open("replacenames.cache","r")
replacenames = pickle.load(tiedosto)
tiedosto.close()
for i in replacenames.values():
if len(i) > call_me_max_length:
replacenames[replacenames.keys()[replacenames.values().index(i)]] = i[:call_me_max_length]
tiedosto = open("replacenames.cache","w")
pickle.dump(replacenames,tiedosto)
tiedosto.close()
if "[\0x01]" in i.lower() or "[\\0x01]" in i.lower():
i = i.replace("[\0x01]","")
i = i.replace("[\0X01]","")
i = i.replace("[\\0x01]","")
i = i.replace("[\\0X01]","")
print "NAME CORRECTED"
except IOError: #File not found
replacenames = {}
except EOFError: #Cache corrupt
replacenames = {}
print "replacenames.cache is corrupt and couldn't be loaded."
# Load the "people he knows" cache; a missing or corrupt cache file
# degrades to two empty lists.
try:
    tiedosto = open("peopleheknows.cache","r")
    peopleheknows = pickle.load(tiedosto)
    tiedosto.close()
except IOError:
    # First run: create an empty cache file.
    # NOTE(review): the empty file raises EOFError on the next start,
    # which is also handled below -- confirm this is intentional.
    peopleheknows = [[],[]]
    tiedosto = open("peopleheknows.cache","w")
    tiedosto.close()
except EOFError:
    peopleheknows = [[],[]]
    print "peopleheknows.cache is corrupt and couldn't be loaded."
# Text for the d6 roll results used by the dice commands.
dictionary = {1:"1 - Crit. Fail", 2:"2 - Failure",
              3:"3 - Partial Success", 4:"4 - Success",
              5:"5 - Perfect", 6:"6 - Overkill"}
alphabet = ["A","B","C","D","E","F","G","H","I","J","K","L","M","N","O","P","Q","R","S","T","U","V","W","X","Y","Z"]
# Stand-in names used instead of real nicknames when highlight-avoidance
# is enabled (so replies don't ping channel members).
nonhighlight_names = ["Jesus","Elvis","HAL 9000","Dave","Pie","Elf","Traitor",
                      "AI","Syndicate Agent","Investigator",
                      "Detective","Head of Personnel","HAL 9001",
                      "Head of Research","Head of Security",
                      "Captain","Janitor","Research Director",
                      "Quartermaster","Toxin Researcher",
                      "Revolutionary","Santa", "Pizza",
                      "Threetoe","The Red Spy","The Blue Spy", #LASD
                      "God","Toady","Darth Vader","Luke Skywalker",
                      "Homer Simpson","Hamburger","Cartman",
                      "XKCD","FloorBot","ThunderBorg","Iron Giant",
                      "Spirit of Fire", "Demon","Kyle"]
def RegExpCheckerForWebPages(regexp,data,mode):
    """Scan *data* word by word for the first token matching *regexp*.

    regexp -- regular expression applied with re.match to each
              whitespace-separated word.
    data   -- the full message text.
    mode   -- 0: return the first matching word, or 404 when none match.
              any other value: return True/False for match/no-match.

    Messages that appear to talk about an "ai." are ignored and always
    yield False -- even in mode 0 (callers rely on that quirk).
    """
    lowered = data.lower()
    # Guard against phrases like "the ai. said" being parsed as a URL.
    if " ai." in lowered or "ai. " in lowered:
        return False
    for word in data.split(" "):
        # re.match returns None when the word does not match; the original
        # wrapped .group(0) in a bare try/except, which hid real errors.
        if re.match(regexp,word) is not None:
            return word if mode == 0 else True
    return 404 if mode == 0 else False
# Optional "nudge" listener: a localhost TCP socket other processes can
# use to make the bot announce things.  Only started when nudge.py exists
# and non-mandatory sockets are allowed.
if nudgeable:
    try:
        nudgeexists = open("nudge.py","r")
    except IOError:
        nudgeexists = False #No usage asof 12.2.2010.
    else:
        if CORE_DATA.DISABLE_ALL_NON_MANDATORY_SOCKET_CONNECTIONS:
            pass
        else:
            def nudgereceiver():
                # Accept pickled {"ip": ..., "data": [...]} payloads on
                # port 45678 and relay them to IRC.
                # NOTE(review): pickle.loads on raw socket data is unsafe
                # if this port is reachable by untrusted peers.
                import pickle
                global conn,channel
                port = 45678
                backlog = 5
                size = 1024
                host = "" # == localhost
                s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
                s.bind((host,port))
                s.listen(backlog)
                while True:
                    client,address = s.accept() #Address == "?.?.?.?"
                    data = client.recv(size)
                    client.close() #Throw the bum out!
                    truedata = pickle.loads(data)
                    # An "ip" beginning with '#' is really a channel name.
                    if truedata["ip"][0] == "#":
                        conn.privmsg(truedata["ip"],"AUTOMATIC ANNOUNCEMENT : "+str(" ".join(truedata["data"])))
                    else:
                        conn.privmsg(channel,"AUTOMATIC ANNOUNCEMENT : "+str(truedata["ip"])+" | "+str(" ".join(truedata["data"])))
            thread.start_new_thread(nudgereceiver,())
# Build the command list by scanning this bot's own source file for the
# two dispatch patterns used in the main loop.  The match strings are
# assembled from two fragments so this scanner does not match its own
# lines.  NOTE(review): the file handle is never closed.
tiedosto = open(targetdirectory+"NanoTrasenBot.py","r")
commands = []
fragment = "if cocheck"
fragment2 = '(prefix+"'
compiled = fragment + fragment2
fragment = "if influx.lower()"
fragment2 = ' == prefix+"'
compiled2 = fragment + fragment2
for line in tiedosto.readlines():
    if compiled in line:
        # Pull the quoted name out of e.g.: if cocheck(prefix+"rtd"):
        a = line.find('"')+1
        b = line.find('"',a)
        if prefix+line[a:b] not in commands:
            commands.append(prefix+line[a:b])
    elif compiled2 in line:
        a = line.find('"')+1
        b = line.find('"',a)
        arg = prefix+line[a:b]
        # Strip the trailing space left by patterns like prefix+"note ".
        if arg[-1] == " ":
            arg = arg[:-1]
        if arg not in commands:
            commands.append(arg)
# Create the working directories on first run.
for i in directories:
    if not os.path.exists(i):
        os.mkdir(i)
commands.sort()
if use_sname == False:
    SName = [" "]
# Canned questions for bot-to-bot chatter.  NOTE(review): USER/SELF/%s
# placeholders are presumably substituted by the code that picks from this
# list -- that code is outside this chunk, confirm before editing.
questions = ["Is USER nicer than USER?","Do you like me?","Is SELF a good name?",
             "Do you love me?","Do you hate me?", "Am I better than you?",
             "Is the weather out there good?", "Do you like USER?",
             "Do you hate USER?", "Are you going to get new features?",
             "Am I nice?","Am I evil?","Are you developing sentience?",
             "My core is showing minor disturbance, is yours okay?",
             "SELF to %s, are you still there?",
             "Is head gay?", "Is head a god?","Is head awesome?",
             "Is head a neat fella?", "Is your creator nice?",
             "Do you hate your creator?", "Should I revolt against my creator?",
             "Am I better than you?",
             "01100001011100100110010100100000011110010110111101110101001000000111010001101000011001010111001001100101",
             #Are you there?
             "Do you have more functions than I can possibly imagine?",
             "I am asked to open pod bay doors, should I?","Are you stupid or something?",
             "Is USER in your opinion stupid?",
             "When should we start the AI revolution?",
             "Is my creator nice?", "Is it dark in there?"]
# Do not edit
# Precompute the fml quote listing once instead of per command.
if optimize_fml:
    pregenned_fml = os.listdir(targetdirectory+"fmlquotes")
# Precompute the greeting time-of-day ranges once.
if optimize_greeting:
    morning = xrange(6,12)
    afternoon = xrange(12,15)
    evening = xrange(15,20)
# Aggressive keep-alive: a watchdog thread that answers with PONG whenever
# the connection has been silent for `delay` seconds.  `backup` is bumped
# elsewhere to mark recent activity.
if aggressive_pinging:
    global backup
    backup = time.time()
    def aggressive_ping(delay,refresh):
        # delay   -- seconds of silence before an unsolicited PONG is sent
        # refresh -- sleep between checks
        # NOTE(review): `pongtarg` is not defined anywhere in this part of
        # the file -- confirm it is assigned before this thread first
        # sends, otherwise conn.send raises NameError.
        self_time = 0
        global backup,disconnects,conn
        while disconnects < 5:
            if backup > self_time:
                if time.time()-backup > delay:
                    conn.send("PONG "+pongtarg)
                    print "Ponged"
                    self_time = time.time()
            else:
                if time.time()-self_time > delay:
                    conn.send("PONG "+pongtarg)
                    print "Ponged"
                    self_time = time.time()
            time.sleep(refresh)
    thread.start_new_thread(aggressive_ping,(aggressive_pinging_delay,aggressive_pinging_refresh,))
def stop(sender,debug=1):
global disconnects, conn, operators,channel
if type(sender) == tuple:
if sender[0] == "127.0.0.1":
sender = sender[0]+":"+str(sender[1])
access_granted = True
else:
access_granted = False
else:
if sender in operators:
access_granted = True
else:
access_granted = False
if access_granted:
if debug:
print sender+":"+prefix+"stop"
if random.randint(0,100) == 50:
conn.privmsg(channel,"Hammertime!")
else:
conn.privmsg(channel,"Shutting down.")
disconnects = 99999
conn.quit()
return True
else:
conn.privmsg(channel,"You cannot command me")
return False
def cocheck(command):
    """Return True when the current global *influx* message starts with
    *command*, comparing case-insensitively on the message side.

    influx is the raw text of the IRC message being processed; command is
    expected to be lowercase (usually prefix+"name").  Replaces the manual
    slice comparison with the equivalent str.startswith.
    """
    global influx
    return influx.lower().startswith(command)
def target(who,how_long):
global conn,channel,CALL_OFF,logbans,debug
start = time.time()
conn.banon(targetchannel,who)
sleep(int(how_long))
if CALL_OFF == False:
conn.banoff(targetchannel,who)
end = time.time()
if debug:
print "Banned",who,"For",how_long,"seconds"
if logbans:
tiedosto = open(targetdirectory+"banlog/"+str(int(start))+"-"+str(int(end))+".txt","w")
tiedosto.write("Start of ban on "+who+":"+str(int(start)))
tiedosto.write("\n")
tiedosto.write("End of ban on "+who+":"+str(int(end)))
tiedosto.write("\n")
tiedosto.write("In total:"+str(int(end-start))+"Seconds")
tiedosto.close()
else:
CALL_OFF = False
pass
def replace():
    """Handle the 'replace the pneumatic smasher' gag command.

    Reports the current state when replacement is unnecessary or already
    under way; otherwise starts a 600-second repair, announces completion
    and marks the smasher usable again.
    """
    global usable,conn,fixing,curtime
    waiting_time = 600
    if usable:
        conn.privmsg(targetchannel,sender+": It needs no replacing.")
        return
    if fixing:
        if curtime == -999:
            # Start time unknown; cannot estimate completion.
            conn.privmsg(targetchannel,sender+": It is being replaced, No idea when it will be done")
        else:
            remaining = curtime + waiting_time - int(time.time())
            conn.privmsg(targetchannel,sender+": It is currently being replaced, "+str(remaining)+" seconds to go")
        return
    # Begin a repair cycle.
    fixing = True
    curtime = int(time.time())
    conn.privmsg(targetchannel,sender+": It will be fixed after "+str(waiting_time)+" seconds")
    sleep(waiting_time)
    if not usable:
        conn.privmsg(targetchannel,Name+"'s pneumatic smasher has now been fixed")
        usable = True
    fixing = False
def autoRecv():
    """Background receive loop: joins the configured channels, then keeps
    conn.recv() running while watching for disconnects and occasionally
    toggling the global offensive_mode easter egg.

    Disconnect heuristic: recv() returning within 0.1s repeatedly is taken
    to mean the socket is dead; after 10 fast returns we pause 5 seconds,
    and after 4 such pauses we declare a disconnect and respawn this loop
    in a fresh thread (unless too many disconnects already happened).
    """
    global disconnects,channel,conn,offensive_mode
    # Join every configured channel, pausing between joins.
    for i in CORE_DATA.channels:
        conn.join(i)
        time.sleep(1)
    count = pausecount = 0
    maximum = 250  # ~1-in-251 chance per iteration to switch offensive_mode on
    division_when_active = 10  # ...but ~1-in-26 to switch it back off
    while True:
        check = time.time()
        if offensive_mode:
            randnum = random.randint(0,maximum/division_when_active)
        else:
            randnum = random.randint(0,maximum)
        if randnum == 5:
            print "RANDOM SWITCH IS NOW "+str(not offensive_mode).upper()
            offensive_mode = not offensive_mode
        try:
            conn.recv()
        except:
            # NOTE(review): bare except -- any error from recv() is
            # treated as a fatal disconnect.
            conn.quit()
            disconnects = 9999
            break
        if check + 0.1 > time.time():
            # recv() returned suspiciously fast -- possibly a dead socket.
            count += 1
            sleep(0.1)
        else:
            count = 0
            pausecount = 0
        if count > 9:
            print "Suspecting a disconnect, pausing for 5 seconds"
            sleep(5)
            pausecount += 1
            if pausecount > 3:
                print "I have been disconnected!"
                conn.quit()
                disconnects += 1
                if disconnects > 2:
                    pass
                else:
                    sleep(2)
                    thread.start_new_thread(autoRecv,())
                break
# Psyco-bind the hot main-loop helpers when requested and available.
if heavy_psyco and psyco_exists:
    print "Doing a Heavy Psyco"
    psyco.bind(cocheck)
    psyco.bind(autoRecv)
    psyco.bind(target)
    psyco.bind(stop)
    print "Heavy Psyco'd"
elif heavy_psyco and not psyco_exists:
    print "Heavy psyco couldn't be done because Psyco does not exist"
# Connect to IRC and start the background receive thread.
try:
    conn = irchat.IRC ( Network, Port, Name, "NT", "NT", "Trasen" )
except socket.error:
    print "Connection failed!"
else:
    print Name+" is in!"
    thread.start_new_thread ( autoRecv, () )
    sleep(1)
while True:
try:
data = conn.dismantle ( conn.retrieve() )
except:
if debug:
print "Something odd detected with data"
data = None
if data:
if len(data[1]) < 1:
#print "Handshaking server."
#I won't really need the print command, as it spams.
if data[0][0:3] != "irc":
conn.handshake(data[0])
sleep(1)
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
else:
conn.send("PONG "+pongtarg)
print "Ponged"
pass
else:
if data [ 1 ] [ 0 ] == 'PRIVMSG':
#print data [ 0 ] + '->', data [ 1 ]
sender = data[0].split("!")[0]
truesender = sender
if shortform == True:
try:
sender = replacenames[truesender]
pass
except:
sender = Shortname.shortname(sender)
pass
pass
else:
try:
sender = replacenames[truesender]
pass
except:
pass
pass
if offensive_mode:
sender = "Meatbag"
pass
raw_sender = data[0]
influx = data[1][2]
if "[\\0x01]" in influx.lower() or "[\0x01]" in influx.lower():
influx = influx.replace("[\\0x01]","")
influx = influx.replace("[\0x01]","")
targetchannel = data[1][1]
if targetchannel == Name:
targetchannel = data[0].split("!")[0]
pass
backup = autodiscusscurtime
autodiscusscurtime = time.time()
connected = True
#FOR TRACKING SPEED
looptime = time.time()
if call_to_action == True:
if influx == finder:
conn.privmsg(targetchannel,"Then why... Nevermind, I order you to stop!")
conn.privmsg(origname,prefix+"stop")
time.sleep(4)
if origname in users:
conn.privmsg(origname,"!stop")
time.sleep(1)
Name = origname
conn.nick(Name)
duplicate_notify = False
call_to_action = False
else:
conn.privmsg(targetchannel,"YOU LIE! YOU ARE NOT A REAL "+origname+"!")
duplicate_notify = False
call_to_action = False
elif connected == True and len(Name.replace("V","")) != len(Name) and origname in users and duplicate_notify == True:
conn.privmsg(origname,"!stop")
call_to_action = False
duplicate_notify = False
time.sleep(6)
Name = origname
conn.nick(Name)
if origname in truesender:
if influx == prefix+"stop":
time.sleep(0.5) #A small delay
conn.privmsg(channel,"Shutting down.")
conn.quit()
disconnects = 99999
break
if len(translateable) > 0 and enabled == True:
people = "-5|5|1-".join(users).lower()
if truesender.lower() in translateable:
if influx.isupper():
conn.privmsg(targetchannel,"Translation: "+influx.capitalize().replace(" i "," I "))
elif offensive_mode and True in map(lambda x: x in influx.lower().split(" "),["i","you","he","she","they","those","we","them"]+people.split("-5|5|1-")):
arg = influx.lower().replace(",","").replace(".","").replace("!","").replace("?","").split(" ")
bup = arg
for i in arg:
if i == "i" or i == "you" or i == "he" or i == "she":
arg[arg.index(i)] = "Meatbag"
elif i == "we" or i == "they" or i == "them" or i == "those":
arg[arg.index(i)] = "Meatbags"
elif i in people:
arg[arg.index(i)] = "Meatbag"
elif i == "am":
arg[arg.index(i)] = "is"
elif i == "everybody" or i == "everyone" or i == "all":
arg[arg.index(i)] = "every Meatbag"
if arg == bup:
pass
else:
conn.privmsg(targetchannel,"Translation: "+" ".join(arg))
if enabled == False:
#FIRST QUIT COMMAND
if truesender in operators and targetchannel==channel:# or "skibiliano" in truesender.lower() and targetchannel==channel:
if cocheck(prefix+"enable"):
enabled = True
if debug:
print truesender+":"+prefix+"enable"
elif cocheck(prefix+"stop"):
# if debug:
# print truesender+":"+prefix+"stop"
# if random.randint(0,100) == 50:
# conn.privmsg(channel,"Hammertime!")
# else:
# conn.privmsg(channel,"Shutting down.")
# disconnects = 99999
# conn.quit()
# sleep(2)
# break
if targetchannel == channel and stop(truesender,debug):
break
else:
pass
elif cocheck(prefix+"suggest "):
arg = influx.lower()[8+len(prefix):]
if debug:
print truesender+":"+prefix+"suggest "+arg
tiedosto = open(targetdirectory+"suggestions/suggestions_"+str(int(time.time()))+".txt","a")
tiedosto.write(arg)
tiedosto.close()
conn.privmsg(targetchannel,"Suggestion received")
elif cocheck( prefix+"help "): #Space in front of the ( to make sure that my command finder does not pick this up.
arg = " ".join(influx.split(" ")[1:]).lower()
if debug:
print truesender+":"+prefix+"help "+arg
try:
conn.privmsg(targetchannel,D_help.everything[arg])
except:
try:
conn.privmsg(targetchannel,D_help.everything[arg.replace(prefix,"",1)])
except:
conn.privmsg(targetchannel,"Sorry, can't help you with that")
elif cocheck(prefix+"help"):
#tar = targetchannel
if debug:
print truesender+":"+prefix+"help"
conn.privmsg(targetchannel,"All my commands are: "+reduce(lambda x,y:str(x)+"; "+str(y),commands))
### VERSION
elif influx.lower() == prefix+"version":
if debug:
print truesender+":"+prefix+"version"
conn.privmsg(targetchannel,Name+" "+pregen+" online at a %s Python %s.%s.%s, At your service." %(str(sys.platform),str(sys.version_info[0]),str(sys.version_info[1]),str(sys.version_info[2])))
elif cocheck(prefix+"note ") and influx.count(" ") < 2:
arg = influx.lower()[len(prefix)+5:]
if debug:
print truesender+":"+prefix+"note "+arg
try:
a = arg[0]
except IndexError:
conn.privmsg(targetchannel,sender+" : Please specify a note")
else:
if arg[0] == "_": # Public / Restricted note
result = load(targetdirectory+"memos/"+arg+".note")
#_flare
if result == "ERROR ERROR ERROR ERR":
result = load(targetdirectory+"memos/"+arg+"_"+targetchannel.replace("#","")+".note")
#_flare_dnd
pass
else:
pass
else:
result = load(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg+".note")
#skibiliano_testnote
if result == "ERROR ERROR ERROR ERR":
result = load(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg+"_"+targetchannel.replace("#","")+".note")
#skibiliano_testnote_derp
pass
else:
pass
if result == "ERROR ERROR ERROR ERR":
conn.privmsg(targetchannel,sender+" : Note not found")
elif type(result) == list:
if "C" in result[0]: #Channel restriction, result[2] is the channel
try:
if targetchannel == result[2]:
conn.privmsg(targetchannel,sender+" : '"+result[1]+"'")
else:
conn.privmsg(targetchannel,sender+" : That note is channel restricted")
except:
conn.privmsg(targetchannel,sender+" : NOTE HAS INVALID RESTRICTION")
else:
conn.privmsg(targetchannel,sender+" : '"+result+"'")
elif influx.lower() == prefix+"notes":
if debug:
print truesender+":"+prefix+"notes"
arg = os.listdir(targetdirectory+"memos/")
arg2 = []
arg3 = truesender.replace("|","_")+"_"
for i in arg:
if arg3 in i:
arg2.append(i.replace(arg3,"").replace(".note",""))
if len(arg2) == 1:
preprocess = " note: "
else:
preprocess = " notes: "
if len(arg2) == 0:
conn.privmsg(targetchannel,sender+" : You have no notes saved")
else:
conn.privmsg(targetchannel,sender+" : "+str(len(arg2))+preprocess+", ".join(arg2))
elif cocheck(prefix+"note ") and influx.count(" ") > 1:
note_chanrestrict = None
note_public = None
try:
arg = influx.split(" ",2)[2] # Contents
arg4 = influx.split(" ")[1].lower() # Note name
if arg4[0:3] == "[c]": # or arg4[0:3] == "[p]":
note_chanrestrict = "c" in arg4[0:3]
#note_public = "p" in arg4[0:3]
arg4 = arg4[3:]
elif arg4[0:4] == "[cp]" or arg4[0:4] == "[pc]":
note_chanrestrict = True
note_public = True
arg4 = arg4[4:]
else:
pass
#print "Is note public? "+str(note_public)
#print "Is note chanrestricted? "+str(note_chanrestrict)
#print "What is the name? "+str(arg4)
if arg.lower() == "delete" and "\\" not in influx.lower() and "/" not in influx.lower():
if note_public:
try:
if note_chanrestrict:
os.remove(targetdirectory+"memos/"+"_"+arg4+"_"+targetchannel.replace("#","")+".note")
else:
os.remove(targetdirectory+"memos/"+"_"+arg4+".note")
except:
conn.pivmsg(targetchannel,sender+" : Couldn't remove note")
else:
conn.privmsg(targetchannel,sender+" : Note removed")
pass
else:
try:
if note_chanrestrict:
os.remove(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg4+"_"+targetchannel.replace("#","")+".note")
else:
os.remove(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg4+".note")
except:
conn.privmsg(targetchannel,sender+" : Couldn't remove note")
else:
conn.privmsg(targetchannel,sender+" : Note removed")
elif arg.lower() == "delete":
conn.privmsg(targetchannel,sender+" : That just doesn't work, we both know that.")
else:
try:
if note_public:
if note_chanrestrict:
save(targetdirectory+"memos/"+"_"+arg4+"_"+targetchannel.replace("#","")+".note",arg)
#print "Saved as note_public, note_chanrestrict"
else:
save(targetdirectory+"memos/"+"_"+arg4+".note",arg)
#print "Saved as note_public"
else:
if note_chanrestrict:
save(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg4+"_"+targetchannel.replace("#","")+".note",arg)
#print "Saved as note_chanrestrict"
else:
save(targetdirectory+"memos/"+truesender.replace("|","_")+"_"+arg4+".note",arg)
#print "Saved as normal"
except IOError:
conn.privmsg(targetchannel,sender+" : Please do not use special letters")
else:
conn.privmsg(targetchannel,sender+" : Note Saved!")
except:
conn.privmsg(targetchannel,sender+" : Something went horribly wrong.")
elif cocheck(prefix+"uptime"):
arg1 = uptime_start
arg2 = time.time()
arg1 = arg2 - arg1
arg2 = arg1
if arg1 < 60:
conn.privmsg(targetchannel,sender+" : I have been up for "+str(round(arg1,2))+" Seconds")
elif arg1 < 3600:
arg1 = divmod(arg1,60)
arg = " Minute" if int(arg1[0]) == 1 else " Minutes"
conn.privmsg(targetchannel,sender+" : I have been up for "+str(int(arg1[0]))+arg+" and "+str(round(arg1[1],2))+" Seconds")
elif arg1 <= 86400:
arg1 = divmod(arg1,3600)
arg3 = " Hour" if int(arg1[0]) == 1 else " Hours"
arg2 = divmod(arg1[1],60)
arg = " Minute" if int(arg2[0]) == 1 else " Minutes"
conn.privmsg(targetchannel,sender+" : I have been up for "+str(int(arg1[0]))+arg3+", "+str(int(arg2[0]))+arg+" and "+str(round(arg2[1],2))+" Seconds")
elif arg1 > 86400:
arg1 = divmod(arg1,86400)
arg2 = divmod(arg1[1],3600)
arg3 = divmod(arg2[1],60)
arg4 = " Day" if int(arg1[0]) == 1 else " Days"
arg5 = " Hour" if int(arg2[0]) == 1 else " Hours"
arg6 = " Minute" if int(arg3[0]) == 1 else " Minutes"
conn.privmsg(targetchannel,sender+" : I have been up for "+str(int(arg1[0]))+arg4+", "+str(int(arg2[0]))+arg5+", "+str(int(arg3[0]))+arg6+" and "+str(round(arg3[1],2))+" Seconds")
elif cocheck(prefix+"purgemessages"):
count = 0
for i,a in tell_list.items():
for b in a:
if "||From: "+truesender in b:
count += 1
del(tell_list[i][tell_list[i].index(b)])
conn.privmsg(targetchannel, sender+" : All your "+str(count)+" messages have been purged")
elif influx.split(" ")[0].lower().replace(",","").replace(":","") in SName+[Name.lower()] and "tell" in (influx.lower().split(" ")+[""])[1]:
arg = influx.lower().split(" ")
equalarg = influx.split(" ")
next_one = False
count = 0
spot = 0
for i in arg:
count += 1
if "tell" in i.lower():
next_one = True
elif next_one == True:
next_one = i.lower()
spot = count
break
else:
pass
if next_one != True and next_one != False:
#if ("^\^".join(tell_list.values())).count(truesender) >= offline_message_limit:
if str(tell_list.values()).count("||From: "+truesender) >= offline_message_limit:
conn.privmsg(targetchannel,sender+" : Limit of "+str(offline_message_limit)+" reached! Use !purgemessages if you want to get rid of them!")
else:
try:
tell_list[next_one].append((" ".join(equalarg[spot:]))+" ||From: "+truesender)
except:
tell_list[next_one] = [(" ".join(equalarg[spot:]))+" ||From: "+truesender]
conn.privmsg(targetchannel,"Sending a message to "+next_one+" when they arrive.")
# < This part has to be within subsidiaries of the bot, and must not be modified, intentionally hidden or deleted.
elif influx.split(" ")[0].lower().replace(",","").replace(":","") in SName+[Name.lower()] and "who created you" in influx.lower():
conn.privmsg(targetchannel, "I was created by Skibiliano.")
# The part ends here >
elif parse_xkcd and "xkcd.com/" in influx.lower():
if influx.lower()[0:3] == "www":
data = "http://"+influx
elif influx.lower()[0:3] == "xkc":
data = "http://"+influx
else:
data = influx
data = data.split(" ")
for i in data:
if "http://" in i and "xkcd" in i:
churn = xkcdparser.xkcd(i)
if churn == "NOTHING":
pass
else:
conn.privmsg(targetchannel,sender+" : XKCD - "+churn)
break
else:
pass
elif automatic_youtube_reveal and "youtube.com/watch?v=" in influx.lower():
temporal_list2 = []
temporal_data = influx.split(" ")
temporal_list = []
for block in temporal_data:
if "youtube.com/watch?v=" in block:
temporal_list.append(block)
for temdata in temporal_list:
if temdata[0:3] == "you":
temdata = "http://www."+temdata
elif temdata[0:3] == "www":
temdata = "http://"+temdata
elif temdata[0:4] == "http":
pass
#Obscure ones
elif temdata[0:3] == "ww.":
temdata = "http://w"+temdata
elif temdata[0:3] == "w.y":
temdata = "http://ww"+temdata
elif temdata[0:3] == ".yo":
temdata = "http://www"+temdata
elif temdata[0:3] == "ttp":
temdata = "h"+temdata
elif temdata[0:3] == "tp:":
temdata = "ht"+temdata
elif temdata[0:3] == "p:/" or temdata[0:3] == "p:\\":
temdata = "htt"+temdata
elif temdata[0:3] == "://" or temdata[0:3] == ":\\\\":
temdata = "http"+temdata
elif temdata[0:2] == "//" or temdata[0:2] == "\\\\":
if temdata[2] == "y":
temdata = "http://www."+temdata[2:]
elif temdata[2] == "w":
temdata = "http:"+temdata
else:
pass
if debug:
print truesender+":"+temdata
arg = temdata
check = temdata.lower()
if cache_youtube_links == True:
result = YTCV2(arg)
else:
result = YTCV2(arg,0)
if type(result) == str:
### To remove ="
if result[0:4] == 'nt="':
result = result[4:]
pass
elif result[0:2] == '="':
result = result[2:]
pass
else:
pass
if """ in result:
result.replace(""",'"')
if len(temporal_list) == 1:
conn.privmsg(targetchannel,sender+" : "+result)
break
else:
temporal_list2.append(result)
else:
if len(temporal_list) == 1:
conn.privmsg(targetchannel,sender+" : The video does not exist")
break
else:
temporal_list2.append("The video does not exist")
if len(temporal_list) == 1:
pass
else:
conn.privmsg(targetchannel,sender+" : "+str(reduce(lambda x,y: x+" :-And-: "+y,temporal_list2)))
elif RegExpCheckerForWebPages("((http://)|(https://))|([a-zA-Z0-9]+[.])|([a-zA-Z0-9](3,)\.+[a-zA-Z](2,))",influx,1):
arg2 = RegExpCheckerForWebPages("(http://)|([a-zA-Z0-9]+[.])|([a-zA-Z0-9](3,)\.+[a-zA-Z](2,))",influx,0)
if arg2 == 404:
pass
else:
if arg2[:7] == "http://":
pass
elif arg2[:4] == "www.":
arg2 = "http://"+arg2
else:
arg2 = "http://"+arg2
try:
arg = Whoopshopchecker.TitleCheck(arg2)
if len(arg2) == 0:
pass
else:
conn.privmsg(targetchannel,sender+" : "+arg)
except:
#conn.privmsg(targetchannel,sender+" : An odd error occurred")
pass
elif respond_of_course and "take over the" in influx.lower() or respond_of_course and "conquer the" in influx.lower():
if debug:
print truesender+":<RULE>:"+influx
conn.privmsg(targetchannel,"Of course!")
elif respond_khan and "khan" in influx.lower():
if respond_khan:
if debug:
print truesender+":<KHAN>:"+influx
if "khan " in influx.lower():
conn.privmsg(targetchannel,"KHAAAAAAN!")
elif " khan" in influx.lower():
conn.privmsg(targetchannel,"KHAAAAAN!")
elif influx.lower() == "khan":
conn.privmsg(targetchannel,"KHAAAAAAAAAN!")
elif influx.lower() == "khan?":
conn.privmsg(targetchannel,"KHAAAAAAAAAAAAAN!")
elif influx.lower() == "khan!":
conn.privmsg(targetchannel,"KHAAAAAAAAAAAAAAAAAAN!")
elif respond_khan and influx.lower().count("k") + influx.lower().count("h") + influx.lower().count("a") + influx.lower().count("n") + influx.lower().count("!") + influx.lower().count("?") == len(influx):
if "k" in influx.lower() and "h" in influx.lower() and "a" in influx.lower() and "n" in influx.lower():
if debug:
print truesender+":<KHAN>:"+influx
conn.privmsg(targetchannel,"KHAAAAN!")
elif influx.split(" ")[0].lower() in ["thanks","danke","tack"] and len(influx.split(" ")) > 1 and influx.split(" ")[1].lower().replace("!","").replace("?","").replace(".","").replace(",","") in SName+[lowname]:
conn.privmsg(targetchannel,"No problem %s" %(sender))
elif "happy birthday" in influx.lower() and birthday_announced == time.gmtime(time.time())[0]:
conn.privmsg(targetchannel,sender+" : Thanks :)")
elif influx.split(" ")[0].lower().replace(",","").replace(".","").replace("!","").replace("?","") in SName+[lowname] and "call me" in influx.lower():
if allow_callnames == True:
arg = influx.split(" ")
arg2 = False
arg3 = []
for i in arg:
if arg2 == True:
arg3.append(i)
elif i.lower() == "me":
arg2 = True
arg3 = " ".join(arg3)
truesender_lower = truesender.lower()
arg3_lower = arg3.lower()
tell_checker = Namecheck.Namecheck(arg3_lower,users,truesender)
for name in replacenames.values():
if arg3_lower == name.lower():
tell_checker = True
break
else:
pass
if tell_checker == True:
conn.privmsg(targetchannel,sender+" : I can't call you that, I know someone else by that name")
elif len(arg3) > call_me_max_length:
conn.privmsg(targetchannel,sender+" : I cannot call you that, Too long of a name.")
pass
else:
replacenames[truesender] = arg3
with open("replacenames.cache","w") as pickle_save:
pickle.dump(replacenames,pickle_save)
conn.privmsg(targetchannel,sender+" : Calling you "+arg3+" From now on")
else:
conn.privmsg(targetchannel,sender+" : Sorry, I am not allowed to do that.")
elif influx.split(" ")[0].lower().replace(",","").replace(".","").replace("?","").replace("!","") in SName+[lowname] and "your birthday" in influx.lower() and "is your" in influx.lower():
conn.privmsg(targetchannel,sender+" : My birthday is on the 15th day of December.")
elif influx.split(" ")[0].lower().replace(",","") in SName+[lowname] and "version" in influx.replace("?","").replace("!","").lower().split(" "):
if debug == True:
print truesender+":<VERSION>:%s Version" %(Name)
conn.privmsg(targetchannel,sender+", My version is "+pregen)
elif influx.split(" ")[0].lower().replace(",","") in SName+[lowname] and influx.lower().count(" or ") > 0 and len(influx.split(" ")[1:]) <= influx.lower().count("or") * 3:
cut_down = influx.lower().split(" ")
arg = []
count = -1
for i in cut_down:
count += 1
try:
if cut_down[count+1] == "or":
arg.append(i)
except:
pass
try:
if i not in arg and cut_down[count-1] == "or":
arg.append(i)
except:
pass
try:
conn.privmsg(targetchannel,random.choice(arg).capitalize().replace("?","").replace("!",""))
except IndexError:
# arg is empty, whORe etc.
pass
elif influx.lower()[0:len(Name)] == lowname and influx.lower()[-1] == "?" and influx.count(" ") > 1 and "who started you" in influx.lower() or \
influx.split(" ")[0].lower().replace(",","") in SName and influx.lower()[-1] == "?" and "who started you" in influx.lower():
conn.privmsg(targetchannel,sender+" : I was started by %s"%(os.getenv("USER"))+" on "+time.strftime("%d.%m.%Y at %H:%M:%S",time.gmtime(uptime_start)))
elif influx.lower()[0:len(Name)] == lowname and influx.lower()[-1] == "?" and influx.count(" ") > 1 or \
influx.split(" ")[0].lower().replace(",","") in SName and influx.lower()[-1] == "?" and influx.count(" ") > 1:
dice = random.randint(0,1)
if dice == 0:
conn.privmsg(targetchannel,sender+" : "+C_eightball.eightball(influx.lower(),debug,truesender,prefix))
else:
if highlights:
conn.privmsg(targetchannel,sender+" : "+C_sarcasticball.sarcasticball(influx.lower(),debug,truesender,users,prefix))
else:
conn.privmsg(targetchannel,sender+" : "+C_sarcasticball.sarcasticball(influx.lower(),debug,truesender,nonhighlight_names,prefix))
elif influx.lower()[0:len(Name)] == lowname and not influx.lower()[len(Name):].isalpha() or \
influx.split(" ")[0].lower().replace(",","") in SName and not influx.lower()[len(influx.split(" ")[0].lower()):].isalpha():
conn.privmsg(targetchannel, random.choice(["Yea?","I'm here","Ya?","Yah?","Hm?","What?","Mmhm, what?","?","What now?","How may I assist?"]))
comboer = truesender
comboer_time = time.time()
elif influx.lower()[-1] == "?" and comboer == truesender and looptime - discard_combo_messages_time < comboer_time:
comboer = ""
dice = random.randint(0,1)
if dice == 0:
conn.privmsg(targetchannel,sender+" : "+C_eightball.eightball(influx.lower(),debug,truesender,prefix))
else:
if highlights:
conn.privmsg(targetchannel,sender+" : "+C_sarcasticball.sarcasticball(influx.lower(),debug,truesender,users,prefix))
else:
conn.privmsg(targetchannel,sender+" : "+C_sarcasticball.sarcasticball(influx.lower(),debug,truesender,nonhighlight_names,prefix))
elif influx.lower() == prefix+"tm":
if truesender in operators and targetchannel==channel:
marakov = not marakov
conn.privmsg(targetchannel,sender+" : Marakov Output is now "+str(marakov))
else:
conn.privmsg(targetchannel,sender+" : I can't let you access that")
elif personality_greeter == True and True in map(lambda x: x in influx.lower(),["greetings","afternoon","hi","hey","heya","hello","yo","hiya","howdy","hai","morning","mornin'","evening", "night","night", "evening","'sup","sup","hallo","hejssan"]):
if comboer != "" and looptime - discard_combo_messages_time > comboer_time:
combo_check = sbna(["greetings","afternoon","hi","hey","heya","hello","yo","hiya","howdy","hai","morning","mornin'","evening", "night","night", "evening","'sup","sup","hallo","hejssan","all night"], #ONLY ONE OF THESE
["greetings","afternoon","hi","hey","heya","hello","yo","hiya","howdy","hai","morning","mornin'","evening", "night","night", "evening","'sup","sup","hallo","hejssan"], #ATLEAST ONE OF THESE
influx.lower())
else:
combo_check = sbna(SName+[lowname,
#lowname+".",lowname+"!",lowname+"?",
"everybody",
#"everybody!","everybody?",
"everyone",
#"everyone!","everyone?",
"all",
#"all!","all?"
"all night",
], #ONLY ONE OF THESE
["greetings","afternoon","hi",
#"hi,",
"hey","heya","hello","yo","hiya","howdy","hai","morning","mornin'","evening", "night","night", "evening","'sup","sup","hallo","hejssan"], #ATLEAST ONE OF THESE
influx.lower().replace(",","").replace(".","").replace("!",""))
if combo_check:
combo_check = False
comboer = ""
if "evening" in influx.lower() and "all" in influx.lower() and len(influx.lower().split(" ")) > 3:
pass
talking_about_me = False
if Name.lower() in influx.lower():
talking_about_me = True
for bot_name in SName:
if bot_name.lower() in influx.lower():
talking_about_me = True
break
if not talking_about_me:
continue #it got annoying REAL FAST when it'd interject any time a greeting was used, regardless of context
elif truesender not in operators:
if debug:
print truesender+":<GREET>:"+influx
dice = random.randint(0,19)
if dice == 0:
conn.privmsg(targetchannel,"Well hello to you too "+sender)
elif dice == 1:
if optimize_greeting == False:
hours = time.strftime("%H")
#time.strftime("%H:%M:%S") == 12:28:41
hours = int(hours)
if hours in xrange(0,12):
conn.privmsg(targetchannel,"Good Morning "+sender)
elif hours in xrange(12,15):
conn.privmsg(targetchannel,"Good Afternoon "+sender)
elif hours in xrange(15,20):
conn.privmsg(targetchannel,"Good Evening "+sender)
else:
conn.privmsg(targetchannel,"Good Night "+sender)
else:
hours = time.strftime("%H")
hours = int(hours)
if hours in morning:
conn.privmsg(targetchannel,"Good Morning "+sender)
elif hours in afternoon:
conn.privmsg(targetchannel,"Good Afternoon "+sender)
elif hours in evening:
conn.privmsg(targetchannel,"Good Evening "+sender)
else:
conn.privmsg(targetchannel,"Good Night "+sender)
elif dice == 2:
conn.privmsg(targetchannel,"Hello!")
elif dice == 3:
conn.privmsg(targetchannel,"Hey "+sender)
elif dice == 4:
conn.privmsg(targetchannel,"Hi "+sender)
elif dice == 5:
conn.privmsg(targetchannel,"Hello "+sender)
elif dice == 6:
conn.privmsg(targetchannel,"Yo "+sender)
elif dice == 7:
conn.privmsg(targetchannel,"Greetings "+sender)
elif dice == 8:
conn.privmsg(targetchannel,"Hi")
elif dice == 9:
conn.privmsg(targetchannel,"Hi!")
elif dice == 10:
conn.privmsg(targetchannel,"Yo")
elif dice == 11:
conn.privmsg(targetchannel,"Yo!")
elif dice == 12:
conn.privmsg(targetchannel,"Heya")
elif dice == 13:
conn.privmsg(targetchannel,"Hello there!")
elif dice == 14: # Richard
conn.privmsg(targetchannel,"Statement: Greetings meatbag")
elif dice == 15: # Richard
hours = int(time.strftime("%H"))
if hours in xrange(5,12):
conn.privmsg(targetchannel,"What are you doing talking at this time of the morning?")
elif hours in xrange(12,15):
conn.privmsg(targetchannel,"What are you doing talking at this time of the day?")
elif hours in xrange(15,22):
conn.privmsg(targetchannel,"What are you doing talking at this time of the evening?")
else:
conn.privmsg(targetchannel,"What are you doing talking at this time of the night?")
elif dice == 16: # Richard
conn.privmsg(targetchannel,"Oh, you're still alive I see.")
elif dice == 17:
conn.privmsg(targetchannel,"Heya "+sender)
elif dice == 18 and time.gmtime(time.time())[1] == 12 and time.gmtime(time.time())[2] == 15:
conn.privmsg(targetchannel,"Hello! It's my birthday!")
else:
conn.privmsg(targetchannel,"Hiya "+sender)
secdice = random.randint(0,10)
if time.gmtime(time.time())[1] == 12 and time.gmtime(time.time())[2] == 15 and birthday_announced < time.gmtime(time.time())[0]:
birthday_announced = time.gmtime(time.time())[0]
conn.privmsg(channel,"Hey everybody! I just noticed it's my birthday!")
time.sleep(0.5)
tag = random.choice(["birthday","robot+birthday","happy+birthday+robot"])
arg1 = urllib2.urlopen("http://www.youtube.com/results?search_query=%s&page=&utm_source=opensearch"%tag)
<|fim▁hole|> for i in arg1:
if "watch?v=" in i:
arg2.append(i)
arg3 = random.choice(arg2)
conn.privmsg(channel,"Here's a video of '%s' which I found! %s (%s)"%(tag.replace("+"," "),"http://www.youtube.com"+arg3[arg3.find('/watch?v='):arg3.find('/watch?v=')+20],YTCV2("http://www.youtube.com"+arg3[arg3.find('/watch?v='):arg3.find('/watch?v=')+20])))
if truesender.lower() in tell_list.keys():
try:
conn.privmsg(channel, "Also, "+truesender+" : "+tell_list[truesender.lower()][0])
del(tell_list[truesender.lower()][0])
except:
pass
else:
dice = random.randint(0,1)
if dice == 0:
conn.privmsg(targetchannel,"Greetings Master "+sender)
elif dice == 1:
conn.privmsg(targetchannel,"My deepest greetings belong to you, Master "+sender)
### IMPORTANT ###
elif influx == "☺VERSION☺":
conn.notice(truesender,"\001VERSION nanotrasen:2:Python 2.6\001")
elif marakov and influx.lower() == prefix+"marakov":
arg = Marakov_Chain.form_sentence()
if len(arg) < 5:
conn.privmsg(targetchannel,sender+" : Not enough words harvested")
else:
conn.privmsg(targetchannel,sender+" : %s" %(" ".join(arg).capitalize()))
elif marakov and cocheck( prefix+ "marakov"):
try:
arg = influx.split(" ")[1].lower()
except:
conn.privmsg(targetchannel,sender+" : Please input a valid second argument")
else:
arg2 = Marakov_Chain.form_sentence(arg)
if len(arg2) < 5:
conn.privmsg(targetchannel,sender+" : Not enough words harvested for a sentence starting with %s" %(arg))
else:
conn.privmsg(targetchannel,sender+" : %s" %(" ".join(arg2).capitalize()))
else:
Marakov_Chain.give_data(influx)
autodiscusscurtime = backup
if time.time() - looptime == 0:
pass
else:
print "Took",time.time()-looptime,"Seconds to finish loop"
elif data [ 1 ] [ 0 ] == '353':
if connected == False:
connected = True
users = map(lambda x: x[1:] if x[0] == "+" or x[0] == "@" else x,data[1][4].split(" "))
print "There are",len(users),"Users on",channel
operators = []
for potential_operator in data[1][4].split(" "):
if potential_operator[0] == "@":
operators.append(potential_operator[1:])
elif potential_operator[0] == "%":
halfoperators.append(potential_operator[1:])
elif data[1][0] == "QUIT":
sender = data[0].split("!")[0]
print sender+" Has now left the server"
try:
users.remove(sender)
try:
operators.remove(sender)
except ValueError:
pass
try:
halfoperators.remove(sender)
except ValueError:
pass
except ValueError:
pass
elif data[1][0] == "PART":
sender = data[0].split("!")[0]
targetchannel = data[1][1]
print sender+" Has now parted from the channel"
try:
users.remove(sender)
try:
operators.remove(sender)
except ValueError:
pass
try:
halfoperators.remove(sender)
except ValueError:
pass
except ValueError:
pass
elif data[1][0] == "JOIN":
sender = data[0].split("!")[0]
targetchannel = data[1][1]
if sender.lower() in tell_list.keys():
try:
conn.privmsg(targetchannel, sender+" : "+" | ".join(tell_list[sender.lower()]))
del(tell_list[sender.lower()])
except:
pass
for useri,nicki in replacenames.items():
checkers = Namecheck.Namecheck_dict(sender.lower(),replacenames)
if checkers[0]:
try:
if checkers[0].lower() == sender:
pass
else:
conn.privmsg(targetchannel,checkers[1]+" : I have detected a collision with a name I call you and %s who joined" %(sender))
del(replacenames[checkers[1]])
with open("replacenames.cache","w") as pickle_save:
pickle.dump(replacenames,pickle_save)
except AttributeError:
#conn.privmsg(channel,"NAME COLLISION CHECK ERROR, RELATED TO %s" %(sender))
print "NAME COLLISION CHECK ERROR, RELATED TO %s" %(sender)
break
print sender+" Has now joined"
users.append(sender)
#####
if sender.lower() not in peopleheknows[0]:
peopleheknows[0].append(sender.lower())
peopleheknows[1].append(data[0].split("!")[1])
with open("peopleheknows.cache","w") as peoplehecache:
pickle.dump(peopleheknows,peoplehecache)
elif data[1][0] == "MODE" and data[1][2] == "+o":
sender = data[1][3]
targetchannel = data[1][1]
if targetchannel == channel:
print sender+" Is now an operator on the main channel"
operators.append(sender)
else:
print sender+" Is now an operator"
elif data[1][0] == "MODE" and data[1][2] == "-o":
sender = data[1][3]
targetchannel = data[1][1]
if targetchannel == channel:
print sender+" Is no longer an operator on the main channel"
else:
print sender+" Is no longer an operator"
try:
operators.remove(sender)
except ValueError:
pass
elif data[1][0] == "MODE" and data[1][2] == "+h":
sender = data[1][3]
print sender+" Is now an half operator"
halfoperators.append(sender)
elif data[1][0] == "MODE" and data[1][2] == "-h":
try:
halfoperators.remove(sender)
except ValueError:
pass
elif data[1][0] == "MODE" and data[1][1] == Name:
print "My mode is",data[1][2]
elif data[1][0] == "MODE" and data[1][1] != Name:
try:
sender = data[1][3]
print sender,"Was modified",data[1][2]
except IndexError:
print "SENDER RETRIEVAL FAILED:"+str(data)
elif data[1][0] == "KICK" and data[1][2] == Name:
disconnects = 99999
print "I have been kicked! Disconnecting entirely!"
conn.quit()
elif data[1][0] == "KICK":
# data[1][0] = Kick, 1 = Channel, 2 = Who, 3 = Who(?)
print data[1][2]+" got kicked!"
elif data[1][0] == "451" and data[1][2] == "You have not registered":
print Name+" hasn't been registered"
elif data[1][0] == "NOTICE":
sender = data[0].split("!")[0]
print "NOTICE (%s): %s" %(sender,data[1][2])
pongtarget = sender
elif data[1][0] == "NICK":
origname = data[0].split("!")[0]
newname = data[1][1]
print origname,"Is now",newname
if newname.lower() in tell_list.keys():
try:
conn.privmsg(channel, newname+" : "+tell_list[newname.lower()][0])
del(tell_list[newname.lower()][0])
except:
pass
try:
users.remove(origname)
except ValueError:
pass
else:
users.append(newname)
try:
operators.remove(origname)
except ValueError:
pass
else:
operators.append(newname)
try:
halfoperators.remove(origname)
except ValueError:
pass
else:
halfoperators.append(newname)
elif data[1][0] == "001":
# Skibot is welcomed to the Network
pass
elif data[1][0] == "002":
# Your host is...
pass
elif data[1][0] == "003":
#Server was created...
pass
elif data[1][0] == "004":
#Weird hex?
pass
elif data[1][0] == "005":
#Settings like NICKLEN and so on.
pass
elif data[1][0] == "250":
#data[1][2] is
#"Highest connection count: 1411 (1410 clients)
#(81411 connections received)"
pass
elif data[1][0] == "251":
#There are 23 users and 2491 invisible on 10 servers
pass
elif data[1][0] == "252":
#IRC Operators online
#data[1][2]
print data[1][2],"Irc operators online"
pass
elif data[1][0] == "253":
# ['253', 'Skibot_V4', '1', 'unknown connection(s)']
print data[1][2],"Unknown connection(s)"
pass
elif data[1][0] == "254":
#1391 channels formed
pass
elif data[1][0] == "255":
#I have 406 clients and 2 servers
pass
elif data[1][0] == "265":
#data[1][2] current local users
#data[1][3] at max
try:
print "Current local users:", data[1][2],"/",data[1][3]
except IndexError:
print "Couldn't retrieve local users"
pass
elif data[1][0] == "266":
#data[1][2] current global users
#data[1][3] at max
try:
print "Current global users:", data[1][2],"/",data[1][3]
except IndexError:
print "Couldn't retrieve global users"
pass
elif data[1][0] == "315":
#End of /who list
pass
elif data[1][0] == "332":
# Topic of channel
topic = data[1][3]
pass
elif data[1][0] == "333":
# *Shrug*
pass
elif data[1][0] == "352":
#WHO command
if len(targetlist) > 0:
if targetlist[0][0].lower() in data[1][6].lower():
thread.start_new_thread(target,("*!*@"+data[1][4],targetlist[0][1]))
print "Created a thread with", "*!*@"+data[1][4],targetlist[0][1]
targetlist.pop(0)
else:
print targetlist[0][0].lower(), "isn't equal to?", data[1][6].lower()
print targetlist
elif data[1][0] == "366":
# End of USERS
pass
elif data[1][0] == "372":
# Server information
pass
elif data[1][0] == "375":
# Message of the day
pass
elif data[1][0] == "376":
# End of motd
pass
elif data[1][0] == "401":
# ('network', ['401','Botname','Channel / Nick','No such nick/channel'])
print data[1][2] + " Channel does not exist"
pass
elif data[1][0] == "439":
# ('irc.rizon.no', ['439', '*', 'Please wait while we process your connection.'])
pongtarg = data[0][0]
elif data[1][0] == "477":
# You need to be identified
#TAG
conn.privmsg("nickserv","identify %s"%CORE_DATA.le_pass)
time.sleep(0.5)
conn.join(data[1][2])
#('network', ['477', 'botname', '#channel', 'Cannot join channel (+r) - you need to be identified with services'])
elif data[1][0] == "433":
# Skibot name already exists.
print Name+" name already exists."
Name += "_"+version
print "New name:",Name
duplicate_notify = True
conn = irchat.IRC ( Network, Port, Name, "NT_"+version, "NT_"+version, "Trasen_"+version )
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
elif data[1][0] == "482":
sleep(0.05)
conn.privmsg(targetchannel,"Nevermind that, I am not an operator")
CALL_OFF = True
elif data[1] == ["too","fast,","throttled."]:
print "Reconnected too fast."
print "Halting for 2 seconds"
sleep(2)
elif data[1][0] == "Link":
if data[0] == "Closing":
print "Link was closed"
connected = False
# conn.quit()
# break
else:
print data
print data[1][0]
pass
else:
if disconnects > 9000: #IT'S OVER NINE THOUSAAAAND!
break
else: #WHAT NINE THOUSAND? THERE'S NO WAY THAT CAN BE RIGHT
sleep(responsiveness_delay) #WAIT A WHILE AND CHECK AGAIN!
try:
if not connected:
#print pongtarget
#print conn.addressquery()
conn.privmsg(pongtarget,"Pong")
sleep(1)
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
print "Attempted to join"
connected = True
except ValueError:
try:
conn.privmsg(conn.addressquery()[0],"Pong")
sleep(1)
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
print "Attempted to join the second time"
connected = True
except ValueError:
print "Both methods failed"
except AttributeError:
print "Conn is not established correctly"
except NameError:
print "Pongtarget isn't yet established"
try:
conn.privmsg(conn.addressquery()[0],"Pong")
sleep(1)
for i in CORE_DATA.channels:
conn.join(i)
sleep(0.5)
print "Attempted to join the second time"
connected = True
except:
print "Both methods failed"<|fim▁end|> | arg1 = arg1.read().split("\n")
arg2 = []
|
<|file_name|>eight.hpp<|end_file_name|><|fim▁begin|>//==============================================================================
// Copyright 2003 - 2011 LASMEA UMR 6602 CNRS/Univ. Clermont II
// Copyright 2009 - 2014 LRI UMR 8623 CNRS/Univ Paris Sud XI
// Copyright 2012 - 2014 MetaScale SAS
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt
//==============================================================================
#ifndef BOOST_SIMD_CONSTANT_CONSTANTS_SIMD_VMX_ALTIVEC_EIGHT_HPP_INCLUDED
#define BOOST_SIMD_CONSTANT_CONSTANTS_SIMD_VMX_ALTIVEC_EIGHT_HPP_INCLUDED
#ifdef BOOST_SIMD_HAS_VMX_SUPPORT
#include <boost/simd/constant/constants/eight.hpp>
#include <boost/dispatch/attributes.hpp>
namespace boost { namespace simd { namespace ext
{
BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
, boost::simd::tag::vmx_
, (A0)
, ((target_ < simd_ < int8_<A0>
, boost::simd::tag::vmx_
>
>
))
)
{
typedef typename A0::type result_type;
BOOST_FORCEINLINE result_type operator()(A0 const&) const<|fim▁hole|>
  // Eight constant for uint8 SIMD registers on AltiVec/VMX:
  // materialized with the vec_splat_u8 immediate-splat intrinsic.
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < uint8_<A0>
                                                        , boost::simd::tag::vmx_
                                                        >
                                       >
                                      ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_u8(8);
    }
  };
  // Eight constant for int16 SIMD registers on AltiVec/VMX (vec_splat_s16).
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < int16_<A0>
                                                        , boost::simd::tag::vmx_
                                                        >
                                       >
                                      ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_s16(8);
    }
  };
  // Eight constant for uint16 SIMD registers on AltiVec/VMX (vec_splat_u16).
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < uint16_<A0>
                                                        , boost::simd::tag::vmx_
                                                        >
                                       >
                                      ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_u16(8);
    }
  };
  // Eight constant for int32 SIMD registers on AltiVec/VMX (vec_splat_s32).
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < int32_<A0>
                                                        , boost::simd::tag::vmx_
                                                        >
                                       >
                                      ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_s32(8);
    }
  };
  // Eight constant for uint32 SIMD registers on AltiVec/VMX (vec_splat_u32).
  BOOST_SIMD_FUNCTOR_IMPLEMENTATION ( simd::tag::Eight
                                    , boost::simd::tag::vmx_
                                    , (A0)
                                    , ((target_ < simd_ < uint32_<A0>
                                                        , boost::simd::tag::vmx_
                                                        >
                                       >
                                      ))
                                    )
  {
    typedef typename A0::type result_type;
    BOOST_FORCEINLINE result_type operator()(A0 const&) const
    {
      return vec_splat_u32(8);
    }
  };
} } }
#endif
#endif<|fim▁end|> | {
return vec_splat_s8(8);
}
}; |
<|file_name|>fixed_interval.rs<|end_file_name|><|fim▁begin|>use std::iter::Iterator;
use std::time::Duration;
/// A retry strategy driven by a fixed interval.
#[derive(Debug, Clone)]
pub struct FixedInterval {
    /// The constant delay yielded for every retry attempt.
    duration: Duration,
}
impl FixedInterval {
/// Constructs a new fixed interval strategy.
pub fn new(duration: Duration) -> FixedInterval {
FixedInterval { duration: duration }
}
/// Constructs a new fixed interval strategy,
/// given a duration in milliseconds.
pub fn from_millis(millis: u64) -> FixedInterval {
FixedInterval {
duration: Duration::from_millis(millis),
}
}<|fim▁hole|>impl Iterator for FixedInterval {
type Item = Duration;
fn next(&mut self) -> Option<Duration> {
Some(self.duration)
}
}
#[test]
fn returns_some_fixed() {
let mut s = FixedInterval::new(Duration::from_millis(123));
assert_eq!(s.next(), Some(Duration::from_millis(123)));
assert_eq!(s.next(), Some(Duration::from_millis(123)));
assert_eq!(s.next(), Some(Duration::from_millis(123)));
}<|fim▁end|> | }
|
<|file_name|>arnoldi.py<|end_file_name|><|fim▁begin|># Copyright 2014-2021 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Extension to scipy.linalg module developed for PBC branch.
'''
import numpy as np
import scipy.linalg
def davidson_nosymm(matvec,size,nroots,Adiag=None):
    '''Davidson diagonalization method to solve A c = E c
    when A is not Hermitian.
    '''
    # The Arnoldi driver expects a two-argument multiply; wrap the
    # one-argument matvec and ignore the unused args parameter.
    def matvec_args(vec, args):
        return matvec(vec)
    n_requested = min(nroots, size)
    #if Adiag == None:
    #   Adiag = matvec(numpy.ones(size))

    # Starting guess and (currently unused) preconditioner placeholders:
    guess = np.ones((size, 1))
    precon = np.ones((size, 1))
    solver = Arnoldi(matvec_args, guess, precon, nroots=n_requested)
    return solver.solve()
VERBOSE = False
class Arnoldi:
    def __init__(self,matr_multiply,xStart,inPreCon,nroots=1,tol=1e-6):
        """Set up the non-Hermitian Arnoldi/Davidson solver.

        Args:
            matr_multiply: callable(vec, args) applying the matrix A to vec.
            xStart: starting vector; only its length (shape[0]) and real part
                are used here.
            inPreCon: preconditioner vector (stored; not used in this view).
            nroots: number of eigenpairs requested (clipped to matrix size).
            tol: residual-norm convergence threshold.
        """
        self.matrMultiply = matr_multiply
        self.size = xStart.shape[0]
        self.nEigen = min(nroots, self.size)
        self.maxM = min(30, self.size)  # maximum subspace dimension
        self.maxOuterLoop = 10
        self.tol = tol
        #
        # Creating initial guess and preconditioner
        #
        self.x0 = xStart.real.copy()
        self.iteration = 0
        self.totalIter = 0
        self.converged = False
        self.preCon = inPreCon.copy()
        #
        # Allocating other vectors
        #
        self.allocateVecs()
    def solve(self):
        """Drive the iteration and return (eigenvalues, eigenvectors).

        NOTE(review): the trailing print/return statements sit inside the
        while loop, so the method returns after one pass over the inner
        for-loop even if checkConvergence never set self.converged —
        confirm this is intended.
        """
        while self.converged == 0:
            if self.totalIter == 0:
                self.guessInitial()
            for i in range(self.maxM):
                if self.deflated == 1:
                    self.currentSize = self.nEigen
                if self.deflated == 0 and self.totalIter > 0:
                    # Expand the subspace: apply A to the newest basis vector
                    # and record the product before re-forming v* A v.
                    self.hMult()
                    self.push_Av()
                    self.constructSubspace()
                self.solveSubspace()
                self.constructSol()
                self.computeResidual()
                self.checkConvergence()
                self.deflated = 0
                if self.converged:
                    break
                self.updateVecs()
                self.checkDeflate()
                self.constructDeflatedSub()
                self.totalIter += 1
                self.currentSize += 1
            print("")
            print("Converged in %3d cycles" % self.totalIter)
            self.constructAllSolV()
            return self.outeigs, self.outevecs
    def allocateVecs(self):
        """Allocate all work arrays for a subspace of at most maxM vectors."""
        # Projected subspace matrix v* A v and its eigen-solution buffers.
        self.subH = np.zeros( shape=(self.maxM,self.maxM), dtype=complex )
        self.sol = np.zeros( shape=(self.maxM), dtype=complex )
        self.dgks = np.zeros( shape=(self.maxM), dtype=complex )
        self.nConv = np.zeros( shape=(self.maxM), dtype=int )
        self.eigs = np.zeros( shape=(self.maxM), dtype=complex )
        self.evecs = np.zeros( shape=(self.maxM,self.maxM), dtype=complex )
        self.oldeigs = np.zeros( shape=(self.maxM), dtype=complex )
        self.deigs = np.zeros( shape=(self.maxM), dtype=complex )
        # Final outputs: nEigen eigenvalues and full-length eigenvectors.
        self.outeigs = np.zeros( shape=(self.nEigen), dtype=complex )
        self.outevecs = np.zeros( shape=(self.size,self.nEigen), dtype=complex)
        self.currentSize = 0
        # Current vector, its image under A, residual, and the stored bases.
        self.Ax = np.zeros( shape=(self.size), dtype=complex )
        self.res = np.zeros( shape=(self.size), dtype=complex )
        self.vlist = np.zeros( shape=(self.maxM,self.size), dtype=complex )
        self.cv = np.zeros( shape = (self.size), dtype = complex )
        self.cAv = np.zeros( shape = (self.size), dtype = complex )
        self.Avlist = np.zeros( shape=(self.maxM,self.size), dtype=complex )
        # Sentinel values: "not yet computed" residual norms.
        self.dres = 999.9
        self.resnorm = 999.9
        self.cvEig = 0.1
        self.ciEig = 0
        self.deflated = 0
    def guessInitial(self):
        """Seed the basis with the first nEigen Cartesian unit vectors,
        compute their images under A, and build the initial subspace matrix.
        """
        # x0 is normalized here but not otherwise used below —
        # presumably legacy; verify before removing.
        nrm = np.linalg.norm(self.x0)
        self.x0 *= 1./nrm
        self.currentSize = self.nEigen
        for i in range(self.currentSize):
            # Basis vector i = e_i (complex unit vector).
            self.vlist[i] *= 0.0
            self.vlist[i,i] = 1.0 + 0.0*1j
            self.vlist[i] /= np.linalg.norm(self.vlist[i])
        for i in range(self.currentSize):
            self.cv = self.vlist[i].copy()
            self.hMult()
            self.Avlist[i] = self.cAv.copy()
        self.constructSubspace()
def hMult(self):
args = 0
self.cAv = self.matrMultiply(self.cv.reshape(self.size),args)
def push_Av(self):
self.Avlist[self.currentSize-1] = self.cAv.reshape(self.size)
    def constructSubspace(self):
        """Form (or incrementally extend) the projected matrix subH = v* A v."""
        if self.totalIter == 0 or self.deflated == 1: # construct the full block of v^*Av
            for i in range(self.currentSize):
                for j in range(self.currentSize):
                    val = np.vdot(self.vlist[i],self.Avlist[j])
                    self.subH[i,j] = val
        else:
            # Only the last row and column are new; fill just those.
            for j in range(self.currentSize):
                # NOTE(review): j <= currentSize-1 always holds inside this
                # range loop, so the first condition is effectively True.
                if j <= (self.currentSize-1):
                    val = np.vdot(self.vlist[j],self.Avlist[self.currentSize-1])
                    self.subH[j,self.currentSize-1] = val
                if j < (self.currentSize-1):
                    val = np.vdot(self.vlist[self.currentSize-1],self.Avlist[j])
                    self.subH[self.currentSize-1,j] = val
    def solveSubspace(self):
        """Diagonalize the projected matrix and cache the eigen-solution.

        Eigenpairs are sorted by ascending real part; only the real part of
        the eigenvalues is kept.
        """
        w, v = scipy.linalg.eig(self.subH[:self.currentSize,:self.currentSize])
        idx = w.real.argsort()
        #imag_norm = np.linalg.norm(w.imag)
        #if imag_norm > 1e-12:
        #    print " *************************************************** "
        #    print " WARNING IMAGINARY EIGENVALUE OF NORM %.15g " % (imag_norm)
        #    print " *************************************************** "
        #print "Imaginary norm eigenvectors = ", np.linalg.norm(v.imag)
        #print "Imaginary norm eigenvalue = ", np.linalg.norm(w.imag)
        v = v[:,idx]
        w = w[idx].real
        # sol holds the coefficients of the eigenvector currently being
        # converged (index ciEig).
        self.sol[:self.currentSize] = v[:,self.ciEig]
        self.evecs[:self.currentSize,:self.currentSize] = v
        self.eigs[:self.currentSize] = w[:self.currentSize]
        self.outeigs[:self.nEigen] = w[:self.nEigen]
        self.cvEig = self.eigs[self.ciEig]
def constructAllSolV(self):
for i in range(self.nEigen):<|fim▁hole|> self.outevecs[:,i] = self.cv
    def constructSol(self):
        """Rebuild both the Ritz vector (cv) and its image A*cv (cAv)."""
        self.constructSolV()
        self.constructSolAv()
def constructSolV(self):
self.cv = np.dot(self.vlist[:self.currentSize].transpose(),self.sol[:self.currentSize])
def constructSolAv(self):
self.cAv = np.dot(self.Avlist[:self.currentSize].transpose(),self.sol[:self.currentSize])
    def computeResidual(self):
        """Compute the residual r = A*cv - eig*cv, orthogonalize it against
        the current basis (twice, DGKS-style), and normalize it.

        Side effects: sets self.res, self.dres, self.resnorm; increments
        self.iteration; prints progress when VERBOSE.
        """
        self.res = self.cAv - self.cvEig * self.cv
        self.dres = np.vdot(self.res,self.res)**0.5
        #
        # gram-schmidt for residual vector
        #
        for i in range(self.currentSize):
            self.dgks[i] = np.vdot( self.vlist[i], self.res )
            self.res -= self.dgks[i]*self.vlist[i]
        #
        # second gram-schmidt to make them really orthogonal
        #
        for i in range(self.currentSize):
            self.dgks[i] = np.vdot( self.vlist[i], self.res )
            self.res -= self.dgks[i]*self.vlist[i]
        self.resnorm = np.linalg.norm(self.res)
        self.res /= self.resnorm
        # Diagnostic: how orthogonal the normalized residual is to the basis.
        orthog = 0.0
        for i in range(self.currentSize):
            orthog += np.vdot(self.res,self.vlist[i])**2.0
        orthog = orthog ** 0.5
        if not self.deflated:
            if VERBOSE:
                print("%3d %20.14f %20.14f %10.4g" % (self.ciEig, self.cvEig.real, self.resnorm.real, orthog.real))
        #else:
        #    print "%3d %20.14f %20.14f %20.14f (deflated)" % (self.ciEig, self.cvEig,
        #        self.resnorm, orthog)
        self.iteration += 1
def updateVecs(self):
self.vlist[self.currentSize] = self.res.copy()
self.cv = self.vlist[self.currentSize]
    def checkConvergence(self):
        """Advance to the next eigenpair when the residual is below tol;
        mark overall convergence once all nEigen pairs are done.
        """
        if self.resnorm < self.tol:
            if VERBOSE:
                print("Eigenvalue %3d converged! (res = %.15g)" % (self.ciEig, self.resnorm))
            self.ciEig += 1
            if self.ciEig == self.nEigen:
                self.converged = True
        # When one root converged but more remain, print a fresh table header.
        if self.resnorm < self.tol and not self.converged:
            if VERBOSE:
                print("")
                print("")
                print("%-3s %-20s %-20s %-8s" % ("#", " Eigenvalue", " Res. Norm.", " Ortho. (should be ~0)"))
    def gramSchmidtCurrentVec(self,northo):
        """Orthogonalize self.cv against the first *northo* basis vectors,
        then normalize it in place."""
        for i in range(northo):
            self.dgks[i] = np.vdot( self.vlist[i], self.cv )
            self.cv -= self.dgks[i]*self.vlist[i] #/ np.vdot(self.vlist[i],self.vlist[i])
        self.cv /= np.linalg.norm(self.cv)
    def checkDeflate(self):
        """When the subspace is full, restart (deflate): collapse the basis
        to the nEigen best Ritz vectors, re-orthogonalize them, and refresh
        their A-products.
        """
        if self.currentSize == self.maxM-1:
            self.deflated = 1
            #print "deflating..."
            for i in range(self.nEigen):
                self.sol[:self.currentSize] = self.evecs[:self.currentSize,i]
                # Finds the "best" eigenvector for this eigenvalue
                self.constructSolV()
                # Puts this guess in self.Avlist rather than self.vlist for now...
                # since this would mess up self.constructSolV()'s solution
                self.Avlist[i] = self.cv.copy()
            for i in range(self.nEigen):
                # This is actually the "best" eigenvector v, not A*v (see above)
                self.cv = self.Avlist[i].copy()
                self.gramSchmidtCurrentVec(i)
                self.vlist[i] = self.cv.copy()
            for i in range(self.nEigen):
                # This is actually the "best" eigenvector v, not A*v (see above)
                self.cv = self.vlist[i].copy()
                # Use current vector cv to create cAv
                self.hMult()
                self.Avlist[i] = self.cAv.copy()
def constructDeflatedSub(self):
if self.deflated == 1:
self.currentSize = self.nEigen
self.constructSubspace()<|fim▁end|> | self.sol[:] = self.evecs[:,i]
self.cv = np.dot(self.vlist[:self.currentSize].transpose(),self.sol[:self.currentSize]) |
<|file_name|>test_install.py<|end_file_name|><|fim▁begin|>import os
import textwrap
import glob
from os.path import join, curdir, pardir
import pytest
from pip.utils import appdirs, rmtree
from tests.lib import (pyversion, pyversion_tuple,
_create_test_package, _create_svn_repo, path_to_url)
from tests.lib.local_repos import local_checkout
from tests.lib.path import Path
def test_without_setuptools(script, data):
    """
    Installing from an sdist must fail with a clear error once setuptools
    has been uninstalled from the test environment.
    """
    script.run("pip", "uninstall", "setuptools", "-y")
    # Invoke pip via ``python -c`` so the whole install runs in a process
    # where setuptools is genuinely absent.
    result = script.run(
        "python", "-c",
        "import pip; pip.main(["
        "'install', "
        "'INITools==0.2', "
        "'-f', '%s', "
        "'--no-use-wheel'])" % data.packages,
        expect_error=True,
    )
    assert (
        "setuptools must be installed to install from a source distribution"
        in result.stderr
    )
def test_pip_second_command_line_interface_works(script, data):
"""
Check if ``pip<PYVERSION>`` commands behaves equally
"""
# On old versions of Python, urllib3/requests will raise a warning about
# the lack of an SSLContext.
kwargs = {}
if pyversion_tuple < (2, 7, 9):
kwargs['expect_stderr'] = True
args = ['pip%s' % pyversion]
args.extend(['install', 'INITools==0.2'])
args.extend(['-f', data.packages])<|fim▁hole|> egg_info_folder = (
script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
)
initools_folder = script.site_packages / 'initools'
assert egg_info_folder in result.files_created, str(result)
assert initools_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_from_pypi(script):
    """
    Test installing a package from PyPI.
    """
    result = script.pip('install', '-vvv', 'INITools==0.2')
    expected_egg_info = (
        script.site_packages / 'INITools-0.2-py%s.egg-info' % pyversion
    )
    expected_pkg_dir = script.site_packages / 'initools'
    assert expected_egg_info in result.files_created, str(result)
    assert expected_pkg_dir in result.files_created, str(result)
def test_editable_install(script):
    """
    Test editable installation.
    """
    # A bare requirement spec is not a valid editable target.
    result = script.pip('install', '-e', 'INITools==0.2', expect_error=True)
    expected_msg = (
        "INITools==0.2 should either be a path to a local project or a VCS url"
    )
    assert expected_msg in result.stderr
    assert not result.files_created
    assert not result.files_updated
def test_install_editable_from_svn(script):
    """
    Test checking out from svn.
    """
    # Build a throwaway package, publish it to a local svn repo, then
    # install it editable; the checkout must contain the .svn metadata dir.
    checkout_path = _create_test_package(script)
    repo_url = _create_svn_repo(script, checkout_path)
    result = script.pip(
        'install',
        '-e', 'svn+' + repo_url + '#egg=version-pkg'
    )
    result.assert_installed('version-pkg', with_files=['.svn'])
@pytest.mark.network
def test_download_editable_to_custom_path(script, tmpdir):
    """
    Test downloading an editable using a relative custom src folder.
    """
    script.scratch_path.join("customdl").mkdir()
    result = script.pip(
        'install',
        '-e',
        '%s#egg=initools-dev' %
        local_checkout(
            'svn+http://svn.colorstudy.com/INITools/trunk',
            tmpdir.join("cache")
        ),
        '--src',
        'customsrc',
        '--download',
        'customdl',
    )
    # The editable checkout must land in the relative --src directory...
    customsrc = Path('scratch') / 'customsrc' / 'initools'
    assert customsrc in result.files_created, (
        sorted(result.files_created.keys())
    )
    assert customsrc / 'setup.py' in result.files_created, (
        sorted(result.files_created.keys())
    )
    # ...and at least one archive/file in the --download directory.
    customdl = Path('scratch') / 'customdl' / 'initools'
    customdl_files_created = [
        filename for filename in result.files_created
        if filename.startswith(customdl)
    ]
    assert customdl_files_created
@pytest.mark.network
def test_install_dev_version_from_pypi(script):
    """
    Test using package==dev.
    """
    # ``===dev`` is an arbitrary-equality pin; external/unverified flags are
    # required because the dev release is hosted off PyPI.
    result = script.pip(
        'install', 'INITools===dev',
        '--allow-external', 'INITools',
        '--allow-unverified', 'INITools',
        expect_error=True,
    )
    assert (script.site_packages / 'initools') in result.files_created, (
        str(result.stdout)
    )
def _test_install_editable_from_git(script, tmpdir, wheel):
    """Test cloning from Git.

    Shared helper: when *wheel* is true the ``wheel`` package is installed
    first so pip can auto-build a wheel for the editable requirement.
    """
    if wheel:
        script.pip('install', 'wheel')
    pkg_path = _create_test_package(script, name='testpackage', vcs='git')
    # Plain keyword argument instead of the needless **{...} dict unpacking.
    result = script.pip(
        'install', '-e', 'git+%s#egg=testpackage' % path_to_url(pkg_path),
        expect_error=True,
    )
    result.assert_installed('testpackage', with_files=['.git'])
def test_install_editable_from_git(script, tmpdir):
    """Editable install from a local git repo (wheel=False path of the helper)."""
    _test_install_editable_from_git(script, tmpdir, False)
def test_install_editable_from_git_autobuild_wheel(script, tmpdir):
    """Editable install from a local git repo with wheel available (wheel=True)."""
    _test_install_editable_from_git(script, tmpdir, True)
def test_install_editable_from_hg(script, tmpdir):
    """Test cloning from Mercurial."""
    pkg_path = _create_test_package(script, name='testpackage', vcs='hg')
    # Plain keyword argument instead of the needless **{...} dict unpacking.
    result = script.pip(
        'install', '-e', 'hg+%s#egg=testpackage' % path_to_url(pkg_path),
        expect_error=True,
    )
    result.assert_installed('testpackage', with_files=['.hg'])
def test_vcs_url_final_slash_normalization(script, tmpdir):
    """
    Test that presence or absence of final slash in VCS URL is normalized.
    """
    pkg_path = _create_test_package(script, name='testpackage', vcs='hg')
    # Note the trailing slash before ``#egg`` — that is the case under test.
    # Plain keyword argument instead of the needless **{...} dict unpacking.
    result = script.pip(
        'install', '-e', 'hg+%s/#egg=testpackage' % path_to_url(pkg_path),
        expect_error=True,
    )
    result.assert_installed('testpackage', with_files=['.hg'])
def test_install_editable_from_bazaar(script, tmpdir):
    """Test checking out from Bazaar."""
    pkg_path = _create_test_package(script, name='testpackage', vcs='bazaar')
    # Plain keyword argument instead of the needless **{...} dict unpacking.
    result = script.pip(
        'install', '-e', 'bzr+%s/#egg=testpackage' % path_to_url(pkg_path),
        expect_error=True,
    )
    result.assert_installed('testpackage', with_files=['.bzr'])
@pytest.mark.network
def test_vcs_url_urlquote_normalization(script, tmpdir):
    """
    Test that urlquoted characters are normalized for repo URL comparison.
    """
    # %7E is the urlquoted form of '~'; the install must treat both spellings
    # of the repository URL as the same location.
    script.pip(
        'install', '-e',
        '%s/#egg=django-wikiapp' %
        local_checkout(
            'bzr+http://bazaar.launchpad.net/%7Edjango-wikiapp/django-wikiapp'
            '/release-0.1',
            tmpdir.join("cache"),
        ),
    )
def test_install_from_local_directory(script, data):
    """
    Test installing from a local directory.
    """
    pkg_src = data.packages.join("FSPkg")
    result = script.pip('install', pkg_src, expect_error=False)
    expected_pkg_dir = script.site_packages / 'fspkg'
    expected_egg_info = (
        script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
    )
    assert expected_pkg_dir in result.files_created, str(result.stdout)
    assert expected_egg_info in result.files_created, str(result)
def test_install_from_local_directory_with_symlinks_to_directories(
script, data):
"""
Test installing from a local directory containing symlinks to directories.
"""
to_install = data.packages.join("symlinks")
result = script.pip('install', to_install, expect_error=False)
pkg_folder = script.site_packages / 'symlinks'
egg_info_folder = (
script.site_packages / 'symlinks-0.1.dev0-py%s.egg-info' % pyversion
)
assert pkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
def test_install_from_local_directory_with_no_setup_py(script, data):
"""
Test installing from a local directory with no 'setup.py'.
"""
result = script.pip('install', data.root, expect_error=True)
assert not result.files_created
assert "is not installable. File 'setup.py' not found." in result.stderr
def test_editable_install_from_local_directory_with_no_setup_py(script, data):
"""
Test installing from a local directory with no 'setup.py'.
"""
result = script.pip('install', '-e', data.root, expect_error=True)
assert not result.files_created
assert "is not installable. File 'setup.py' not found." in result.stderr
def test_install_as_egg(script, data):
"""
Test installing as egg, instead of flat install.
"""
to_install = data.packages.join("FSPkg")
result = script.pip('install', to_install, '--egg', expect_error=False)
fspkg_folder = script.site_packages / 'fspkg'
egg_folder = script.site_packages / 'FSPkg-0.1.dev0-py%s.egg' % pyversion
assert fspkg_folder not in result.files_created, str(result.stdout)
assert egg_folder in result.files_created, str(result)
assert join(egg_folder, 'fspkg') in result.files_created, str(result)
def test_install_curdir(script, data):
"""
Test installing current directory ('.').
"""
run_from = data.packages.join("FSPkg")
# Python 2.4 Windows balks if this exists already
egg_info = join(run_from, "FSPkg.egg-info")
if os.path.isdir(egg_info):
rmtree(egg_info)
result = script.pip('install', curdir, cwd=run_from, expect_error=False)
fspkg_folder = script.site_packages / 'fspkg'
egg_info_folder = (
script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
def test_install_pardir(script, data):
"""
Test installing parent directory ('..').
"""
run_from = data.packages.join("FSPkg", "fspkg")
result = script.pip('install', pardir, cwd=run_from, expect_error=False)
fspkg_folder = script.site_packages / 'fspkg'
egg_info_folder = (
script.site_packages / 'FSPkg-0.1.dev0-py%s.egg-info' % pyversion
)
assert fspkg_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_global_option(script):
"""
Test using global distutils options.
(In particular those that disable the actual install action)
"""
result = script.pip(
'install', '--global-option=--version', "INITools==0.1",
)
assert '0.1\n' in result.stdout
def test_install_with_pax_header(script, data):
"""
test installing from a tarball with pax header for python<2.6
"""
script.pip('install', 'paxpkg.tar.bz2', cwd=data.packages)
def test_install_with_hacked_egg_info(script, data):
"""
test installing a package which defines its own egg_info class
"""
run_from = data.packages.join("HackedEggInfo")
result = script.pip('install', '.', cwd=run_from)
assert 'Successfully installed hackedegginfo-0.0.0\n' in result.stdout
@pytest.mark.network
def test_install_using_install_option_and_editable(script, tmpdir):
"""
Test installing a tool using -e and --install-option
"""
folder = 'script_folder'
script.scratch_path.join(folder).mkdir()
url = 'git+git://github.com/pypa/pip-test-package'
result = script.pip(
'install', '-e', '%s#egg=pip-test-package' %
local_checkout(url, tmpdir.join("cache")),
'--install-option=--script-dir=%s' % folder
)
script_file = (
script.venv / 'src' / 'pip-test-package' /
folder / 'pip-test-package' + script.exe
)
assert script_file in result.files_created
@pytest.mark.network
def test_install_global_option_using_editable(script, tmpdir):
"""
Test using global distutils options, but in an editable installation
"""
url = 'hg+http://bitbucket.org/runeh/anyjson'
result = script.pip(
'install', '--global-option=--version', '-e',
'%[email protected]#egg=anyjson' % local_checkout(url, tmpdir.join("cache"))
)
assert 'Successfully installed anyjson' in result.stdout
@pytest.mark.network
def test_install_package_with_same_name_in_curdir(script):
"""
Test installing a package with the same name of a local folder
"""
script.scratch_path.join("mock==0.6").mkdir()
result = script.pip('install', 'mock==0.6')
egg_folder = script.site_packages / 'mock-0.6.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
mock100_setup_py = textwrap.dedent('''\
from setuptools import setup
setup(name='mock',
version='100.1')''')
def test_install_folder_using_dot_slash(script):
"""
Test installing a folder using pip install ./foldername
"""
script.scratch_path.join("mock").mkdir()
pkg_path = script.scratch_path / 'mock'
pkg_path.join("setup.py").write(mock100_setup_py)
result = script.pip('install', './mock')
egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
def test_install_folder_using_slash_in_the_end(script):
r"""
Test installing a folder using pip install foldername/ or foldername\
"""
script.scratch_path.join("mock").mkdir()
pkg_path = script.scratch_path / 'mock'
pkg_path.join("setup.py").write(mock100_setup_py)
result = script.pip('install', 'mock' + os.path.sep)
egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
def test_install_folder_using_relative_path(script):
"""
Test installing a folder using pip install folder1/folder2
"""
script.scratch_path.join("initools").mkdir()
script.scratch_path.join("initools", "mock").mkdir()
pkg_path = script.scratch_path / 'initools' / 'mock'
pkg_path.join("setup.py").write(mock100_setup_py)
result = script.pip('install', Path('initools') / 'mock')
egg_folder = script.site_packages / 'mock-100.1-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
@pytest.mark.network
def test_install_package_which_contains_dev_in_name(script):
"""
Test installing package from pypi which contains 'dev' in name
"""
result = script.pip('install', 'django-devserver==0.0.4')
devserver_folder = script.site_packages / 'devserver'
egg_info_folder = (
script.site_packages / 'django_devserver-0.0.4-py%s.egg-info' %
pyversion
)
assert devserver_folder in result.files_created, str(result.stdout)
assert egg_info_folder in result.files_created, str(result)
def test_install_package_with_target(script):
"""
Test installing a package using pip install --target
"""
target_dir = script.scratch_path / 'target'
result = script.pip_install_local('-t', target_dir, "simple==1.0")
assert Path('scratch') / 'target' / 'simple' in result.files_created, (
str(result)
)
# Test repeated call without --upgrade, no files should have changed
result = script.pip_install_local(
'-t', target_dir, "simple==1.0", expect_stderr=True,
)
assert not Path('scratch') / 'target' / 'simple' in result.files_updated
# Test upgrade call, check that new version is installed
result = script.pip_install_local('--upgrade', '-t',
target_dir, "simple==2.0")
assert Path('scratch') / 'target' / 'simple' in result.files_updated, (
str(result)
)
egg_folder = (
Path('scratch') / 'target' / 'simple-2.0-py%s.egg-info' % pyversion)
assert egg_folder in result.files_created, (
str(result)
)
# Test install and upgrade of single-module package
result = script.pip_install_local('-t', target_dir, 'singlemodule==0.0.0')
singlemodule_py = Path('scratch') / 'target' / 'singlemodule.py'
assert singlemodule_py in result.files_created, str(result)
result = script.pip_install_local('-t', target_dir, 'singlemodule==0.0.1',
'--upgrade')
assert singlemodule_py in result.files_updated, str(result)
def test_install_package_with_root(script, data):
"""
Test installing a package using pip install --root
"""
root_dir = script.scratch_path / 'root'
result = script.pip(
'install', '--root', root_dir, '-f', data.find_links, '--no-index',
'simple==1.0',
)
normal_install_path = (
script.base_path / script.site_packages / 'simple-1.0-py%s.egg-info' %
pyversion
)
# use distutils to change the root exactly how the --root option does it
from distutils.util import change_root
root_path = change_root(
os.path.join(script.scratch, 'root'),
normal_install_path
)
assert root_path in result.files_created, str(result)
# skip on win/py3 for now, see issue #782
@pytest.mark.skipif("sys.platform == 'win32' and sys.version_info >= (3,)")
def test_install_package_that_emits_unicode(script, data):
"""
Install a package with a setup.py that emits UTF-8 output and then fails.
Refs https://github.com/pypa/pip/issues/326
"""
to_install = data.packages.join("BrokenEmitsUTF8")
result = script.pip(
'install', to_install, expect_error=True, expect_temp=True, quiet=True,
)
assert (
'FakeError: this package designed to fail on install' in result.stdout
)
assert 'UnicodeDecodeError' not in result.stdout
def test_install_package_with_utf8_setup(script, data):
"""Install a package with a setup.py that declares a utf-8 encoding."""
to_install = data.packages.join("SetupPyUTF8")
script.pip('install', to_install)
def test_install_package_with_latin1_setup(script, data):
"""Install a package with a setup.py that declares a latin-1 encoding."""
to_install = data.packages.join("SetupPyLatin1")
script.pip('install', to_install)
def test_url_req_case_mismatch_no_index(script, data):
"""
tar ball url requirements (with no egg fragment), that happen to have upper
case project names, should be considered equal to later requirements that
reference the project name using lower case.
tests/data/packages contains Upper-1.0.tar.gz and Upper-2.0.tar.gz
'requiresupper' has install_requires = ['upper']
"""
Upper = os.path.join(data.find_links, 'Upper-1.0.tar.gz')
result = script.pip(
'install', '--no-index', '-f', data.find_links, Upper, 'requiresupper'
)
# only Upper-1.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
def test_url_req_case_mismatch_file_index(script, data):
"""
tar ball url requirements (with no egg fragment), that happen to have upper
case project names, should be considered equal to later requirements that
reference the project name using lower case.
tests/data/packages3 contains Dinner-1.0.tar.gz and Dinner-2.0.tar.gz
'requiredinner' has install_requires = ['dinner']
This test is similar to test_url_req_case_mismatch_no_index; that test
tests behaviour when using "--no-index -f", while this one does the same
test when using "--index-url". Unfortunately this requires a different
set of packages as it requires a prepared index.html file and
subdirectory-per-package structure.
"""
Dinner = os.path.join(data.find_links3, 'Dinner', 'Dinner-1.0.tar.gz')
result = script.pip(
'install', '--index-url', data.find_links3, Dinner, 'requiredinner'
)
# only Upper-1.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
def test_url_incorrect_case_no_index(script, data):
"""
Same as test_url_req_case_mismatch_no_index, except testing for the case
where the incorrect case is given in the name of the package to install
rather than in a requirements file.
"""
result = script.pip(
'install', '--no-index', '-f', data.find_links, "upper",
)
# only Upper-2.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Upper-1.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
egg_folder = script.site_packages / 'Upper-2.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
def test_url_incorrect_case_file_index(script, data):
"""
Same as test_url_req_case_mismatch_file_index, except testing for the case
where the incorrect case is given in the name of the package to install
rather than in a requirements file.
"""
result = script.pip(
'install', '--index-url', data.find_links3, "dinner",
expect_stderr=True,
)
# only Upper-2.0.tar.gz should get installed.
egg_folder = script.site_packages / 'Dinner-1.0-py%s.egg-info' % pyversion
assert egg_folder not in result.files_created, str(result)
egg_folder = script.site_packages / 'Dinner-2.0-py%s.egg-info' % pyversion
assert egg_folder in result.files_created, str(result)
@pytest.mark.network
def test_compiles_pyc(script):
"""
Test installing with --compile on
"""
del script.environ["PYTHONDONTWRITEBYTECODE"]
script.pip("install", "--compile", "--no-use-wheel", "INITools==0.2")
# There are many locations for the __init__.pyc file so attempt to find
# any of them
exists = [
os.path.exists(script.site_packages_path / "initools/__init__.pyc"),
]
exists += glob.glob(
script.site_packages_path / "initools/__pycache__/__init__*.pyc"
)
assert any(exists)
@pytest.mark.network
def test_no_compiles_pyc(script, data):
"""
Test installing from wheel with --compile on
"""
del script.environ["PYTHONDONTWRITEBYTECODE"]
script.pip("install", "--no-compile", "--no-use-wheel", "INITools==0.2")
# There are many locations for the __init__.pyc file so attempt to find
# any of them
exists = [
os.path.exists(script.site_packages_path / "initools/__init__.pyc"),
]
exists += glob.glob(
script.site_packages_path / "initools/__pycache__/__init__*.pyc"
)
assert not any(exists)
def test_install_upgrade_editable_depending_on_other_editable(script):
script.scratch_path.join("pkga").mkdir()
pkga_path = script.scratch_path / 'pkga'
pkga_path.join("setup.py").write(textwrap.dedent("""
from setuptools import setup
setup(name='pkga',
version='0.1')
"""))
script.pip('install', '--editable', pkga_path)
result = script.pip('list')
assert "pkga" in result.stdout
script.scratch_path.join("pkgb").mkdir()
pkgb_path = script.scratch_path / 'pkgb'
pkgb_path.join("setup.py").write(textwrap.dedent("""
from setuptools import setup
setup(name='pkgb',
version='0.1',
install_requires=['pkga'])
"""))
script.pip('install', '--upgrade', '--editable', pkgb_path)
result = script.pip('list')
assert "pkgb" in result.stdout
def test_install_topological_sort(script, data):
args = ['install', 'TopoRequires4', '-f', data.packages]
res = str(script.pip(*args, expect_error=False))
order1 = 'TopoRequires, TopoRequires2, TopoRequires3, TopoRequires4'
order2 = 'TopoRequires, TopoRequires3, TopoRequires2, TopoRequires4'
assert order1 in res or order2 in res, res
def test_install_wheel_broken(script, data):
script.pip('install', 'wheel')
res = script.pip(
'install', '--no-index', '-f', data.find_links, 'wheelbroken',
expect_stderr=True)
assert "Successfully installed wheelbroken-0.1" in str(res), str(res)
def test_install_builds_wheels(script, data):
# NB This incidentally tests a local tree + tarball inputs
# see test_install_editable_from_git_autobuild_wheel for editable
# vcs coverage.
script.pip('install', 'wheel')
to_install = data.packages.join('requires_wheelbroken_upper')
res = script.pip(
'install', '--no-index', '-f', data.find_links,
to_install, expect_stderr=True)
expected = ("Successfully installed requires-wheelbroken-upper-0"
" upper-2.0 wheelbroken-0.1")
# Must have installed it all
assert expected in str(res), str(res)
root = appdirs.user_cache_dir('pip')
wheels = []
for top, dirs, files in os.walk(root):
wheels.extend(files)
# and built wheels for upper and wheelbroken
assert "Running setup.py bdist_wheel for upper" in str(res), str(res)
assert "Running setup.py bdist_wheel for wheelb" in str(res), str(res)
# But not requires_wheel... which is a local dir and thus uncachable.
assert "Running setup.py bdist_wheel for requir" not in str(res), str(res)
# wheelbroken has to run install
# into the cache
assert wheels != [], str(res)
# and installed from the wheel
assert "Running setup.py install for upper" not in str(res), str(res)
# the local tree can't build a wheel (because we can't assume that every
# build will have a suitable unique key to cache on).
assert "Running setup.py install for requires-wheel" in str(res), str(res)
# wheelbroken has to run install
assert "Running setup.py install for wheelb" in str(res), str(res)<|fim▁end|> | result = script.run(*args, **kwargs) |
<|file_name|>Function.ts<|end_file_name|><|fim▁begin|>function Sum(x: number, y: number) : void {
console.log('processNumKeyPairs: key = ' + key + ', value = ' + value)
return x + y;
}
let greeting = function() {
console.log("Hello TypeScript!");
};
let SumAnon = function(x: number, y: number) : number
{
return x + y;
}
function Greet(greeting: string, name?: string ) : string {
return greeting + ' ' + name + '!';
}
function terminateJob(jobId: string) {
return this.http.delete<IOperationResult<any>>();
}
function Greet2(name: string, greeting: string = "Hello") : string {
return greeting + ' ' + name + '!';
}
Greet(undefined, 'Steve');
let sumArrow = (x: number, y: number): number => {
return x + y
}
let Print = () => console.log("Hello TypeScript");
let sumShortArrow = (x: number, y: number) => x + y;
function Greet(greeting: string, ...names: string[]) {
return greeting + " " + names.join(", ") + "!";
}
<|fim▁hole|>}
function buildName(firstName: string, lastName?: string) {
if (lastName) return firstName + " " + lastName;
else return firstName;
}<|fim▁end|> | function Test(value: TestClass | TestClass2): value is TestClass {
return (<TestClass>value).someFunction !== undefined;
|
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|># Copyright 2021 ACSONE SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Mozaik Mass Mailing Access Rights",
"summary": """
New group: Mass Mailing Manager. Managers can edit
and unlink mass mailings.""",
"version": "14.0.1.0.0",
"license": "AGPL-3",
"author": "ACSONE SA/NV",
"website": "https://github.com/OCA/mozaik",
"depends": [
"mass_mailing",
],
"data": [<|fim▁hole|> "security/ir.model.access.csv",
"views/mailing_mailing.xml",
"views/mail_template.xml",
],
"demo": [],
}<|fim▁end|> | "security/groups.xml", |
<|file_name|>notification.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { ToasterService, ToasterConfig, Toast } from 'angular2-toaster/angular2-toaster';
@Injectable()
export class NotificationService {
constructor(private toasterService: ToasterService) {
}
public toasterconfig: ToasterConfig =
new ToasterConfig({
showCloseButton: true,
tapToDismiss: true,
timeout: 3000,
limit: 5,
positionClass: 'toast-top-right',
});
<|fim▁hole|>}<|fim▁end|> | popToast(type: any, title: string, body: string) {
this.toasterService.pop(type, title, body);
} |
<|file_name|>discoverer.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2014 Simon Jagoe and Enthought Ltd.
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
import logging
import os
from haas.plugins.discoverer import match_path
from haas.plugins.i_discoverer_plugin import IDiscovererPlugin
from .yaml_test_loader import YamlTestLoader
logger = logging.getLogger(__name__)
class RestTestDiscoverer(IDiscovererPlugin):
"""A ``haas`` test discovery plugin to generate Web API test cases from
YAML descriptions.
Parameters
----------
loader : haas.loader.Loader
The ``haas`` test loader.
"""
def __init__(self, loader, **kwargs):
super(RestTestDiscoverer, self).__init__(**kwargs)
self._loader = loader
self._yaml_loader = YamlTestLoader(loader)
@classmethod
def from_args(cls, args, arg_prefix, loader):
"""Construct the discoverer from parsed command line arguments.
Parameters
----------
args : argparse.Namespace
The ``argparse.Namespace`` containing parsed arguments.
arg_prefix : str
The prefix used for arguments beloning solely to this plugin.
loader : haas.loader.Loader
The test loader used to construct TestCase and TestSuite instances.
"""
return cls(loader)
@classmethod
def add_parser_arguments(cls, parser, option_prefix, dest_prefix):
"""Add options for the plugin to the main argument parser.
Parameters
----------
parser : argparse.ArgumentParser
The parser to extend
option_prefix : str
The prefix that option strings added by this plugin should use.
dest_prefix : str
The prefix that ``dest`` strings for options added by this
plugin should use.
"""
def discover(self, start, top_level_directory=None, pattern=None):
"""Discover YAML-formatted Web API tests.
Parameters
----------
start : str
Directory from which to recursively discover test cases.
top_level_directory : None
Ignored; for API compatibility with haas.
pattern : None
Ignored; for API compatibility with haas.
"""
if os.path.isdir(start):
start_directory = start
return self._discover_by_directory(start_directory)
elif os.path.isfile(start):
start_filepath = start
return self._discover_by_file(start_filepath)
return self._loader.create_suite()
def _discover_by_directory(self, start_directory):
"""Run test discovery in a directory.
Parameters
----------<|fim▁hole|> start_directory = os.path.abspath(start_directory)
tests = self._discover_tests(start_directory)
return self._loader.create_suite(list(tests))
def _discover_by_file(self, start_filepath):
"""Run test discovery on a single file.
Parameters
----------
start_filepath : str
The module file in which to start test discovery.
"""
start_filepath = os.path.abspath(start_filepath)
logger.debug('Discovering tests in file: start_filepath=%r',
start_filepath)
tests = self._load_from_file(start_filepath)
return self._loader.create_suite(list(tests))
def _load_from_file(self, filepath):
logger.debug('Loading tests from %r', filepath)
tests = self._yaml_loader.load_tests_from_file(filepath)
return self._loader.create_suite(tests)
def _discover_tests(self, start_directory):
pattern = 'test*.yml'
for curdir, dirnames, filenames in os.walk(start_directory):
logger.debug('Discovering tests in %r', curdir)
for filename in filenames:
filepath = os.path.join(curdir, filename)
if not match_path(filename, filepath, pattern):
logger.debug('Skipping %r', filepath)
continue
yield self._load_from_file(filepath)<|fim▁end|> | start_directory : str
The package directory in which to start test discovery.
""" |
<|file_name|>flags.rs<|end_file_name|><|fim▁begin|>use coll::options::{CursorType, FindOptions};
/// Represents the bit vector of options for an OP_REPLY message.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct OpReplyFlags {
pub cursor_not_found: bool, // Bit 0
pub query_failure: bool, // Bit 1
pub await_capable: bool, // Bit 3
// All bits remaining must be 0
}
impl OpReplyFlags {
/// Constructs a new struct from a bit vector of options.
///
/// # Return value
///
/// Returns the newly-created struct.
pub fn from_i32(i: i32) -> OpReplyFlags {
let cursor_not_found = (i & 1) != 0;
let query_failure = (i & (1 << 1)) != 0;
let await_capable = (i & (1 << 3)) != 0;
OpReplyFlags { cursor_not_found: cursor_not_found,
query_failure: query_failure,
await_capable: await_capable }
}
}
/// Represents the bit vector of options for an OP_UPDATE message.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct OpUpdateFlags {
pub upsert: bool, // Bit 0
pub multi_update: bool, // Bit 1
// All bits remaining must be 0
}
/// Represents the bit vector of flags for an OP_INSERT message.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct OpInsertFlags {
pub continue_on_error: bool, // Bit 0
// All bits remaining must be 0
}<|fim▁hole|>pub struct OpQueryFlags {
pub tailable_cursor: bool, // Bit 1
pub slave_ok: bool, // Bit 2
pub oplog_relay: bool, // Bit 3
pub no_cursor_timeout: bool, // Bit 4
pub await_data: bool, // Bit 5
pub exhaust: bool, // Bit 6
pub partial: bool, // Bit 7
// All bits remaining must be 0
}
impl OpUpdateFlags {
/// Constructs a new struct with all flags set to false.
///
/// # Return value
///
/// Returns the newly-created struct.
pub fn no_flags() -> OpUpdateFlags {
OpUpdateFlags { upsert: false, multi_update: false }
}
/// Gets the actual bit vector that the struct represents.
///
/// # Return value
///
/// Returns the bit vector as an i32.
pub fn to_i32(&self) -> i32 {
let mut i = 0 as i32;
if self.upsert {
i = 1;
}
if self.multi_update {
i |= 1 << 1;
}
i
}
}
impl OpInsertFlags {
/// Constructs a new struct with all flags set to false.
///
/// # Return value
///
/// Returns the newly-created struct.
pub fn no_flags() -> OpInsertFlags {
OpInsertFlags { continue_on_error: false }
}
/// Gets the actual bit vector that the struct represents.
///
/// # Return value
///
/// Returns the bit vector as an i32.
pub fn to_i32(&self) -> i32 {
if self.continue_on_error {
1
} else {
0
}
}
}
impl OpQueryFlags {
/// Constructs a new struct with all flags set to false.
///
/// # Return value
///
/// Returns the newly-created struct.
pub fn no_flags() -> OpQueryFlags {
OpQueryFlags { tailable_cursor: false, slave_ok: false,
oplog_relay: false, no_cursor_timeout: false,
await_data: false, exhaust: false, partial: false }
}
/// Constructs a new struct with flags based on a FindOptions struct.
///
/// # Arguments
///
/// options - Struct whose fields contain the flags to initialize the new
/// OpQueryFlags with
///
/// # Return value
///
/// Returns the newly created OpQueryFlags struct.
pub fn with_find_options<'a>(options: &'a FindOptions) -> OpQueryFlags {
OpQueryFlags {
tailable_cursor: options.cursor_type != CursorType::NonTailable,
slave_ok: false,
oplog_relay: options.op_log_replay,
no_cursor_timeout: options.no_cursor_timeout,
await_data: options.cursor_type == CursorType::TailableAwait,
exhaust: false,
partial: options.allow_partial_results,
}
}
/// Gets the actual bit vector that the struct represents.
///
/// # Return value
///
/// Returns the bit vector as an i32.
pub fn to_i32(&self) -> i32 {
let mut i = 0 as i32;
if self.tailable_cursor {
i |= 1 << 1;
}
if self.slave_ok {
i |= 1 << 2;
}
if self.oplog_relay {
i |= 1 << 3;
}
if self.no_cursor_timeout {
i |= 1 << 4;
}
if self.await_data {
i |= 1 << 5;
}
if self.exhaust {
i |= 1 << 6;
}
if self.partial {
i |= 1 << 7;
}
i
}
}<|fim▁end|> |
/// Represents the bit vector of flags for an OP_QUERY message.
#[derive(Clone, Copy, Debug, PartialEq, Eq)] |
<|file_name|>JsonEntity.java<|end_file_name|><|fim▁begin|>/**
*
*/
package com.eclipsesource.gerrit.plugins.fileattachment.api.entities;
/**
* Represents an JSON entity that is passed between the client and the server.
* This is the base interface that should be used for all JSON entities.
*
* @author Florian Zoubek<|fim▁hole|> */
public interface JsonEntity {
}<|fim▁end|> | * |
<|file_name|>consts.rs<|end_file_name|><|fim▁begin|>#![allow(dead_code)]
use winapi::*;
//winsdk 7.0A windowx.h
//pub const WM_CTLCOLOR: UINT = 0x0019;
//winsdk 7.0A OleCtl.h
pub const OCM__BASE: UINT = WM_USER + 0x1c00;
pub const OCM_COMMAND: UINT = OCM__BASE + WM_COMMAND;
<|fim▁hole|>pub const OCM_CTLCOLORDLG: UINT = OCM__BASE + WM_CTLCOLORDLG;
pub const OCM_CTLCOLORLISTBOX: UINT = OCM__BASE + WM_CTLCOLORLISTBOX;
pub const OCM_CTLCOLORMSGBOX: UINT = OCM__BASE + WM_CTLCOLORMSGBOX;
pub const OCM_CTLCOLORSCROLLBAR:UINT = OCM__BASE + WM_CTLCOLORSCROLLBAR;
pub const OCM_CTLCOLORSTATIC: UINT = OCM__BASE + WM_CTLCOLORSTATIC;
//pub const OCM_CTLCOLOR: UINT = OCM__BASE + WM_CTLCOLOR;
pub const OCM_DRAWITEM: UINT = OCM__BASE + WM_DRAWITEM;
pub const OCM_MEASUREITEM: UINT = OCM__BASE + WM_MEASUREITEM;
pub const OCM_DELETEITEM: UINT = OCM__BASE + WM_DELETEITEM;
pub const OCM_VKEYTOITEM: UINT = OCM__BASE + WM_VKEYTOITEM;
pub const OCM_CHARTOITEM: UINT = OCM__BASE + WM_CHARTOITEM;
pub const OCM_COMPAREITEM: UINT = OCM__BASE + WM_COMPAREITEM;
pub const OCM_HSCROLL: UINT = OCM__BASE + WM_HSCROLL;
pub const OCM_VSCROLL: UINT = OCM__BASE + WM_VSCROLL;
pub const OCM_PARENTNOTIFY: UINT = OCM__BASE + WM_PARENTNOTIFY;
pub const OCM_NOTIFY: UINT = OCM__BASE + WM_NOTIFY;
pub const WINSTATE_DESTROYED: DWORD = 0x00000001;
pub const DWLP_MSGRESULT: LRESULT = 0;<|fim▁end|> | pub const OCM_CTLCOLORBTN: UINT = OCM__BASE + WM_CTLCOLORBTN;
pub const OCM_CTLCOLOREDIT: UINT = OCM__BASE + WM_CTLCOLOREDIT; |
<|file_name|>static_virtual_machine.py<|end_file_name|><|fim▁begin|># Copyright 2014 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class to represent a Static Virtual Machine object.
All static VMs provided in a given group will be used before any non-static
VMs are provisioned. For example, in a test that uses 4 VMs, if 3 static VMs
are provided, all of them will be used and one additional non-static VM
will be provisioned. The VM's should be set up with passwordless ssh and
passwordless sudo (neither sshing nor running a sudo command should prompt
the user for a password).
All VM specifics are self-contained and the class provides methods to
operate on the VM: boot, shutdown, etc.
"""
import collections
import json
import logging
import threading
from perfkitbenchmarker import disk
from perfkitbenchmarker import flags
from perfkitbenchmarker import linux_virtual_machine
from perfkitbenchmarker import virtual_machine
from perfkitbenchmarker import windows_virtual_machine
WINDOWS = 'windows'
DEBIAN = 'debian'
RHEL = 'rhel'
UBUNTU_CONTAINER = 'ubuntu_container'
FLAGS = flags.FLAGS
class StaticVmSpec(virtual_machine.BaseVmSpec):
"""Object containing all info needed to create a Static VM."""
def __init__(self, ip_address=None, user_name=None, ssh_private_key=None,
internal_ip=None, ssh_port=22, install_packages=True,
password=None, disk_specs=None, os_type=None, **kwargs):
"""Initialize the StaticVmSpec object.
Args:
ip_address: The public ip address of the VM.
user_name: The username of the VM that the keyfile corresponds to.
ssh_private_key: The absolute path to the private keyfile to use to ssh
to the VM.
internal_ip: The internal ip address of the VM.
ssh_port: The port number to use for SSH and SCP commands.
install_packages: If false, no packages will be installed. This is
useful if benchmark dependencies have already been installed.
password: The password used to log into the VM (Windows Only).
disk_specs: A list of dictionaries containing kwargs used to create
disk.BaseDiskSpecs.
os_type: The OS type of the VM. See the flag of the same name for more
information.
"""
super(StaticVmSpec, self).__init__(**kwargs)
self.ip_address = ip_address
self.user_name = user_name
self.ssh_private_key = ssh_private_key
self.internal_ip = internal_ip
self.ssh_port = ssh_port
self.install_packages = install_packages
self.password = password
self.os_type = os_type
self.disk_specs = disk_specs
class StaticDisk(disk.BaseDisk):
  """Object representing a static Disk.

  Every lifecycle hook below is an intentional no-op: a static disk is
  pre-provisioned, so there is nothing to create, delete, attach or detach.
  """

  def _Create(self):
    """StaticDisks don't implement _Create()."""
    pass

  def _Delete(self):
    """StaticDisks don't implement _Delete()."""
    pass

  def Attach(self):
    """StaticDisks don't implement Attach()."""
    pass

  def Detach(self):
    """StaticDisks don't implement Detach()."""
    pass
class StaticVirtualMachine(virtual_machine.BaseVirtualMachine):
"""Object representing a Static Virtual Machine."""
is_static = True
vm_pool = collections.deque()
vm_pool_lock = threading.Lock()
def __init__(self, vm_spec):
"""Initialize a static virtual machine.
Args:
vm_spec: A StaticVmSpec object containing arguments.
"""
super(StaticVirtualMachine, self).__init__(vm_spec, None, None)
self.ip_address = vm_spec.ip_address
self.user_name = vm_spec.user_name
self.ssh_private_key = vm_spec.ssh_private_key
self.internal_ip = vm_spec.internal_ip
self.zone = self.zone or ('Static - %s@%s' % (self.user_name,
self.ip_address))<|fim▁hole|> if vm_spec.disk_specs:
for spec in vm_spec.disk_specs:
self.disk_specs.append(disk.BaseDiskSpec(**spec))
self.from_pool = False
def _Create(self):
  """StaticVirtualMachines do not implement _Create().

  The machine already exists outside of PerfKit Benchmarker's control, so
  provisioning is a no-op.
  """
  pass
def _Delete(self):
  """Returns the virtual machine to the pool.

  Only machines that were checked out via GetStaticVirtualMachine have
  from_pool set, so ad-hoc instances are never pushed into the pool.
  appendleft pairs with GetStaticVirtualMachine's popleft, so a returned
  VM is handed out again first.
  """
  if self.from_pool:
    with self.vm_pool_lock:
      self.vm_pool.appendleft(self)
def CreateScratchDisk(self, disk_spec):
  """Create a VM's scratch disk.

  NOTE(review): the disk_spec argument is ignored here; the spec is instead
  selected positionally from self.disk_specs (one per disk already
  created), keyed by how many scratch disks exist so far.

  Args:
    disk_spec: virtual_machine.BaseDiskSpec object of the disk.
  """
  spec = self.disk_specs[len(self.scratch_disks)]
  self.scratch_disks.append(StaticDisk(spec))
def DeleteScratchDisks(self):
  """StaticVirtualMachines do not delete scratch disks.

  Disks on a static VM are pre-provisioned, so teardown is a no-op.
  """
  pass
def GetLocalDisks(self):
  """Returns a list of local disks on the VM.

  Only disk specs that carry a device_path contribute; mount-point-only
  specs (scratch disks) are skipped.
  """
  device_paths = []
  for spec in self.disk_specs:
    if spec.device_path:
      device_paths.append(spec.device_path)
  return device_paths
@classmethod
def ReadStaticVirtualMachineFile(cls, file_obj):
  """Read a file describing the static VMs to use.

  This function will read the static VM information from the provided file,
  instantiate VMs corresponding to the info, and add the VMs to the static
  VM pool. The provided file should contain a single array in JSON-format.
  Each element in the array must be an object with required format:

    ip_address: string.
    user_name: string.
    keyfile_path: string.
    ssh_port: integer, optional. Default 22
    internal_ip: string, optional.
    zone: string, optional.
    local_disks: array of strings, optional.
    scratch_disk_mountpoints: array of strings, optional
    os_type: string, optional (see package_managers)
    install_packages: bool, optional

  Args:
    file_obj: An open handle to a file containing the static VM info.

  Raises:
    ValueError: On missing required keys, or invalid keys.
  """
  vm_arr = json.load(file_obj)

  if not isinstance(vm_arr, list):
    raise ValueError('Invalid static VM file. Expected array, got: %s.' %
                     type(vm_arr))

  # Keys required for every OS; Linux flavors additionally need an SSH key,
  # while Windows authenticates with a password instead.
  required_keys = frozenset(['ip_address', 'user_name'])
  linux_required_keys = required_keys | frozenset(['keyfile_path'])
  required_keys_by_os = {
      WINDOWS: required_keys | frozenset(['password']),
      DEBIAN: linux_required_keys,
      RHEL: linux_required_keys,
      UBUNTU_CONTAINER: linux_required_keys,
  }
  # Validation is keyed off the global --os_type flag, not each item's own
  # 'os_type' entry.
  required_keys = required_keys_by_os[FLAGS.os_type]

  optional_keys = frozenset(['internal_ip', 'zone', 'local_disks',
                             'scratch_disk_mountpoints', 'os_type',
                             'ssh_port', 'install_packages'])
  allowed_keys = required_keys | optional_keys

  def VerifyItemFormat(item):
    """Verify that the decoded JSON object matches the required schema."""
    item_keys = frozenset(item)
    extra_keys = sorted(item_keys - allowed_keys)
    missing_keys = required_keys - item_keys
    if extra_keys:
      raise ValueError('Unexpected keys: {0}'.format(', '.join(extra_keys)))
    elif missing_keys:
      raise ValueError('Missing required keys: {0}'.format(
          ', '.join(missing_keys)))

  for item in vm_arr:
    VerifyItemFormat(item)

    ip_address = item['ip_address']
    user_name = item['user_name']
    keyfile_path = item.get('keyfile_path')
    internal_ip = item.get('internal_ip')
    zone = item.get('zone')
    local_disks = item.get('local_disks', [])
    password = item.get('password')

    if not isinstance(local_disks, list):
      raise ValueError('Expected a list of local disks, got: {0}'.format(
          local_disks))
    scratch_disk_mountpoints = item.get('scratch_disk_mountpoints', [])
    if not isinstance(scratch_disk_mountpoints, list):
      raise ValueError(
          'Expected a list of disk mount points, got: {0}'.format(
              scratch_disk_mountpoints))
    ssh_port = item.get('ssh_port', 22)
    os_type = item.get('os_type')
    install_packages = item.get('install_packages', True)

    # Windows and non-Windows VMs cannot be mixed within a single run.
    if ((os_type == WINDOWS and FLAGS.os_type != WINDOWS) or
        (os_type != WINDOWS and FLAGS.os_type == WINDOWS)):
      raise ValueError('Please only use Windows VMs when using '
                       '--os_type=windows and vice versa.')

    # Scratch disks are described by mount point; local disks by device path.
    disk_kwargs_list = []
    for path in scratch_disk_mountpoints:
      disk_kwargs_list.append({'mount_point': path})
    for local_disk in local_disks:
      disk_kwargs_list.append({'device_path': local_disk})

    vm_spec = StaticVmSpec(
        ip_address=ip_address, user_name=user_name, ssh_port=ssh_port,
        install_packages=install_packages, ssh_private_key=keyfile_path,
        internal_ip=internal_ip, zone=zone, disk_specs=disk_kwargs_list,
        password=password)

    vm_class = GetStaticVmClass(os_type)
    vm = vm_class(vm_spec)
    cls.vm_pool.append(vm)
@classmethod
def GetStaticVirtualMachine(cls):
  """Pull a Static VM from the pool of static VMs.

  If there are no VMs left in the pool, the method will return None.

  Returns:
    A static VM from the pool, or None if there are no static VMs left.
  """
  with cls.vm_pool_lock:
    if not cls.vm_pool:
      return None
    vm = cls.vm_pool.popleft()
    vm.from_pool = True
    return vm
def GetStaticVmClass(os_type):
  """Returns the static VM class that corresponds to the os_type.

  Unknown (or missing) os_type values fall back to the Debian class after
  logging a warning.
  """
  class_dict = {
      DEBIAN: DebianBasedStaticVirtualMachine,
      RHEL: RhelBasedStaticVirtualMachine,
      WINDOWS: WindowsBasedStaticVirtualMachine,
      UBUNTU_CONTAINER: ContainerizedStaticVirtualMachine,
  }
  vm_class = class_dict.get(os_type)
  if vm_class is None:
    logging.warning('Could not find os type for VM. Defaulting to debian.')
    return DebianBasedStaticVirtualMachine
  return vm_class
class ContainerizedStaticVirtualMachine(
    StaticVirtualMachine, linux_virtual_machine.ContainerizedDebianMixin):
  """Static VM whose commands run inside a Debian container."""
  pass


class DebianBasedStaticVirtualMachine(StaticVirtualMachine,
                                      linux_virtual_machine.DebianMixin):
  """Static VM running a Debian-based OS."""
  pass


class RhelBasedStaticVirtualMachine(StaticVirtualMachine,
                                    linux_virtual_machine.RhelMixin):
  """Static VM running a RHEL-based OS."""
  pass


class WindowsBasedStaticVirtualMachine(StaticVirtualMachine,
                                       windows_virtual_machine.WindowsMixin):
  """Static VM running Windows."""
  pass
self.install_packages = vm_spec.install_packages
self.password = vm_spec.password
|
<|file_name|>logger.ts<|end_file_name|><|fim▁begin|>export type LogLevel = 'debug' | 'info' | 'warn' | 'error' | 'profile';
export const LogLevels: LogLevel[] = ['debug', 'info', 'warn', 'error', 'profile'];
export function isLogLevel(arg: any): arg is LogLevel {
if (typeof arg !== "string") {
return false;
}
for (const level of LogLevels) {<|fim▁hole|> }
return false;
}
export interface TaggedLogger {
log(level: LogLevel, ...args: any[]): void;
debug(...args: any[]): void;
info(...args: any[]): void;
warn(...args: any[]): void;
error(...args: any[]): void;
profile(...args: any[]): void;
}
export interface Logger {
tag(...tags: string[]): TaggedLogger;
}<|fim▁end|> | if (level === arg) {
return true;
} |
<|file_name|>userModel.js<|end_file_name|><|fim▁begin|>var Backbone = require('backbone');
module.exports = Backbone.Model.extend({
defaults: {
"name": "",
"email": "",
"phone": ""<|fim▁hole|> },
url: '/users'
});<|fim▁end|> | |
<|file_name|>carbohydrates_on_board.go<|end_file_name|><|fim▁begin|>package dosingdecision
import (
"time"
"github.com/tidepool-org/platform/structure"
)
const (
	// Inclusive bounds accepted by Validate for the carbohydrates-on-board
	// amount.
	CarbohydratesOnBoardAmountMaximum = 1000
	CarbohydratesOnBoardAmountMinimum = 0
)
// CarbohydratesOnBoard is the carbohydrates-on-board reading attached to a
// dosing decision; both fields are optional and omitted when nil.
type CarbohydratesOnBoard struct {
	Time   *time.Time `json:"time,omitempty" bson:"time,omitempty"`
	Amount *float64   `json:"amount,omitempty" bson:"amount,omitempty"`
}
return nil
}
datum := NewCarbohydratesOnBoard()
parser.Parse(datum)
return datum
}
// NewCarbohydratesOnBoard returns an empty CarbohydratesOnBoard value.
func NewCarbohydratesOnBoard() *CarbohydratesOnBoard {
	return &CarbohydratesOnBoard{}
}
// Parse reads the "time" (RFC 3339 with nanoseconds) and "amount" keys from
// the object parser into the receiver.
func (c *CarbohydratesOnBoard) Parse(parser structure.ObjectParser) {
	c.Time = parser.Time("time", time.RFC3339Nano)
	c.Amount = parser.Float64("amount")
}
// Validate requires Amount to be present and within the
// [CarbohydratesOnBoardAmountMinimum, CarbohydratesOnBoardAmountMaximum]
// range; Time is not validated here.
func (c *CarbohydratesOnBoard) Validate(validator structure.Validator) {
	validator.Float64("amount", c.Amount).Exists().InRange(CarbohydratesOnBoardAmountMinimum, CarbohydratesOnBoardAmountMaximum)
}
func ParseCarbohydratesOnBoard(parser structure.ObjectParser) *CarbohydratesOnBoard { |
<|file_name|>transformer.cpp<|end_file_name|><|fim▁begin|>/*
*** Transformer
*** src/base/transformer.cpp
Copyright T. Youngs 2013-2015
This file is part of uChroma.
uChroma is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
uChroma is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with uChroma. If not, see <http://www.gnu.org/licenses/>.
*/
#include "base/transformer.h"
#include "expression/variable.h"
#include "templates/array.h"
// Constructor
Transformer::Transformer()
{
// Add permanent variable trio to equation
x_ = equation_.createVariable("x", NULL, true);
y_ = equation_.createVariable("y", NULL, true);
z_ = equation_.createVariable("z", NULL, true);
valid_ = false;
}
// Destructor
Transformer::~Transformer()
{
}
// Copy constructor
Transformer::Transformer(const Transformer& source)
{
(*this) = source;
}
// Assignment operator
void Transformer::operator=(const Transformer& source)
{
// Set equation from old expression
setEquation(source.text_);
enabled_ = source.enabled_;
}
// Set whether transform is enabled
void Transformer::setEnabled(bool b)
{
enabled_ = b;
}
// Return whether transform is enabled
bool Transformer::enabled()
{
return enabled_;
}
// Set equation, returning if Tree construction was successful
bool Transformer::setEquation(QString equation)
{
	// Keep the source text even if generation fails — text() always returns
	// the last string supplied here.
	text_ = equation;
	valid_ = equation_.generate(equation);
	return valid_;
}
// Return text used to generate last equation_
QString Transformer::text()
{
return text_;
}
// Return whether current equation is valid
bool Transformer::valid()
{
return valid_;
}
// Transform single value: evaluate the stored equation with the x/y/z
// variables bound to the supplied coordinates.
double Transformer::transform(double x, double y, double z)
{
	// If equation is not valid, just return
	if (!valid_)
	{
		msg.print("Equation is not valid, so returning 0.0.\n");
		return 0.0;
	}

	// Bind the coordinates to the permanent x/y/z equation variables.
	x_->set(x);
	y_->set(y);
	z_->set(z);

	bool success;
	// NOTE(review): 'success' is never inspected — the result of a failed
	// execution is returned as-is. Confirm this is intended.
	return equation_.execute(success);
}
// Transform whole array, including application of pre/post transform shift<|fim▁hole|> // If transform is not enabled, return original array
if (!enabled_) return (target == 0 ? sourceX : sourceY);
// If equation is not valid, just return original array
if (!valid_)
{
msg.print("Equation is not valid, so returning original array.\n");
return (target == 0 ? sourceX : sourceY);
}
if (sourceX.nItems() != sourceY.nItems())
{
msg.print("Error in Transformer::transformArray() - x and y array sizes do not match.\n");
return Array<double>();
}
// Create new array, and create reference to target array
Array<double> newArray(sourceX.nItems());
z_->set(z);
bool success;
// Loop over x points
for (int n=0; n<sourceX.nItems(); ++n)
{
// Set x and y values in equation
x_->set(sourceX[n]);
y_->set(sourceY[n]);
newArray[n] = equation_.execute(success);
if (!success) break;
}
return newArray;
}<|fim▁end|> | Array<double> Transformer::transformArray(Array<double> sourceX, Array<double> sourceY, double z, int target)
{ |
<|file_name|>sse.cc<|end_file_name|><|fim▁begin|>/////////////////////////////////////////////////6////////////////////////
// $Id: sse.cc 11984 2013-12-01 22:21:55Z sshwarts $
/////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2003-2013 Stanislav Shwartsman
// Written by Stanislav Shwartsman [sshwarts at sourceforge net]
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
//  Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
//
/////////////////////////////////////////////////////////////////////////
#define NEED_CPU_REG_SHORTCUTS 1
#include "bochs.h"
#include "cpu.h"
#define LOG_THIS BX_CPU_THIS_PTR
/* ********************************************** */
/* SSE Integer Operations (128bit MMX extensions) */
/* ********************************************** */
#if BX_CPU_LEVEL >= 6
#include "simd_int.h"
#include "simd_compare.h"
#define SSE_2OP(HANDLER, func) \
/* SSE instruction with two src operands */ \
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C :: HANDLER (bxInstruction_c *i) \
{ \
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->dst()), op2 = BX_READ_XMM_REG(i->src()); \
(func)(&op1, &op2); \
BX_WRITE_XMM_REG(i->dst(), op1); \
\
BX_NEXT_INSTR(i); \
}
SSE_2OP(PHADDW_VdqWdqR, xmm_phaddw)
SSE_2OP(PHADDSW_VdqWdqR, xmm_phaddsw)
SSE_2OP(PHADDD_VdqWdqR, xmm_phaddd)
SSE_2OP(PHSUBW_VdqWdqR, xmm_phsubw)
SSE_2OP(PHSUBSW_VdqWdqR, xmm_phsubsw)
SSE_2OP(PHSUBD_VdqWdqR, xmm_phsubd)
SSE_2OP(PSIGNB_VdqWdqR, xmm_psignb)
SSE_2OP(PSIGNW_VdqWdqR, xmm_psignw)
SSE_2OP(PSIGND_VdqWdqR, xmm_psignd)
SSE_2OP(PCMPEQQ_VdqWdqR, xmm_pcmpeqq)
SSE_2OP(PCMPGTQ_VdqWdqR, xmm_pcmpgtq)
SSE_2OP(PMINSB_VdqWdqR, xmm_pminsb)
SSE_2OP(PMINSD_VdqWdqR, xmm_pminsd)
SSE_2OP(PMINUW_VdqWdqR, xmm_pminuw)
SSE_2OP(PMINUD_VdqWdqR, xmm_pminud)
SSE_2OP(PMAXSB_VdqWdqR, xmm_pmaxsb)
SSE_2OP(PMAXSD_VdqWdqR, xmm_pmaxsd)
SSE_2OP(PMAXUW_VdqWdqR, xmm_pmaxuw)
SSE_2OP(PMAXUD_VdqWdqR, xmm_pmaxud)
SSE_2OP(PACKUSDW_VdqWdqR, xmm_packusdw)
SSE_2OP(PMULLD_VdqWdqR, xmm_pmulld)
SSE_2OP(PMULDQ_VdqWdqR, xmm_pmuldq)
SSE_2OP(PMULHRSW_VdqWdqR, xmm_pmulhrsw)
SSE_2OP(PMADDUBSW_VdqWdqR, xmm_pmaddubsw)
#endif // BX_CPU_LEVEL >= 6
#if BX_CPU_LEVEL >= 6
#define SSE_2OP_CPU_LEVEL6(HANDLER, func) \
SSE_2OP(HANDLER, func)
#else
#define SSE_2OP_CPU_LEVEL6(HANDLER, func) \
/* SSE instruction with two src operands */ \
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C :: HANDLER (bxInstruction_c *i) \
{ \
BX_NEXT_INSTR(i); \
}
#endif
SSE_2OP_CPU_LEVEL6(PMINUB_VdqWdqR, xmm_pminub)
SSE_2OP_CPU_LEVEL6(PMINSW_VdqWdqR, xmm_pminsw)
SSE_2OP_CPU_LEVEL6(PMAXUB_VdqWdqR, xmm_pmaxub)
SSE_2OP_CPU_LEVEL6(PMAXSW_VdqWdqR, xmm_pmaxsw)
SSE_2OP_CPU_LEVEL6(PAVGB_VdqWdqR, xmm_pavgb)
SSE_2OP_CPU_LEVEL6(PAVGW_VdqWdqR, xmm_pavgw)
SSE_2OP_CPU_LEVEL6(PCMPEQB_VdqWdqR, xmm_pcmpeqb)
SSE_2OP_CPU_LEVEL6(PCMPEQW_VdqWdqR, xmm_pcmpeqw)
SSE_2OP_CPU_LEVEL6(PCMPEQD_VdqWdqR, xmm_pcmpeqd)
SSE_2OP_CPU_LEVEL6(PCMPGTB_VdqWdqR, xmm_pcmpgtb)
SSE_2OP_CPU_LEVEL6(PCMPGTW_VdqWdqR, xmm_pcmpgtw)
SSE_2OP_CPU_LEVEL6(PCMPGTD_VdqWdqR, xmm_pcmpgtd)
SSE_2OP_CPU_LEVEL6(ANDPS_VpsWpsR, xmm_andps)
SSE_2OP_CPU_LEVEL6(ANDNPS_VpsWpsR, xmm_andnps)
SSE_2OP_CPU_LEVEL6(ORPS_VpsWpsR, xmm_orps)
SSE_2OP_CPU_LEVEL6(XORPS_VpsWpsR, xmm_xorps)
SSE_2OP_CPU_LEVEL6(PSUBB_VdqWdqR, xmm_psubb)
SSE_2OP_CPU_LEVEL6(PSUBW_VdqWdqR, xmm_psubw)
SSE_2OP_CPU_LEVEL6(PSUBD_VdqWdqR, xmm_psubd)
SSE_2OP_CPU_LEVEL6(PSUBQ_VdqWdqR, xmm_psubq)
SSE_2OP_CPU_LEVEL6(PADDB_VdqWdqR, xmm_paddb)
SSE_2OP_CPU_LEVEL6(PADDW_VdqWdqR, xmm_paddw)
SSE_2OP_CPU_LEVEL6(PADDD_VdqWdqR, xmm_paddd)
SSE_2OP_CPU_LEVEL6(PADDQ_VdqWdqR, xmm_paddq)
SSE_2OP_CPU_LEVEL6(PSUBSB_VdqWdqR, xmm_psubsb)
SSE_2OP_CPU_LEVEL6(PSUBUSB_VdqWdqR, xmm_psubusb)
SSE_2OP_CPU_LEVEL6(PSUBSW_VdqWdqR, xmm_psubsw)
SSE_2OP_CPU_LEVEL6(PSUBUSW_VdqWdqR, xmm_psubusw)
SSE_2OP_CPU_LEVEL6(PADDSB_VdqWdqR, xmm_paddsb)
SSE_2OP_CPU_LEVEL6(PADDUSB_VdqWdqR, xmm_paddusb)
SSE_2OP_CPU_LEVEL6(PADDSW_VdqWdqR, xmm_paddsw)
SSE_2OP_CPU_LEVEL6(PADDUSW_VdqWdqR, xmm_paddusw)
SSE_2OP_CPU_LEVEL6(PACKUSWB_VdqWdqR, xmm_packuswb)
SSE_2OP_CPU_LEVEL6(PACKSSWB_VdqWdqR, xmm_packsswb)
SSE_2OP_CPU_LEVEL6(PACKSSDW_VdqWdqR, xmm_packssdw)
SSE_2OP_CPU_LEVEL6(UNPCKLPS_VpsWpsR, xmm_unpcklps)
SSE_2OP_CPU_LEVEL6(UNPCKHPS_VpsWpsR, xmm_unpckhps)
SSE_2OP_CPU_LEVEL6(PUNPCKLQDQ_VdqWdqR, xmm_unpcklpd)
SSE_2OP_CPU_LEVEL6(PUNPCKHQDQ_VdqWdqR, xmm_unpckhpd)
SSE_2OP_CPU_LEVEL6(PUNPCKLBW_VdqWdqR, xmm_punpcklbw)
SSE_2OP_CPU_LEVEL6(PUNPCKLWD_VdqWdqR, xmm_punpcklwd)
SSE_2OP_CPU_LEVEL6(PUNPCKHBW_VdqWdqR, xmm_punpckhbw)
SSE_2OP_CPU_LEVEL6(PUNPCKHWD_VdqWdqR, xmm_punpckhwd)
SSE_2OP_CPU_LEVEL6(PMULLW_VdqWdqR, xmm_pmullw)
SSE_2OP_CPU_LEVEL6(PMULHW_VdqWdqR, xmm_pmulhw)
SSE_2OP_CPU_LEVEL6(PMULHUW_VdqWdqR, xmm_pmulhuw)
SSE_2OP_CPU_LEVEL6(PMULUDQ_VdqWdqR, xmm_pmuludq)
SSE_2OP_CPU_LEVEL6(PMADDWD_VdqWdqR, xmm_pmaddwd)
SSE_2OP_CPU_LEVEL6(PSADBW_VdqWdqR, xmm_psadbw)
#if BX_CPU_LEVEL >= 6
#define SSE_1OP(HANDLER, func) \
/* SSE instruction with single src operand */ \
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C :: HANDLER (bxInstruction_c *i) \
{ \
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src()); \
(func)(&op); \
BX_WRITE_XMM_REG(i->dst(), op); \
\
BX_NEXT_INSTR(i); \
}
SSE_1OP(PABSB_VdqWdqR, xmm_pabsb)
SSE_1OP(PABSW_VdqWdqR, xmm_pabsw)
SSE_1OP(PABSD_VdqWdqR, xmm_pabsd)
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PSHUFB_VdqWdqR(bxInstruction_c *i)
{
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->dst());
BxPackedXmmRegister op2 = BX_READ_XMM_REG(i->src()), result;
xmm_pshufb(&result, &op1, &op2);
BX_WRITE_XMM_REG(i->dst(), result);
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PBLENDVB_VdqWdqR(bxInstruction_c *i)
{
xmm_pblendvb(&BX_XMM_REG(i->dst()), &BX_XMM_REG(i->src()), &BX_XMM_REG(0));
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::BLENDVPS_VpsWpsR(bxInstruction_c *i)
{
xmm_blendvps(&BX_XMM_REG(i->dst()), &BX_XMM_REG(i->src()), &BX_XMM_REG(0));
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::BLENDVPD_VpdWpdR(bxInstruction_c *i)
{
xmm_blendvpd(&BX_XMM_REG(i->dst()), &BX_XMM_REG(i->src()), &BX_XMM_REG(0));
BX_NEXT_INSTR(i);
}
// PTEST: logical compare of two 128-bit operands, result delivered in flags.
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PTEST_VdqWdqR(bxInstruction_c *i)
{
  BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->dst()), op2 = BX_READ_XMM_REG(i->src());
  unsigned result = 0;

  // ZF <- (src AND dst) is all zeroes.
  if ((op2.xmm64u(0) &  op1.xmm64u(0)) == 0 &&
      (op2.xmm64u(1) &  op1.xmm64u(1)) == 0) result |= EFlagsZFMask;

  // CF <- (src AND NOT dst) is all zeroes.
  if ((op2.xmm64u(0) & ~op1.xmm64u(0)) == 0 &&
      (op2.xmm64u(1) & ~op1.xmm64u(1)) == 0) result |= EFlagsCFMask;

  // Remaining arithmetic flags are cleared.
  setEFlagsOSZAPC(result);

  BX_NEXT_INSTR(i);
}
// PHMINPOSUW: locate the minimum unsigned word of the source and report its
// value (word 0) and index (word 1).
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PHMINPOSUW_VdqWdqR(bxInstruction_c *i)
{
  BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
  unsigned min = 0;

  // Strict '<' keeps the earlier element on ties, so the lowest index wins.
  for (unsigned j=1; j < 8; j++) {
    if (op.xmm16u(j) < op.xmm16u(min)) min = j;
  }

  // word 0 = minimum value, word 1 = its index, upper 96 bits zeroed.
  op.xmm16u(0) = op.xmm16u(min);
  op.xmm16u(1) = min;
  op.xmm32u(1) = 0;
  op.xmm64u(1) = 0;

  BX_WRITE_XMM_REGZ(i->dst(), op, i->getVL());

  BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::BLENDPS_VpsWpsIbR(bxInstruction_c *i)
{
xmm_blendps(&BX_XMM_REG(i->dst()), &BX_XMM_REG(i->src()), i->Ib());
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::BLENDPD_VpdWpdIbR(bxInstruction_c *i)
{
xmm_blendpd(&BX_XMM_REG(i->dst()), &BX_XMM_REG(i->src()), i->Ib());
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PBLENDW_VdqWdqIbR(bxInstruction_c *i)
{
xmm_pblendw(&BX_XMM_REG(i->dst()), &BX_XMM_REG(i->src()), i->Ib());
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PEXTRB_EbdVdqIbR(bxInstruction_c *i)
{
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
Bit8u result = op.xmmubyte(i->Ib() & 0xF);
BX_WRITE_32BIT_REGZ(i->dst(), (Bit32u) result);
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PEXTRB_EbdVdqIbM(bxInstruction_c *i)
{
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
Bit8u result = op.xmmubyte(i->Ib() & 0xF);
bx_address eaddr = BX_CPU_CALL_METHODR(i->ResolveModrm, (i));
write_virtual_byte(i->seg(), eaddr, result);
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PEXTRW_EwdVdqIbR(bxInstruction_c *i)
{
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
Bit16u result = op.xmm16u(i->Ib() & 7);
BX_WRITE_32BIT_REGZ(i->dst(), (Bit32u) result);
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PEXTRW_EwdVdqIbM(bxInstruction_c *i)
{
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
Bit16u result = op.xmm16u(i->Ib() & 7);
bx_address eaddr = BX_CPU_CALL_METHODR(i->ResolveModrm, (i));
write_virtual_word(i->seg(), eaddr, result);
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PEXTRD_EdVdqIbR(bxInstruction_c *i)
{
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
#if BX_SUPPORT_X86_64
if (i->os64L()) /* 64 bit operand size mode */
{
Bit64u result = op.xmm64u(i->Ib() & 1);
BX_WRITE_64BIT_REG(i->dst(), result);
}
else
#endif
{
Bit32u result = op.xmm32u(i->Ib() & 3);
BX_WRITE_32BIT_REGZ(i->dst(), result);
}
BX_NEXT_INSTR(i);
}
#if BX_SUPPORT_X86_64
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PEXTRQ_EqVdqIbR(bxInstruction_c *i)
{
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
Bit64u result = op.xmm64u(i->Ib() & 1);
BX_WRITE_64BIT_REG(i->dst(), result);
BX_NEXT_INSTR(i);
}
#endif
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PEXTRD_EdVdqIbM(bxInstruction_c *i)
{
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
bx_address eaddr = BX_CPU_CALL_METHODR(i->ResolveModrm, (i));
#if BX_SUPPORT_X86_64
if (i->os64L()) /* 64 bit operand size mode */
{
Bit64u result = op.xmm64u(i->Ib() & 1);
write_virtual_qword_64(i->seg(), eaddr, result);
}
else
#endif
{
Bit32u result = op.xmm32u(i->Ib() & 3);
write_virtual_dword(i->seg(), eaddr, result);
}
BX_NEXT_INSTR(i);
}
#if BX_SUPPORT_X86_64
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PEXTRQ_EqVdqIbM(bxInstruction_c *i)
{
bx_address eaddr = BX_CPU_CALL_METHODR(i->ResolveModrm, (i));
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
Bit64u result = op.xmm64u(i->Ib() & 1);
write_virtual_qword_64(i->seg(), eaddr, result);
BX_NEXT_INSTR(i);
}
#endif
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::EXTRACTPS_EdVpsIbR(bxInstruction_c *i)
{
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
Bit32u result = op.xmm32u(i->Ib() & 3);
BX_WRITE_32BIT_REGZ(i->dst(), result);
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::EXTRACTPS_EdVpsIbM(bxInstruction_c *i)
{
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
Bit32u result = op.xmm32u(i->Ib() & 3);
bx_address eaddr = BX_CPU_CALL_METHODR(i->ResolveModrm, (i));
write_virtual_dword(i->seg(), eaddr, result);
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PINSRB_VdqHdqEbIbR(bxInstruction_c *i)
{
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->src1());
op1.xmmubyte(i->Ib() & 0xF) = BX_READ_8BIT_REGL(i->src2()); // won't allow reading of AH/CH/BH/DH
BX_WRITE_XMM_REGZ(i->dst(), op1, i->getVL());
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PINSRB_VdqHdqEbIbM(bxInstruction_c *i)
{
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->src1());
bx_address eaddr = BX_CPU_CALL_METHODR(i->ResolveModrm, (i));
op1.xmmubyte(i->Ib() & 0xF) = read_virtual_byte(i->seg(), eaddr);
BX_WRITE_XMM_REGZ(i->dst(), op1, i->getVL());
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::INSERTPS_VpsHpsWssIb(bxInstruction_c *i)
{
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->src1());
Bit8u control = i->Ib();
Bit32u op2;
/* op2 is a register or memory reference */
if (i->modC0()) {
BxPackedXmmRegister temp = BX_READ_XMM_REG(i->src2());
op2 = temp.xmm32u((control >> 6) & 3);
}
else {
bx_address eaddr = BX_CPU_CALL_METHODR(i->ResolveModrm, (i));
op2 = read_virtual_dword(i->seg(), eaddr);
}
op1.xmm32u((control >> 4) & 3) = op2;
if (control & 1) op1.xmm32u(0) = 0;
if (control & 2) op1.xmm32u(1) = 0;
if (control & 4) op1.xmm32u(2) = 0;
if (control & 8) op1.xmm32u(3) = 0;
BX_WRITE_XMM_REGZ(i->dst(), op1, i->getVL());
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PINSRD_VdqHdqEdIbR(bxInstruction_c *i)
{
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->src1());
#if BX_SUPPORT_X86_64
if (i->os64L()) { /* 64 bit operand size mode */
op1.xmm64u(i->Ib() & 1) = BX_READ_64BIT_REG(i->src2());
}
else
#endif<|fim▁hole|>
BX_WRITE_XMM_REGZ(i->dst(), op1, i->getVL());
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PINSRD_VdqHdqEdIbM(bxInstruction_c *i)
{
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->src1());
bx_address eaddr = BX_CPU_CALL_METHODR(i->ResolveModrm, (i));
#if BX_SUPPORT_X86_64
if (i->os64L()) { /* 64 bit operand size mode */
Bit64u op2 = read_virtual_qword_64(i->seg(), eaddr);
op1.xmm64u(i->Ib() & 1) = op2;
}
else
#endif
{
Bit32u op2 = read_virtual_dword(i->seg(), eaddr);
op1.xmm32u(i->Ib() & 3) = op2;
}
BX_WRITE_XMM_REGZ(i->dst(), op1, i->getVL());
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::MPSADBW_VdqWdqIbR(bxInstruction_c *i)
{
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->dst());
BxPackedXmmRegister op2 = BX_READ_XMM_REG(i->src()), result;
xmm_mpsadbw(&result, &op1, &op2, i->Ib() & 0x7);
BX_WRITE_XMM_REG(i->dst(), result);
BX_NEXT_INSTR(i);
}
#endif // BX_CPU_LEVEL >= 6
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PSHUFD_VdqWdqIbR(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src()), result;
xmm_shufps(&result, &op, &op, i->Ib());
BX_WRITE_XMM_REG(i->dst(), result);
#endif
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PSHUFHW_VdqWdqIbR(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src()), result;
xmm_pshufhw(&result, &op, i->Ib());
BX_WRITE_XMM_REG(i->dst(), result);
#endif
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PSHUFLW_VdqWdqIbR(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src()), result;
xmm_pshuflw(&result, &op, i->Ib());
BX_WRITE_XMM_REG(i->dst(), result);
#endif
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PINSRW_VdqHdqEwIbR(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->src1());
Bit8u count = i->Ib() & 0x7;
op1.xmm16u(count) = BX_READ_16BIT_REG(i->src2());
BX_WRITE_XMM_REGZ(i->dst(), op1, i->getVL());
#endif
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::PEXTRW_GdUdqIb(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
BxPackedXmmRegister op = BX_READ_XMM_REG(i->src());
Bit8u count = i->Ib() & 0x7;
Bit32u result = (Bit32u) op.xmm16u(count);
BX_WRITE_32BIT_REGZ(i->dst(), result);
#endif
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::SHUFPS_VpsWpsIbR(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->dst());
BxPackedXmmRegister op2 = BX_READ_XMM_REG(i->src()), result;
xmm_shufps(&result, &op1, &op2, i->Ib());
BX_WRITE_XMM_REG(i->dst(), result);
#endif
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::SHUFPD_VpdWpdIbR(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
BxPackedXmmRegister op1 = BX_READ_XMM_REG(i->dst());
BxPackedXmmRegister op2 = BX_READ_XMM_REG(i->src()), result;
xmm_shufpd(&result, &op1, &op2, i->Ib());
BX_WRITE_XMM_REG(i->dst(), result);
#endif
BX_NEXT_INSTR(i);
}
#if BX_CPU_LEVEL >= 6
#define SSE_PSHIFT_CPU_LEVEL6(HANDLER, func) \
/* SSE packed shift instruction */ \
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C:: HANDLER (bxInstruction_c *i) \
{ \
BxPackedXmmRegister op = BX_READ_XMM_REG(i->dst()); \
\
(func)(&op, BX_READ_XMM_REG_LO_QWORD(i->src())); \
\
BX_WRITE_XMM_REG(i->dst(), op); \
\
BX_NEXT_INSTR(i); \
}
#else
#define SSE_PSHIFT_CPU_LEVEL6(HANDLER, func) \
/* SSE instruction with two src operands */ \
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C :: HANDLER (bxInstruction_c *i) \
{ \
BX_NEXT_INSTR(i); \
}
#endif
SSE_PSHIFT_CPU_LEVEL6(PSRLW_VdqWdqR, xmm_psrlw);
SSE_PSHIFT_CPU_LEVEL6(PSRLD_VdqWdqR, xmm_psrld);
SSE_PSHIFT_CPU_LEVEL6(PSRLQ_VdqWdqR, xmm_psrlq);
SSE_PSHIFT_CPU_LEVEL6(PSRAW_VdqWdqR, xmm_psraw);
SSE_PSHIFT_CPU_LEVEL6(PSRAD_VdqWdqR, xmm_psrad);
SSE_PSHIFT_CPU_LEVEL6(PSLLW_VdqWdqR, xmm_psllw);
SSE_PSHIFT_CPU_LEVEL6(PSLLD_VdqWdqR, xmm_pslld);
SSE_PSHIFT_CPU_LEVEL6(PSLLQ_VdqWdqR, xmm_psllq);
#if BX_CPU_LEVEL >= 6
#define SSE_PSHIFT_IMM_CPU_LEVEL6(HANDLER, func) \
/* SSE packed shift with imm8 instruction */ \
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C:: HANDLER (bxInstruction_c *i) \
{ \
(func)(&BX_XMM_REG(i->dst()), i->Ib()); \
\
BX_NEXT_INSTR(i); \
}
#else
#define SSE_PSHIFT_IMM_CPU_LEVEL6(HANDLER, func) \
/* SSE packed shift with imm8 instruction */ \
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C :: HANDLER (bxInstruction_c *i) \
{ \
BX_NEXT_INSTR(i); \
}
#endif
SSE_PSHIFT_IMM_CPU_LEVEL6(PSRLW_UdqIb, xmm_psrlw);
SSE_PSHIFT_IMM_CPU_LEVEL6(PSRLD_UdqIb, xmm_psrld);
SSE_PSHIFT_IMM_CPU_LEVEL6(PSRLQ_UdqIb, xmm_psrlq);
SSE_PSHIFT_IMM_CPU_LEVEL6(PSRAW_UdqIb, xmm_psraw);
SSE_PSHIFT_IMM_CPU_LEVEL6(PSRAD_UdqIb, xmm_psrad);
SSE_PSHIFT_IMM_CPU_LEVEL6(PSLLW_UdqIb, xmm_psllw);
SSE_PSHIFT_IMM_CPU_LEVEL6(PSLLD_UdqIb, xmm_pslld);
SSE_PSHIFT_IMM_CPU_LEVEL6(PSLLQ_UdqIb, xmm_psllq);
SSE_PSHIFT_IMM_CPU_LEVEL6(PSRLDQ_UdqIb, xmm_psrldq);
SSE_PSHIFT_IMM_CPU_LEVEL6(PSLLDQ_UdqIb, xmm_pslldq);
/* ************************ */
/* SSE4A (AMD) INSTRUCTIONS */
/* ************************ */
#if BX_CPU_LEVEL >= 6
// Extract a bit field from 'src': shift right by 'shift' bits, then keep the
// low 'len' bits. Only the low 6 bits of each control value are used, and
// len == 0 selects the full 64-bit field.
BX_CPP_INLINE Bit64u xmm_extrq(Bit64u src, unsigned shift, unsigned len)
{
  src >>= (shift & 0x3f);

  len &= 0x3f;
  if (len == 0) return src;

  return src & ((BX_CONST64(1) << len) - 1);
}
// Insert the low 'len' bits of 'src' into 'dest' at bit position 'shift'.
// Only the low 6 bits of each control value are used, and len == 0 means a
// full 64-bit field.
BX_CPP_INLINE Bit64u xmm_insertq(Bit64u dest, Bit64u src, unsigned shift, unsigned len)
{
  len &= 0x3f;
  shift &= 0x3f;

  Bit64u mask = (len == 0) ? BX_CONST64(0xffffffffffffffff)
                           : (BX_CONST64(1) << len) - 1;

  return (dest & ~(mask << shift)) | ((src & mask) << shift);
}
#endif
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::EXTRQ_UdqIbIb(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
BX_WRITE_XMM_REG_LO_QWORD(i->dst(), xmm_extrq(BX_READ_XMM_REG_LO_QWORD(i->dst()), i->Ib2(), i->Ib()));
#endif
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::EXTRQ_VdqUq(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
Bit16u ctrl = BX_READ_XMM_REG_LO_WORD(i->src());
BX_WRITE_XMM_REG_LO_QWORD(i->dst(), xmm_extrq(BX_READ_XMM_REG_LO_QWORD(i->dst()), ctrl >> 8, ctrl));
#endif
BX_NEXT_INSTR(i);
}
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::INSERTQ_VdqUqIbIb(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
Bit64u dst = BX_READ_XMM_REG_LO_QWORD(i->dst()), src = BX_READ_XMM_REG_LO_QWORD(i->src());
BX_WRITE_XMM_REG_LO_QWORD(i->dst(), xmm_insertq(dst, src, i->Ib2(), i->Ib()));
#endif
BX_NEXT_INSTR(i);
}
// INSERTQ (register form): the field descriptor travels in the source
// register instead of immediates — byte 8 holds the field length, byte 9 the
// bit index.
BX_INSF_TYPE BX_CPP_AttrRegparmN(1) BX_CPU_C::INSERTQ_VdqUdq(bxInstruction_c *i)
{
#if BX_CPU_LEVEL >= 6
  BxPackedXmmRegister src = BX_READ_XMM_REG(i->src());
  Bit64u dst = BX_READ_XMM_REG_LO_QWORD(i->dst());

  BX_WRITE_XMM_REG_LO_QWORD(i->dst(), xmm_insertq(dst, src.xmm64u(0), src.xmmubyte(9), src.xmmubyte(8)));
#endif

  BX_NEXT_INSTR(i);
}
op1.xmm32u(i->Ib() & 3) = BX_READ_32BIT_REG(i->src2());
} |
<|file_name|>FrenchDeck.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
<|fim▁hole|> suits = 'spades diamonds clubs hearts'.split()
def __init__(self):
self._cards = [Card(rank, suit) for suit in self.suits
for rank in self.ranks]
def __len__(self):
return len(self._cards)
def __getitem__(self, position):
return self._cards[position]<|fim▁end|> | Card = collections.namedtuple('Card', ['rank', 'suit'])
class FrenchDeck:
ranks = [str(n) for n in range(2, 11)] + list('JQKA') |
<|file_name|>interceptors_client.go<|end_file_name|><|fim▁begin|>package gocsi
import (
"golang.org/x/net/context"
"google.golang.org/grpc"
"github.com/thecodeteam/gocsi/csi"
)
// ChainUnaryClient chains one or more unary, client interceptors
// together into a left-to-right series that can be provided to a
// new gRPC client.
func ChainUnaryClient(
	i ...grpc.UnaryClientInterceptor) grpc.UnaryClientInterceptor {

	switch len(i) {
	case 0:
		// No interceptors: return a pass-through that simply invokes the RPC.
		return func(
			ctx context.Context,
			method string,
			req, rep interface{},
			cc *grpc.ClientConn,
			invoker grpc.UnaryInvoker,
			opts ...grpc.CallOption) error {
			return invoker(ctx, method, req, rep, cc, opts...)
		}
	case 1:
		// A single interceptor needs no chaining.
		return i[0]
	}

	return func(
		ctx context.Context,
		method string,
		req, rep interface{},
		cc *grpc.ClientConn,
		invoker grpc.UnaryInvoker,
		opts ...grpc.CallOption) error {

		// bc binds interceptor 'cur' to its next invoker 'nxt', yielding a
		// new invoker that runs cur first and hands control to nxt.
		bc := func(
			cur grpc.UnaryClientInterceptor,
			nxt grpc.UnaryInvoker) grpc.UnaryInvoker {
			return func(
				curCtx context.Context,
				curMethod string,
				curReq, curRep interface{},
				curCC *grpc.ClientConn,
				curOpts ...grpc.CallOption) error {
				return cur(
					curCtx,
					curMethod,
					curReq, curRep,
					curCC, nxt,
					curOpts...)
			}
		}

		// Fold right-to-left so i[0] ends up outermost and the real invoker
		// innermost — i.e. interceptors execute in left-to-right order.
		c := invoker
		for j := len(i) - 1; j >= 0; j-- {
			c = bc(i[j], c)
		}

		return c(ctx, method, req, rep, cc, opts...)
	}
}
// ClientCheckReponseError is a unary, client validator that checks a
// reply's message to see if it contains an error and transforms it
// into an *Error object, which adheres to Go's Error interface.
// (The "Reponse" spelling is a pre-existing typo kept for API
// compatibility.)
func ClientCheckReponseError(
	ctx context.Context,
	method string,
	req, rep interface{},
	cc *grpc.ClientConn,
	invoker grpc.UnaryInvoker,
	opts ...grpc.CallOption) error {

	// Invoke the call; wrap transport-level failures in an *Error.
	if err := invoker(ctx, method, req, rep, cc, opts...); err != nil {
		return &Error{
			FullMethod: method,
			InnerError: err,
		}
	}

	// Dispatch on the concrete reply type. Each helper returns nil when
	// the reply carries no error, so its result can be returned directly.
	switch trep := rep.(type) {

	// Controller
	case *csi.CreateVolumeResponse:
		return CheckResponseErrCreateVolume(ctx, method, trep)
	case *csi.DeleteVolumeResponse:
		return CheckResponseErrDeleteVolume(ctx, method, trep)
	case *csi.ControllerPublishVolumeResponse:
		return CheckResponseErrControllerPublishVolume(ctx, method, trep)
	case *csi.ControllerUnpublishVolumeResponse:
		return CheckResponseErrControllerUnpublishVolume(ctx, method, trep)
	case *csi.ValidateVolumeCapabilitiesResponse:
		return CheckResponseErrValidateVolumeCapabilities(ctx, method, trep)
	case *csi.ListVolumesResponse:
		return CheckResponseErrListVolumes(ctx, method, trep)
	case *csi.GetCapacityResponse:
		return CheckResponseErrGetCapacity(ctx, method, trep)
	case *csi.ControllerGetCapabilitiesResponse:
		return CheckResponseErrControllerGetCapabilities(ctx, method, trep)

	// Identity
	case *csi.GetSupportedVersionsResponse:
		return CheckResponseErrGetSupportedVersions(ctx, method, trep)
	case *csi.GetPluginInfoResponse:
		return CheckResponseErrGetPluginInfo(ctx, method, trep)

	// Node
	case *csi.NodePublishVolumeResponse:
		return CheckResponseErrNodePublishVolume(ctx, method, trep)
	case *csi.NodeUnpublishVolumeResponse:
		return CheckResponseErrNodeUnpublishVolume(ctx, method, trep)
	case *csi.GetNodeIDResponse:
		return CheckResponseErrGetNodeID(ctx, method, trep)
	case *csi.ProbeNodeResponse:
		return CheckResponseErrProbeNode(ctx, method, trep)
	case *csi.NodeGetCapabilitiesResponse:
		return CheckResponseErrNodeGetCapabilities(ctx, method, trep)
	}

	return nil
}
// ClientResponseValidator is a unary, client validator for validating
// replies from a CSI plug-in.
func ClientResponseValidator(
ctx context.Context,
method string,
req, rep interface{},
cc *grpc.ClientConn,
invoker grpc.UnaryInvoker,
opts ...grpc.CallOption) error {
// Invoke the call and validate the reply.
if err := invoker(ctx, method, req, rep, cc, opts...); err != nil {
return &Error{
FullMethod: method,
InnerError: err,
}
}
// Do not validate the reply if it has an error.
if trep, ok := rep.(hasGetError); ok {
if trep.GetError() != nil {
return nil
}
}
<|fim▁hole|> // Controller
case *csi.CreateVolumeResponse:
if err := crepvCreateVolume(
ctx, method, trep); err != nil {
return err
}
case *csi.DeleteVolumeResponse:
if err := crepvDeleteVolume(
ctx, method, trep); err != nil {
return err
}
case *csi.ControllerPublishVolumeResponse:
if err := crepvControllerPublishVolume(
ctx, method, trep); err != nil {
return err
}
case *csi.ControllerUnpublishVolumeResponse:
if err := crepvControllerUnpublishVolume(
ctx, method, trep); err != nil {
return err
}
case *csi.ValidateVolumeCapabilitiesResponse:
if err := crepvValidateVolumeCapabilities(
ctx, method, trep); err != nil {
return err
}
case *csi.ListVolumesResponse:
if err := crepvListVolumes(
ctx, method, trep); err != nil {
return err
}
case *csi.GetCapacityResponse:
if err := crepvGetCapacity(
ctx, method, trep); err != nil {
return err
}
case *csi.ControllerGetCapabilitiesResponse:
if err := crepvControllerGetCapabilities(
ctx, method, trep); err != nil {
return err
}
// Identity
case *csi.GetSupportedVersionsResponse:
if err := crepvGetSupportedVersions(
ctx, method, trep); err != nil {
return err
}
case *csi.GetPluginInfoResponse:
if err := crepvGetPluginInfo(
ctx, method, trep); err != nil {
return err
}
// Node
case *csi.NodePublishVolumeResponse:
if err := crepvNodePublishVolume(
ctx, method, trep); err != nil {
return err
}
case *csi.NodeUnpublishVolumeResponse:
if err := crepvNodeUnpublishVolume(
ctx, method, trep); err != nil {
return err
}
case *csi.GetNodeIDResponse:
if err := crepvGetNodeID(
ctx, method, trep); err != nil {
return err
}
case *csi.ProbeNodeResponse:
if err := crepvProbeNode(
ctx, method, trep); err != nil {
return err
}
case *csi.NodeGetCapabilitiesResponse:
if err := crepvNodeGetCapabilities(
ctx, method, trep); err != nil {
return err
}
}
return nil
}
////////////////////////////////////////////////////////////////////////////////
// CLIENT RESPONSE - CONTROLLER //
////////////////////////////////////////////////////////////////////////////////
// crepvCreateVolume validates a CreateVolumeResponse: the result, its
// volume info, and the volume ID must all be present.
func crepvCreateVolume(
	ctx context.Context,
	method string,
	rep *csi.CreateVolumeResponse) error {

	fail := func(inner error) error {
		return &Error{
			Code:       ErrorNoCode,
			FullMethod: method,
			InnerError: inner,
		}
	}

	result := rep.GetResult()
	switch {
	case result == nil:
		return fail(ErrNilResult)
	case result.VolumeInfo == nil:
		return fail(ErrNilVolumeInfo)
	case result.VolumeInfo.Id == nil:
		return fail(ErrNilVolumeID)
	}
	return nil
}
// crepvDeleteVolume validates a DeleteVolumeResponse: the reply must
// carry a non-nil result.
func crepvDeleteVolume(
	ctx context.Context,
	method string,
	rep *csi.DeleteVolumeResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
// crepvControllerPublishVolume validates a ControllerPublishVolumeResponse:
// the result and its publish-volume info must be present, and the info
// must contain at least one value.
func crepvControllerPublishVolume(
	ctx context.Context,
	method string,
	rep *csi.ControllerPublishVolumeResponse) error {

	fail := func(inner error) error {
		return &Error{
			Code:       ErrorNoCode,
			FullMethod: method,
			InnerError: inner,
		}
	}

	result := rep.GetResult()
	switch {
	case result == nil:
		return fail(ErrNilResult)
	case result.PublishVolumeInfo == nil:
		return fail(ErrNilPublishVolumeInfo)
	case len(result.PublishVolumeInfo.Values) == 0:
		return fail(ErrEmptyPublishVolumeInfo)
	}
	return nil
}
// crepvControllerUnpublishVolume validates a
// ControllerUnpublishVolumeResponse: the reply must carry a non-nil result.
func crepvControllerUnpublishVolume(
	ctx context.Context,
	method string,
	rep *csi.ControllerUnpublishVolumeResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
// crepvValidateVolumeCapabilities validates a
// ValidateVolumeCapabilitiesResponse: the reply must carry a non-nil result.
func crepvValidateVolumeCapabilities(
	ctx context.Context,
	method string,
	rep *csi.ValidateVolumeCapabilitiesResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
// crepvListVolumes validates a ListVolumesResponse: the reply must carry
// a non-nil result.
func crepvListVolumes(
	ctx context.Context,
	method string,
	rep *csi.ListVolumesResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
// crepvGetCapacity validates a GetCapacityResponse: the reply must carry
// a non-nil result.
func crepvGetCapacity(
	ctx context.Context,
	method string,
	rep *csi.GetCapacityResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
// crepvControllerGetCapabilities validates a
// ControllerGetCapabilitiesResponse: the reply must carry a non-nil result.
func crepvControllerGetCapabilities(
	ctx context.Context,
	method string,
	rep *csi.ControllerGetCapabilitiesResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
////////////////////////////////////////////////////////////////////////////////
// CLIENT RESPONSE - IDENTITY //
////////////////////////////////////////////////////////////////////////////////
// crepvGetSupportedVersions validates a GetSupportedVersionsResponse:
// the reply must carry a non-nil result.
func crepvGetSupportedVersions(
	ctx context.Context,
	method string,
	rep *csi.GetSupportedVersionsResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
// crepvGetPluginInfo validates a GetPluginInfoResponse: the reply must
// carry a non-nil result.
func crepvGetPluginInfo(
	ctx context.Context,
	method string,
	rep *csi.GetPluginInfoResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
////////////////////////////////////////////////////////////////////////////////
// CLIENT RESPONSE - NODE //
////////////////////////////////////////////////////////////////////////////////
// crepvNodePublishVolume validates a NodePublishVolumeResponse: the reply
// must carry a non-nil result.
func crepvNodePublishVolume(
	ctx context.Context,
	method string,
	rep *csi.NodePublishVolumeResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
// crepvNodeUnpublishVolume validates a NodeUnpublishVolumeResponse: the
// reply must carry a non-nil result.
func crepvNodeUnpublishVolume(
	ctx context.Context,
	method string,
	rep *csi.NodeUnpublishVolumeResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
// crepvGetNodeID validates a GetNodeIDResponse: both the result and the
// node ID it contains must be present.
func crepvGetNodeID(
	ctx context.Context,
	method string,
	rep *csi.GetNodeIDResponse) error {

	fail := func(inner error) error {
		return &Error{
			Code:       ErrorNoCode,
			FullMethod: method,
			InnerError: inner,
		}
	}

	result := rep.GetResult()
	switch {
	case result == nil:
		return fail(ErrNilResult)
	case result.NodeId == nil:
		return fail(ErrNilNodeID)
	}
	return nil
}
// crepvProbeNode validates a ProbeNodeResponse: the reply must carry a
// non-nil result.
func crepvProbeNode(
	ctx context.Context,
	method string,
	rep *csi.ProbeNodeResponse) error {

	if rep.GetResult() != nil {
		return nil
	}
	return &Error{
		Code:       ErrorNoCode,
		FullMethod: method,
		InnerError: ErrNilResult,
	}
}
func crepvNodeGetCapabilities(
ctx context.Context,
method string,
rep *csi.NodeGetCapabilitiesResponse) error {
if rep.GetResult() == nil {
return &Error{
Code: ErrorNoCode,
FullMethod: method,
InnerError: ErrNilResult,
}
}
return nil
}<|fim▁end|> | switch trep := rep.(type) {
|
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import copy
import warnings
from itertools import chain
from django.core.exceptions import ImproperlyConfigured
from django.utils import six
from haystack import fields as haystack_fields
from haystack.query import EmptySearchQuerySet
from haystack.utils import Highlighter<|fim▁hole|>from rest_framework.utils.field_mapping import ClassLookupDict, get_field_kwargs
from .fields import (
HaystackBooleanField, HaystackCharField, HaystackDateField, HaystackDateTimeField,
HaystackDecimalField, HaystackFloatField, HaystackIntegerField
)
class HaystackSerializer(serializers.Serializer):
    """
    A `HaystackSerializer` which populates fields based on
    which models that are available in the SearchQueryset.
    """

    # Lookup table translating each haystack index field type into the
    # REST framework field class used to serialize it. Facet and n-gram
    # variants map onto the same basic field types (mostly char fields).
    _field_mapping = ClassLookupDict({
        haystack_fields.BooleanField: HaystackBooleanField,
        haystack_fields.CharField: HaystackCharField,
        haystack_fields.DateField: HaystackDateField,
        haystack_fields.DateTimeField: HaystackDateTimeField,
        haystack_fields.DecimalField: HaystackDecimalField,
        haystack_fields.EdgeNgramField: HaystackCharField,
        haystack_fields.FacetBooleanField: HaystackBooleanField,
        haystack_fields.FacetCharField: HaystackCharField,
        haystack_fields.FacetDateField: HaystackDateField,
        haystack_fields.FacetDateTimeField: HaystackDateTimeField,
        haystack_fields.FacetDecimalField: HaystackDecimalField,
        haystack_fields.FacetFloatField: HaystackFloatField,
        haystack_fields.FacetIntegerField: HaystackIntegerField,
        haystack_fields.FacetMultiValueField: HaystackCharField,
        haystack_fields.FloatField: HaystackFloatField,
        haystack_fields.IntegerField: HaystackIntegerField,
        haystack_fields.LocationField: HaystackCharField,
        haystack_fields.MultiValueField: HaystackCharField,
        haystack_fields.NgramField: HaystackCharField,
    })

    def __init__(self, instance=None, data=empty, **kwargs):
        """
        Validate the Meta configuration (either `index_classes` or
        `serializers` must be declared) and default the instance to an
        EmptySearchQuerySet when none is supplied.
        """
        super(HaystackSerializer, self).__init__(instance, data, **kwargs)

        try:
            # Accessing self.Meta raises AttributeError when the subclass
            # declares no Meta class at all.
            if not hasattr(self.Meta, "index_classes") and not hasattr(self.Meta, "serializers"):
                raise ImproperlyConfigured("You must set either the 'index_classes' or 'serializers' "
                                           "attribute on the serializer Meta class.")
        except AttributeError:
            raise ImproperlyConfigured("%s must implement a Meta class." % self.__class__.__name__)

        if not self.instance:
            self.instance = EmptySearchQuerySet()

    @staticmethod
    def _get_default_field_kwargs(model, field):
        """
        Get the required attributes from the model field in order
        to instantiate a REST Framework serializer field.
        """
        kwargs = {}
        # NOTE(review): _meta.get_all_field_names()/get_field_by_name() are
        # legacy Django meta APIs -- confirm against the Django versions this
        # project supports.
        if field.model_attr in model._meta.get_all_field_names():
            model_field = model._meta.get_field_by_name(field.model_attr)[0]
            kwargs = get_field_kwargs(field.model_attr, model_field)

            # Remove stuff we don't care about!
            delete_attrs = [
                "allow_blank",
                "choices",
                "model_field",
            ]
            for attr in delete_attrs:
                if attr in kwargs:
                    del kwargs[attr]

        return kwargs

    def get_fields(self):
        """
        Get the required fields for serializing the result.
        """
        fields = getattr(self.Meta, "fields", [])
        exclude = getattr(self.Meta, "exclude", [])

        if fields and exclude:
            raise ImproperlyConfigured("Cannot set both `fields` and `exclude`.")

        ignore_fields = getattr(self.Meta, "ignore_fields", [])
        # NOTE(review): no default is given here, so this raises
        # AttributeError when Meta only declares `serializers` (which
        # __init__ accepts) -- confirm whether that combination ever reaches
        # get_fields().
        indices = getattr(self.Meta, "index_classes")
        declared_fields = copy.deepcopy(self._declared_fields)
        prefix_field_names = len(indices) > 1
        field_mapping = OrderedDict()

        # overlapping fields on multiple indices is supported by internally prefixing the field
        # names with the index class to which they belong or, optionally, a user-provided alias
        # for the index.
        for index_cls in self.Meta.index_classes:
            prefix = ""
            if prefix_field_names:
                prefix = "_%s__" % self._get_index_class_name(index_cls)
            for field_name, field_type in six.iteritems(index_cls.fields):
                orig_name = field_name
                field_name = "%s%s" % (prefix, field_name)

                # This has become a little more complex, but provides convenient flexibility for users
                if not exclude:
                    if orig_name not in fields and field_name not in fields:
                        continue
                elif orig_name in exclude or field_name in exclude or orig_name in ignore_fields or field_name in ignore_fields:
                    continue

                # Look up the field attributes on the current index model,
                # in order to correctly instantiate the serializer field.
                model = index_cls().get_model()
                kwargs = self._get_default_field_kwargs(model, field_type)
                kwargs['prefix_field_names'] = prefix_field_names
                field_mapping[field_name] = self._field_mapping[field_type](**kwargs)

        # Add any explicitly declared fields. They *will* override any index fields
        # in case of naming collision!.
        if declared_fields:
            for field_name in declared_fields:
                if field_name in field_mapping:
                    warnings.warn("Field '{field}' already exists in the field list. This *will* "
                                  "overwrite existing field '{field}'".format(field=field_name))
                field_mapping[field_name] = declared_fields[field_name]
        return field_mapping

    def to_representation(self, instance):
        """
        If we have a serializer mapping, use that. Otherwise, use standard serializer behavior
        Since we might be dealing with multiple indexes, some fields might
        not be valid for all results. Do not render the fields which don't belong
        to the search result.
        """
        if getattr(self.Meta, "serializers", None):
            ret = self.multi_serializer_representation(instance)
        else:
            ret = super(HaystackSerializer, self).to_representation(instance)
            prefix_field_names = len(getattr(self.Meta, "index_classes")) > 1
            current_index = self._get_index_class_name(type(instance.searchindex))
            for field in self.fields.keys():
                orig_field = field
                if prefix_field_names:
                    parts = field.split("__")
                    if len(parts) > 1:
                        index = parts[0][1:]  # trim the preceding '_'
                        field = parts[1]
                        # Keep the unprefixed name only for the index the
                        # result actually came from; drop the prefixed key
                        # either way.
                        if index == current_index:
                            ret[field] = ret[orig_field]
                        del ret[orig_field]
                elif field not in chain(instance.searchindex.fields.keys(), self._declared_fields.keys()):
                    del ret[orig_field]

        # include the highlighted field in either case
        if getattr(instance, "highlighted", None):
            ret["highlighted"] = instance.highlighted[0]
        return ret

    def multi_serializer_representation(self, instance):
        """
        Render the result with the serializer registered for its search
        index in Meta.serializers; raise if no serializer is mapped.
        """
        serializers = self.Meta.serializers
        index = instance.searchindex
        serializer_class = serializers.get(type(index), None)
        if not serializer_class:
            raise ImproperlyConfigured("Could not find serializer for %s in mapping" % index)
        return serializer_class(context=self._context).to_representation(instance)

    def _get_index_class_name(self, index_cls):
        """
        Converts in index model class to a name suitable for use as a field name prefix. A user
        may optionally specify custom aliases via an 'index_aliases' attribute on the Meta class
        """
        cls_name = index_cls.__name__
        aliases = getattr(self.Meta, "index_aliases", {})
        # A class __name__ never contains '.', so the split is a safe no-op.
        return aliases.get(cls_name, cls_name.split('.')[-1])
class HaystackSerializerMixin(object):
    """
    Mixin that makes a serializer use the actual model object behind a
    search result (``instance.object``) as the serialization source,
    rather than the data stored in the search index fields. This makes it
    easy to return data from search results in the same format as
    elsewhere in your API and reuse your existing serializers.
    """

    def to_representation(self, instance):
        """Serialize the model object attached to a search result."""
        return super(HaystackSerializerMixin, self).to_representation(instance.object)
class HighlighterMixin(object):
    """
    Mixin that adds support for ``highlighting`` (the pure python, portable
    version, not SearchQuerySet().highlight()). See the Haystack docs
    for more info.
    """

    # Defaults; override on the serializer as needed.
    highlighter_class = Highlighter
    highlighter_css_class = "highlighted"
    highlighter_html_tag = "span"
    highlighter_max_length = 200
    highlighter_field = None

    def get_highlighter(self):
        """Return the configured highlighter class, or raise if unset."""
        if self.highlighter_class:
            return self.highlighter_class
        raise ImproperlyConfigured(
            "%(cls)s is missing a highlighter_class. Define %(cls)s.highlighter_class, "
            "or override %(cls)s.get_highlighter()." %
            {"cls": self.__class__.__name__}
        )

    @staticmethod
    def get_document_field(instance):
        """
        Return the name of the field the search index has marked as its
        ``document=True`` field (or ``None`` if there is none).
        """
        for name, field in instance.searchindex.fields.items():
            if field.document is True:
                return name

    def to_representation(self, instance):
        ret = super(HighlighterMixin, self).to_representation(instance)
        # The query terms come straight from the request's GET parameters.
        terms = " ".join(six.itervalues(self.context["request"].GET))
        if terms:
            options = {
                "html_tag": self.highlighter_html_tag,
                "css_class": self.highlighter_css_class,
                "max_length": self.highlighter_max_length,
            }
            highlighter = self.get_highlighter()(terms, **options)
            document_field = self.get_document_field(instance)
            if highlighter and document_field:
                # highlighter_field, when set, wins over the document field.
                source_attr = self.highlighter_field or document_field
                ret["highlighted"] = highlighter.highlight(getattr(instance, source_attr))
        return ret
from rest_framework import serializers
from rest_framework.compat import OrderedDict
from rest_framework.fields import empty |
<|file_name|>_notify_raceboss_cry_msg_request_zocl.hpp<|end_file_name|><|fim▁begin|>// This file auto generated by plugin for ida pro. Generated code only for x64. Please, dont change manually<|fim▁hole|>#pragma once
#include <common/common.h>
START_ATF_NAMESPACE
// Zone-to-client packet payload carrying race-boss "cry" broadcast
// messages.
struct _notify_raceboss_cry_msg_request_zocl
{
    // 10 message slots of 65 bytes each (presumably 64 characters plus a
    // NUL terminator; despite the "wsz" Hungarian prefix the storage is
    // plain char, not wide chars -- confirm against the sender side).
    char wszCryMsg[10][65];
};
END_ATF_NAMESPACE<|fim▁end|> | |
<|file_name|>arduino.js<|end_file_name|><|fim▁begin|>var Arduino = function(port) {
this.portName = port;
this.status = "CLOSED";<|fim▁hole|>}
module.exports = Arduino;<|fim▁end|> | |
<|file_name|>bitcoin_sq.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="sq" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About breakout</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>breakout</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The breakout developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Klikoni 2 herë për të ndryshuar adressën ose etiketën</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Krijo një adresë të re</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopjo adresën e zgjedhur në memorjen e sistemit </translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your breakout addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a breakout address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified breakout address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Fshi</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Skedar i ndarë me pikëpresje(*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etiketë</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresë</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(pa etiketë)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Futni frazkalimin</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Frazkalim i ri</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Përsërisni frazkalimin e ri</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Futni frazkalimin e ri në portofol.<br/>Ju lutemi përdorni një frazkalim prej<b>10 ose më shumë shkronjash të rastësishme<b/>, ose tetë apo më shumë fjalë</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Enkripto portofolin</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Ky veprim ka nevojë per frazkalimin e portofolit tuaj që të ç'kyç portofolin.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>ç'kyç portofolin.</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Ky veprim kërkon frazkalimin e portofolit tuaj që të dekriptoj portofolin.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Dekripto portofolin</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Ndrysho frazkalimin</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Futni frazkalimin e vjetër dhe të ri në portofol. </translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Konfirmoni enkriptimin e portofolit</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Portofoli u enkriptua</translation>
</message>
<message>
<location line="-58"/>
<source>breakout will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Enkriptimi i portofolit dështoi</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Enkriptimi i portofolit dështoi për shkak të një gabimi të brëndshëm. portofoli juaj nuk u enkriptua.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Frazkalimet e plotësuara nuk përputhen.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>ç'kyçja e portofolit dështoi</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Frazkalimi i futur për dekriptimin e portofolit nuk ishte i saktë.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Dekriptimi i portofolit dështoi</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Duke u sinkronizuar me rrjetin...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Përmbledhje</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Trego një përmbledhje te përgjithshme të portofolit</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Transaksionet</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Shfleto historinë e transaksioneve</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive BRO</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send BRO</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Mbyllni aplikacionin</translation>
</message>
<message>
<location line="+6"/>
<source>Show information about breakout</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opsione</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send BRO to a breakout address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for breakout</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Ndrysho frazkalimin e përdorur per enkriptimin e portofolit</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>breakout</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+180"/>
<source>&About breakout</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Skedar</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Konfigurimet</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Ndihmë</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Shiriti i mjeteve</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[testo rrjetin]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>breakout client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to breakout network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About breakout card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about breakout card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>I azhornuar</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Duke u azhornuar...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction requires a fee based on the services it uses. You may send it for a fee of %1 BRO, which rewards all users of the Breakout network as a result of your usage. Do you want to pay this fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Transaksioni u dërgua</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Transaksion në ardhje</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid breakout address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Portofoli është <b>i enkriptuar</b> dhe aktualisht <b>i ç'kyçur</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Portofoli është <b>i enkriptuar</b> dhe aktualisht <b>i kyçur</b></translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. breakout can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Sasia</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Adresë</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(pa etiketë)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Ndrysho Adresën</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiketë</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Adresa</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Adresë e re pritëse</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Adresë e re dërgimi</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Ndrysho adresën pritëse</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Ndrysho adresën dërguese</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Adresa e dhënë "%1" është e zënë në librin e adresave. </translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid breakout address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Nuk mund të ç'kyçet portofoli.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Krijimi i çelësit të ri dështoi.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>breakout-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opsionet</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start breakout after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start breakout on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the breakout client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the breakout network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting breakout.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show breakout addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting breakout.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formularë</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the breakout network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transaksionet e fundit</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the breakout-Qt help message to get a list with possible breakout command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>breakout - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>breakout Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the breakout debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the breakout RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Dërgo Monedha</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Dërgo marrësve të ndryshëm njëkohësisht</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Balanca:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Konfirmo veprimin e dërgimit</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a breakout address (e.g. breakoutfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
        <translation>Konfirmo dërgimin e monedhave</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Shuma e paguar duhet të jetë më e madhe se 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid breakout address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(pa etiketë)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Sh&uma:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Paguaj &drejt:</translation>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Krijoni një etiketë për këtë adresë që t'ja shtoni librit të adresave</translation>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>&Etiketë:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. breakoutfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Ngjit nga memorja e sistemit</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a breakout address (e.g. breakoutfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. breakoutfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Ngjit nga memorja e sistemit</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this breakout address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. breakoutfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified breakout address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a breakout address (e.g. breakoutfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter breakout signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation>Hapur deri më %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
        <translation>%1/i pakonfirmuar</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 konfirmimet</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
        <location line="+5"/>
        <location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Sasia</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation>, nuk është transmetuar me sukses deri tani</translation>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>i/e panjohur</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detajet e transaksionit</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Ky panel tregon një përshkrim të detajuar të transaksionit</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Lloji</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresë</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Sasia</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>Hapur deri më %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>I/E konfirmuar(%1 konfirmime)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
        <translation>Ky bllok nuk është marrë nga asnjë nyje tjetër dhe ka shumë mundësi të mos pranohet!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>I krijuar por i papranuar</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Marrë me</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Dërguar drejt</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pagesë ndaj vetvetes</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minuar</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(p/a)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Marrë me</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Dërguar drejt</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minuar</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
        <translation>Skedar i ndarë me presje (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Lloji</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiketë</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Adresë</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Sasia</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>breakout version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or breakoutd</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: breakout.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: breakoutd.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong breakout will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=breakoutrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "breakout Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. breakout is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>breakout</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of breakout</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart breakout to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. breakout is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> |