<|file_name|>PRESUBMIT.py<|end_file_name|>
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Top-level presubmit script for V8.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
import sys
def _V8PresubmitChecks(input_api, output_api):
"""Runs the V8 presubmit checks."""
sys.path.append(input_api.os_path.join(
input_api.PresubmitLocalPath(), 'tools'))
from presubmit import CppLintProcessor
from presubmit import SourceProcessor
from presubmit import CheckGeneratedRuntimeTests
results = []
if not CppLintProcessor().Run(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError("C++ lint check failed"))
if not SourceProcessor().Run(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError(
"Copyright header, trailing whitespaces and two empty lines " \
"between declarations check failed"))
if not CheckGeneratedRuntimeTests(input_api.PresubmitLocalPath()):
results.append(output_api.PresubmitError(
"Generated runtime tests check failed"))
return results
def _CheckUnwantedDependencies(input_api, output_api):
"""Runs checkdeps on #include statements added in this
change. Breaking - rules is an error, breaking ! rules is a
warning.
"""
# We need to wait until we have an input_api object and use this
# roundabout construct to import checkdeps because this file is
# eval-ed and thus doesn't have __file__.
original_sys_path = sys.path
try:
sys.path = sys.path + [input_api.os_path.join(
input_api.PresubmitLocalPath(), 'buildtools', 'checkdeps')]
import checkdeps
from cpp_checker import CppChecker
from rules import Rule
finally:
# Restore sys.path to what it was before.
sys.path = original_sys_path
added_includes = []
for f in input_api.AffectedFiles():
if not CppChecker.IsCppFile(f.LocalPath()):
continue
changed_lines = [line for line_num, line in f.ChangedContents()]
added_includes.append([f.LocalPath(), changed_lines])
deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())
error_descriptions = []
warning_descriptions = []
for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
added_includes):
description_with_path = '%s\n %s' % (path, rule_description)
if rule_type == Rule.DISALLOW:
error_descriptions.append(description_with_path)
else:
warning_descriptions.append(description_with_path)
results = []
if error_descriptions:
results.append(output_api.PresubmitError(
'You added one or more #includes that violate checkdeps rules.',
error_descriptions))
if warning_descriptions:
results.append(output_api.PresubmitPromptOrNotify(
'You added one or more #includes of files that are temporarily\n'
'allowed but being removed. Can you avoid introducing the\n'
'#include? See relevant DEPS file(s) for details and contacts.',
warning_descriptions))
return results
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
results.extend(input_api.canned_checks.CheckOwners(
input_api, output_api, source_file_filter=None))
results.extend(_V8PresubmitChecks(input_api, output_api))
results.extend(_CheckUnwantedDependencies(input_api, output_api))
return results
def _SkipTreeCheck(input_api, output_api):
"""Check the env var whether we want to skip tree check.
Only skip if src/version.cc has been updated."""
src_version = 'src/version.cc'
FilterFile = lambda file: file.LocalPath() == src_version
if not input_api.AffectedSourceFiles(
lambda file: file.LocalPath() == src_version):<|fim▁hole|> return False
return input_api.environ.get('PRESUBMIT_TREE_CHECK') == 'skip'
def _CheckChangeLogFlag(input_api, output_api):
"""Checks usage of LOG= flag in the commit message."""
results = []
  if input_api.change.BUG and 'LOG' not in input_api.change.tags:
results.append(output_api.PresubmitError(
'An issue reference (BUG=) requires a change log flag (LOG=). '
'Use LOG=Y for including this commit message in the change log. '
'Use LOG=N or leave blank otherwise.'))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
results.extend(_CheckChangeLogFlag(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
results.extend(_CheckChangeLogFlag(input_api, output_api))
results.extend(input_api.canned_checks.CheckChangeHasDescription(
input_api, output_api))
if not _SkipTreeCheck(input_api, output_api):
results.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
json_url='http://v8-status.appspot.com/current?format=json'))
return results
def GetPreferredTryMasters(project, change):
return {
'tryserver.v8': {
'v8_linux_rel': set(['defaulttests']),
'v8_linux_dbg': set(['defaulttests']),
'v8_linux_nosnap_rel': set(['defaulttests']),
'v8_linux_nosnap_dbg': set(['defaulttests']),
'v8_linux64_rel': set(['defaulttests']),
'v8_linux_arm_dbg': set(['defaulttests']),
'v8_linux_arm64_rel': set(['defaulttests']),
'v8_linux_layout_dbg': set(['defaulttests']),
'v8_mac_rel': set(['defaulttests']),
'v8_win_rel': set(['defaulttests']),
},
}
<|file_name|>iomhelper.cpp<|end_file_name|>
/******************************************************************************
* $Id: iomhelper.cpp 10645 2007-01-18 02:22:39Z warmerdam $
*
* Project: Interlis 1/2 Translator
* Purpose: Implementation of ILI1Reader class.
* Author: Pirmin Kalberer, Sourcepole AG
*
******************************************************************************
* Copyright (c) 2004, Pirmin Kalberer, Sourcepole AG
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included
* in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
****************************************************************************/
#include "iomhelper.h"
#include "cpl_port.h"
CPL_CVSID("$Id: iomhelper.cpp 10645 2007-01-18 02:22:39Z warmerdam $");
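// Resolves a reference attribute: reads the attribute object on `obj`,
// follows its referenced OID, and fetches that object from the model basket.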
IOM_OBJECT GetAttrObj(IOM_BASKET model, IOM_OBJECT obj, const char* attrname) {
IOM_OBJECT attrobj = iom_getattrobj(obj, attrname, 0);
if (attrobj == NULL) return NULL;
const char *refoid=iom_getobjectrefoid(attrobj);
if (refoid == NULL) return NULL;
return iom_getobject(model, refoid);
}
int GetAttrObjPos(IOM_OBJECT obj, const char* attrname) {
IOM_OBJECT attrobj = iom_getattrobj(obj, attrname, 0);
if (attrobj == NULL) return -1;
return iom_getobjectreforderpos(attrobj);
}
const char* GetAttrObjName(IOM_BASKET model, IOM_OBJECT obj, const char* attrname) {
return iom_getattrvalue(GetAttrObj(model, obj, attrname), "name");
}
IOM_OBJECT GetTypeObj(IOM_BASKET model, IOM_OBJECT obj) {
IOM_OBJECT typeobj = GetAttrObj(model, obj, "type");
if (typeobj && EQUAL(iom_getobjecttag(typeobj), "iom04.metamodel.TypeAlias")) {
typeobj = GetTypeObj(model, GetAttrObj(model, typeobj, "aliasing"));
}
return typeobj;
}
const char* GetTypeName(IOM_BASKET model, IOM_OBJECT obj) {
IOM_OBJECT typeobj = GetTypeObj(model, obj);
if (typeobj == NULL) return "(null)";
return iom_getobjecttag(typeobj);
}
unsigned int GetCoordDim(IOM_BASKET model, IOM_OBJECT typeobj) {
unsigned int dim = 0;
//find attribute of this type with highest orderpos
IOM_ITERATOR modelelei=iom_iteratorobject(model);
IOM_OBJECT modelele=iom_nextobject(modelelei);
while(modelele){
const char *tag=iom_getobjecttag(modelele);
if (tag && EQUAL(tag,"iom04.metamodel.NumericType")) {
if (GetAttrObj(model, modelele, "coordType") == typeobj) {
unsigned int orderpos = GetAttrObjPos(modelele, "coordType");
if (orderpos > dim) dim = orderpos;
}
}
iom_releaseobject(modelele);
modelele=iom_nextobject(modelelei);
}
iom_releaseiterator(modelelei);
return dim;
}
const char* GetAttrObjName(IOM_BASKET model, const char* tagname) {
const char* result = NULL;
IOM_ITERATOR modelelei = iom_iteratorobject(model);
IOM_OBJECT modelele = iom_nextobject(modelelei);
while (modelele && result == NULL)
{
if(EQUAL(iom_getobjecttag(modelele), tagname)){
// get name of topic
result = iom_getattrvalue(modelele, "name");
}
iom_releaseobject(modelele);
modelele=iom_nextobject(modelelei);
}
iom_releaseiterator(modelelei);
return result;
}
<|file_name|>ContactResourceAssembler.java<|end_file_name|>
package msvcdojo;
import static org.springframework.hateoas.mvc.ControllerLinkBuilder.linkTo;
import org.springframework.hateoas.Resource;
import org.springframework.hateoas.mvc.ResourceAssemblerSupport;
import org.springframework.stereotype.Component;
// tag::ResourceAssembler[]
@Component
public class ContactResourceAssembler
    extends ResourceAssemblerSupport<Contact, ContactResourceAssembler.ContactResource> {
  public ContactResourceAssembler() {
super(ContactController.class, ContactResource.class);
}
@Override
public ContactResource toResource(Contact entity) {
ContactResource resource = createResourceWithId(entity.getId(), entity);
resource.add(linkTo(ContactController.class).slash(entity.getId()).slash("accounts").withRel("contact-accounts"));
return resource;
}
@Override
protected ContactResource instantiateResource(Contact entity) {
return new ContactResource(entity);
}
static class ContactResource extends Resource<Contact> {
public ContactResource(Contact contact) {
super(contact);
}
}
}
// end::ResourceAssembler[]
<|file_name|>24.d.ts<|end_file_name|>
import { LogoSnapchat24 } from "../../";
export = LogoSnapchat24;
<|file_name|>basic-snack-bar.module.spec.ts<|end_file_name|>
import { BasicSnackBarModule } from './basic-snack-bar.module';
describe('BasicSnackBarModule', () => {
let basicSnackBarModule: BasicSnackBarModule;
beforeEach(() => {
basicSnackBarModule = new BasicSnackBarModule();
});
it('should create an instance', () => {
    expect(basicSnackBarModule).toBeTruthy();
  });
});
<|file_name|>__init__.py<|end_file_name|>
# coding: utf-8
# pylint: disable=too-many-lines
import inspect
import sys
from typing import TypeVar, Optional, Sequence, Iterable, List, Any
from owlmixin import util
from owlmixin.errors import RequiredError, UnknownPropertiesError, InvalidTypeError
from owlmixin.owlcollections import TDict, TIterator, TList
from owlmixin.owlenum import OwlEnum, OwlObjectEnum
from owlmixin.transformers import (
DictTransformer,
JsonTransformer,
YamlTransformer,
ValueTransformer,
traverse_dict,
TOption,
)
T = TypeVar("T", bound="OwlMixin")
def _is_generic(type_):
return hasattr(type_, "__origin__")
def assert_extra(cls_properties, arg_dict, cls):
extra_keys: set = set(arg_dict.keys()) - {n for n, t in cls_properties}
if extra_keys:
raise UnknownPropertiesError(cls=cls, props=sorted(extra_keys))
def assert_none(value, type_, cls, name):
if value is None:
raise RequiredError(cls=cls, prop=name, type_=type_)
def assert_types(value, types: tuple, cls, name):
if not isinstance(value, types):
raise InvalidTypeError(cls=cls, prop=name, value=value, expected=types, actual=type(value))
def traverse(
type_, name, value, cls, force_snake_case: bool, force_cast: bool, restrict: bool
) -> Any:
# pylint: disable=too-many-return-statements,too-many-branches,too-many-arguments
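    # Dispatch order below: resolve string/forward-ref annotations to real
    # types first, then handle plain (non-generic) types, then recurse into
    # the generic containers TList / TIterator / TDict / TOption.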
if isinstance(type_, str):
type_ = sys.modules[cls.__module__].__dict__.get(type_)
if hasattr(type_, "__forward_arg__"):
# `_ForwardRef` (3.6) or `ForwardRef` (>= 3.7) includes __forward_arg__
# PEP 563 -- Postponed Evaluation of Annotations
type_ = sys.modules[cls.__module__].__dict__.get(type_.__forward_arg__)
if not _is_generic(type_):
assert_none(value, type_, cls, name)
if type_ is any:
return value
if type_ is Any:
return value
if isinstance(value, type_):
return value
if issubclass(type_, OwlMixin):
assert_types(value, (type_, dict), cls, name)
return type_.from_dict(
value, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
if issubclass(type_, ValueTransformer):
return type_.from_value(value)
if force_cast:
return type_(value)
assert_types(value, (type_,), cls, name)
return value
o_type = type_.__origin__
g_type = type_.__args__
if o_type == TList:
assert_none(value, type_, cls, name)
assert_types(value, (list,), cls, name)
return TList(
[
traverse(g_type[0], f"{name}.{i}", v, cls, force_snake_case, force_cast, restrict)
for i, v in enumerate(value)
]
)
if o_type == TIterator:
assert_none(value, type_, cls, name)
assert_types(value, (Iterable,), cls, name)
return TIterator(
traverse(g_type[0], f"{name}.{i}", v, cls, force_snake_case, force_cast, restrict)
for i, v in enumerate(value)
)
if o_type == TDict:
assert_none(value, type_, cls, name)
assert_types(value, (dict,), cls, name)
return TDict(
{
k: traverse(
g_type[0], f"{name}.{k}", v, cls, force_snake_case, force_cast, restrict
)
for k, v in value.items()
}
)
if o_type == TOption:
v = value.get() if isinstance(value, TOption) else value
        # TODO: For `from_csvf`... this needs to be simpler!!
if (isinstance(v, str) and v) or (not isinstance(v, str) and v is not None):
return TOption(
traverse(g_type[0], name, v, cls, force_snake_case, force_cast, restrict)
)
return TOption(None)
raise RuntimeError(f"This generics is not supported `{o_type}`")
class OwlMeta(type):
def __new__(cls, name, bases, class_dict):
ret_cls = type.__new__(cls, name, bases, class_dict)
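        # Cache bound methods by name so from_dict() can cheaply look up the
        # optional per-property hook methods (named `_<ClassName>___<prop>`).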
ret_cls.__methods_dict__ = dict(inspect.getmembers(ret_cls, inspect.ismethod))
return ret_cls
class OwlMixin(DictTransformer, JsonTransformer, YamlTransformer, metaclass=OwlMeta):
@classmethod
def from_dict(
cls,
d: dict,
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> T:
"""From dict to instance
:param d: Dict
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Instance
Usage:
>>> from owlmixin.samples import Human, Food, Japanese
>>> human: Human = Human.from_dict({
... "id": 1,
... "name": "Tom",
... "favorites": [
... {"name": "Apple", "names_by_lang": {"en": "Apple", "de": "Apfel"}},
... {"name": "Orange"}
... ]
... })
>>> human.id
1
>>> human.name
'Tom'
>>> human.favorites[0].name
'Apple'
>>> human.favorites[0].names_by_lang.get()["de"]
'Apfel'
You can use default value
>>> taro: Japanese = Japanese.from_dict({
... "name": 'taro'
... }) # doctest: +NORMALIZE_WHITESPACE
>>> taro.name
'taro'
>>> taro.language
'japanese'
        If you don't set `force_snake_case=False` explicitly, keys are transformed to snake case as follows.
>>> human: Human = Human.from_dict({
... "--id": 1,
... "<name>": "Tom",
... "favorites": [
        ...         {"name": "Apple", "namesByLang": {"en": "Apple"}}
        ...     ]
... })
>>> human.id
1
>>> human.name
'Tom'
>>> human.favorites[0].names_by_lang.get()["en"]
'Apple'
You can allow extra parameters (like ``hogehoge``) if you set `restrict=False`.
>>> apple: Food = Food.from_dict({
... "name": "Apple",
... "hogehoge": "ooooooooooooooooooooo",
... }, restrict=False)
>>> apple.to_dict()
{'name': 'Apple'}
You can prohibit extra parameters (like ``hogehoge``) if you set `restrict=True` (which is default).
>>> human = Human.from_dict({
... "id": 1,
... "name": "Tom",
... "hogehoge1": "ooooooooooooooooooooo",
... "hogehoge2": ["aaaaaaaaaaaaaaaaaa", "iiiiiiiiiiiiiiiii"],
... "favorites": [
... {"name": "Apple", "namesByLang": {"en": "Apple", "de": "Apfel"}},
... {"name": "Orange"}
... ]
... }) # doctest: +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
owlmixin.errors.UnknownPropertiesError:
. ∧,,_∧ ,___________________
⊂ ( ・ω・ )つ- < Unknown properties error
/// /::/ `-------------------
|::|/⊂ヽノ|::|」
/ ̄ ̄旦 ̄ ̄ ̄/|
______/ | |
|------ー----ー|/
<BLANKLINE>
`owlmixin.samples.Human` has unknown properties ['hogehoge1', 'hogehoge2']!!
<BLANKLINE>
* If you want to allow unknown properties, set `restrict=False`
* If you want to disallow unknown properties, add `hogehoge1` and `hogehoge2` to owlmixin.samples.Human
<BLANKLINE>
If you specify wrong type...
>>> human: Human = Human.from_dict({
... "id": 1,
... "name": "ichiro",
... "favorites": ["apple", "orange"]
... }) # doctest: +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
owlmixin.errors.InvalidTypeError:
. ∧,,_∧ ,___________________
⊂ ( ・ω・ )つ- < Invalid Type error
/// /::/ `-------------------
|::|/⊂ヽノ|::|」
/ ̄ ̄旦 ̄ ̄ ̄/|
______/ | |
|------ー----ー|/
<BLANKLINE>
`owlmixin.samples.Human#favorites.0 = apple` doesn't match expected types.
Expected type is one of ["<class 'owlmixin.samples.Food'>", "<class 'dict'>"], but actual type is `<class 'str'>`
<BLANKLINE>
* If you want to force cast, set `force_cast=True`
* If you don't want to force cast, specify value which has correct type
<BLANKLINE>
        If you don't specify required params (e.g. `name`)...
>>> human: Human = Human.from_dict({
... "id": 1
... }) # doctest: +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
owlmixin.errors.RequiredError:
. ∧,,_∧ ,___________________
⊂ ( ・ω・ )つ- < Required error
/// /::/ `-------------------
|::|/⊂ヽノ|::|」
/ ̄ ̄旦 ̄ ̄ ̄/|
______/ | |
|------ー----ー|/
<BLANKLINE>
`owlmixin.samples.Human#name: <class 'str'>` is empty!!
<BLANKLINE>
* If `name` is certainly required, specify anything.
* If `name` is optional, change type from `<class 'str'>` to `TOption[<class 'str'>]`
<BLANKLINE>
"""
if isinstance(d, cls):
return d
instance: T = cls() # type: ignore
d = util.replace_keys(d, {"self": "_self"}, force_snake_case)
properties = cls.__annotations__.items()
if restrict:
assert_extra(properties, d, cls)
for n, t in properties:
f = cls.__methods_dict__.get(f"_{cls.__name__}___{n}") # type: ignore
arg_v = f(d.get(n)) if f else d.get(n)
def_v = getattr(instance, n, None)
setattr(
instance,
n,
traverse(
type_=t,
name=n,
value=def_v if arg_v is None else arg_v,
cls=cls,
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
),
)
return instance
@classmethod
def from_optional_dict(
cls,
d: Optional[dict],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TOption[T]:
"""From dict to optional instance.
:param d: Dict
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Instance
Usage:
>>> from owlmixin.samples import Human
>>> Human.from_optional_dict(None).is_none()
True
>>> Human.from_optional_dict({}).get() # doctest: +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
owlmixin.errors.RequiredError:
. ∧,,_∧ ,___________________
⊂ ( ・ω・ )つ- < Required error
/// /::/ `-------------------
|::|/⊂ヽノ|::|」
/ ̄ ̄旦 ̄ ̄ ̄/|
______/ | |
|------ー----ー|/
<BLANKLINE>
`owlmixin.samples.Human#id: <class 'int'>` is empty!!
<BLANKLINE>
* If `id` is certainly required, specify anything.
* If `id` is optional, change type from `<class 'int'>` to `TOption[<class 'int'>]`
<BLANKLINE>
"""
return TOption(
cls.from_dict(
d, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
if d is not None
else None
)
@classmethod
def from_dicts(
cls,
ds: List[dict],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TList[T]:
"""From list of dict to list of instance
:param ds: List of dict
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: List of instance
Usage:
>>> from owlmixin.samples import Human
>>> humans: TList[Human] = Human.from_dicts([
... {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
... {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
... ])
>>> humans[0].name
'Tom'
>>> humans[1].name
'John'
"""
return TList(
[
cls.from_dict(
d, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
for d in ds
]
)
@classmethod
def from_iterable_dicts(
cls,
ds: Iterable[dict],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TIterator[T]:
"""From iterable dict to iterable instance
:param ds: Iterable dict
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Iterator
Usage:
>>> from owlmixin.samples import Human
>>> humans: TIterator[Human] = Human.from_iterable_dicts([
... {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
... {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
... ])
>>> humans.next_at(0).get().name
'Tom'
>>> humans.next_at(0).get().name
'John'
"""
return TIterator(
cls.from_dict(
d, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
for d in ds
)
@classmethod
def from_optional_dicts(
cls,
ds: Optional[List[dict]],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TOption[TList[T]]:
"""From list of dict to optional list of instance.
:param ds: List of dict
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: List of instance
Usage:
>>> from owlmixin.samples import Human
>>> Human.from_optional_dicts(None).is_none()
True
>>> Human.from_optional_dicts([]).get()
[]
"""
return TOption(
cls.from_dicts(
ds, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
if ds is not None
else None
)
@classmethod
def from_optional_iterable_dicts(
cls,
ds: Optional[Iterable[dict]],
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TOption[TIterator[T]]:
"""From iterable dict to optional iterable instance.
:param ds: Iterable dict
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Iterable instance
Usage:
>>> from owlmixin.samples import Human
>>> Human.from_optional_dicts(None).is_none()
True
>>> Human.from_optional_dicts([]).get()
[]
"""
return TOption(
cls.from_iterable_dicts(
ds, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
if ds is not None
else None
)
@classmethod
def from_dicts_by_key(
cls,
ds: dict,
*,
force_snake_case: bool = True,
force_cast: bool = False,
restrict: bool = True,
) -> TDict[T]:
"""From dict of dict to dict of instance
:param ds: Dict of dict
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Dict of instance
Usage:
>>> from owlmixin.samples import Human
>>> humans_by_name: TDict[Human] = Human.from_dicts_by_key({
... 'Tom': {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
... 'John': {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
... })
>>> humans_by_name['Tom'].name
'Tom'
>>> humans_by_name['John'].name
'John'
"""
return TDict(
{
k: cls.from_dict(
v, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
for k, v in ds.items()
}
)
@classmethod
def from_optional_dicts_by_key(
cls,
ds: Optional[dict],
*,
force_snake_case=True,
force_cast: bool = False,
restrict: bool = True,
) -> TOption[TDict[T]]:
"""From dict of dict to optional dict of instance.
:param ds: Dict of dict
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Dict of instance
Usage:
>>> from owlmixin.samples import Human
>>> Human.from_optional_dicts_by_key(None).is_none()
True
>>> Human.from_optional_dicts_by_key({}).get()
{}
"""
return TOption(
cls.from_dicts_by_key(
ds, force_snake_case=force_snake_case, force_cast=force_cast, restrict=restrict
)
if ds is not None
else None
)
@classmethod
def from_json(
cls, data: str, *, force_snake_case=True, force_cast: bool = False, restrict: bool = False
) -> T:
"""From json string to instance
:param data: Json string
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Instance
Usage:
>>> from owlmixin.samples import Human
>>> human: Human = Human.from_json('''{
... "id": 1,
... "name": "Tom",
... "favorites": [
... {"name": "Apple", "names_by_lang": {"en": "Apple", "de": "Apfel"}},
... {"name": "Orange"}
... ]
... }''')
>>> human.id
1
>>> human.name
'Tom'
>>> human.favorites[0].names_by_lang.get()["de"]
'Apfel'
"""
return cls.from_dict(
util.load_json(data),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_jsonf(
cls,
fpath: str,
encoding: str = "utf8",
*,
force_snake_case=True,
force_cast: bool = False,
restrict: bool = False,
) -> T:
"""From json file path to instance
:param fpath: Json file path
:param encoding: Json file encoding
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Instance
"""
return cls.from_dict(
util.load_jsonf(fpath, encoding),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_json_to_list(
cls, data: str, *, force_snake_case=True, force_cast: bool = False, restrict: bool = False
) -> TList[T]:
"""From json string to list of instance
:param data: Json string
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: List of instance
Usage:
>>> from owlmixin.samples import Human
>>> humans: TList[Human] = Human.from_json_to_list('''[
... {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
... {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
... ]''')
>>> humans[0].name
'Tom'
>>> humans[1].name
'John'
"""
return cls.from_dicts(
util.load_json(data),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_json_to_iterator(
cls, data: str, *, force_snake_case=True, force_cast: bool = False, restrict: bool = False
) -> TIterator[T]:
"""From json string to iterable instance
:param data: Json string
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Iterable instance
Usage:
>>> from owlmixin.samples import Human
>>> humans: TIterator[Human] = Human.from_json_to_iterator('''[
... {"id": 1, "name": "Tom", "favorites": [{"name": "Apple"}]},
... {"id": 2, "name": "John", "favorites": [{"name": "Orange"}]}
... ]''')
>>> humans.next_at(1).get().name
'John'
>>> humans.next_at(0).is_none()
True
"""
return cls.from_iterable_dicts(
util.load_json(data),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_jsonf_to_list(
cls,
fpath: str,
encoding: str = "utf8",
*,
force_snake_case=True,
force_cast: bool = False,
restrict: bool = False,
) -> TList[T]:
"""From json file path to list of instance
:param fpath: Json file path
:param encoding: Json file encoding
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: List of instance
"""
return cls.from_dicts(
util.load_jsonf(fpath, encoding),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_jsonf_to_iterator(
cls,
fpath: str,
encoding: str = "utf8",
*,
force_snake_case=True,
force_cast: bool = False,
restrict: bool = False,
) -> TIterator[T]:
"""From json file path to iterable instance
:param fpath: Json file path
:param encoding: Json file encoding
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Iterable instance
"""
return cls.from_iterable_dicts(
util.load_jsonf(fpath, encoding),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_yaml(
cls, data: str, *, force_snake_case=True, force_cast: bool = False, restrict: bool = True
) -> T:
"""From yaml string to instance
:param data: Yaml string
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Instance
Usage:
>>> from owlmixin.samples import Human
>>> human: Human = Human.from_yaml('''
... id: 1
... name: Tom
... favorites:
... - name: Apple
... names_by_lang:
... en: Apple
... de: Apfel
... - name: Orange
... ''')
>>> human.id
1
>>> human.name
'Tom'
>>> human.favorites[0].names_by_lang.get()["de"]
'Apfel'
"""
return cls.from_dict(
util.load_yaml(data),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_yamlf(
cls,
fpath: str,
encoding: str = "utf8",
*,
force_snake_case=True,
force_cast: bool = False,
restrict: bool = True,
) -> T:
"""From yaml file path to instance
:param fpath: Yaml file path
:param encoding: Yaml file encoding
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Instance
"""
return cls.from_dict(
util.load_yamlf(fpath, encoding),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_yaml_to_list(
cls, data: str, *, force_snake_case=True, force_cast: bool = False, restrict: bool = True
) -> TList[T]:
"""From yaml string to list of instance
:param data: Yaml string
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: List of instance
Usage:
>>> from owlmixin.samples import Human
>>> humans: TList[Human] = Human.from_yaml_to_list('''
... - id: 1
... name: Tom
... favorites:
... - name: Apple
... - id: 2
... name: John
... favorites:
... - name: Orange
... ''')
>>> humans[0].name
'Tom'
>>> humans[1].name
'John'
>>> humans[0].favorites[0].name
'Apple'
"""
return cls.from_dicts(
util.load_yaml(data),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_yaml_to_iterator(
cls, data: str, *, force_snake_case=True, force_cast: bool = False, restrict: bool = True
) -> TIterator[T]:
"""From yaml string to iterable instance
:param data: Yaml string
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Iterable instance
Usage:
>>> from owlmixin.samples import Human
>>> humans: TIterator[Human] = Human.from_yaml_to_iterator('''
... - id: 1
... name: Tom
... favorites:
... - name: Apple
... - id: 2
... name: John
... favorites:
... - name: Orange
... ''')
>>> human1 = humans.next_at(1).get()
>>> human1.name
'John'
>>> humans.next_at(0).is_none()
True
>>> human1.favorites[0].name
'Orange'
"""
return cls.from_iterable_dicts(
util.load_yaml(data),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_yamlf_to_list(
cls,
fpath: str,
encoding: str = "utf8",
*,
force_snake_case=True,
force_cast: bool = False,
restrict: bool = True,
) -> TList[T]:
"""From yaml file path to list of instance
:param fpath: Yaml file path
:param encoding: Yaml file encoding
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: List of instance
"""
return cls.from_dicts(
util.load_yamlf(fpath, encoding),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_yamlf_to_iterator(
cls,
fpath: str,
encoding: str = "utf8",
*,
force_snake_case=True,
force_cast: bool = False,
restrict: bool = True,
) -> TIterator[T]:
"""From yaml file path to iterable instance
:param fpath: Yaml file path
:param encoding: Yaml file encoding
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Iterable instance
"""
return cls.from_iterable_dicts(
util.load_yamlf(fpath, encoding),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
)
@classmethod
def from_csvf_to_list(
cls,
fpath: str,
fieldnames: Optional[Sequence[str]] = None,
encoding: str = "utf8",
*,
force_snake_case: bool = True,
restrict: bool = True,
) -> TList[T]:
"""From csv file path to list of instance
:param fpath: Csv file path
:param fieldnames: Specify csv header names if not included in the file
:param encoding: Csv file encoding
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param restrict: Prohibit extra parameters if True
:return: List of Instance
"""
return cls.from_dicts(
list(util.load_csvf(fpath, fieldnames, encoding)),
force_snake_case=force_snake_case,
force_cast=True,
restrict=restrict,
)
@classmethod
def from_csvf_to_iterator(
cls,
fpath: str,
fieldnames: Optional[Sequence[str]] = None,
encoding: str = "utf8",
*,
force_snake_case: bool = True,
restrict: bool = True,
) -> TIterator[T]:
"""From csv file path to iterable instance
:param fpath: Csv file path
:param fieldnames: Specify csv header names if not included in the file
:param encoding: Csv file encoding
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param restrict: Prohibit extra parameters if True
:return: Iterable Instance
"""
return cls.from_iterable_dicts(
util.load_csvf(fpath, fieldnames, encoding),
force_snake_case=force_snake_case,
force_cast=True,
restrict=restrict,
)
@classmethod
def from_json_url(
cls, url: str, *, force_snake_case=True, force_cast: bool = False, restrict: bool = False
) -> T:
"""From url which returns json to instance
:param url: Url which returns json
        :param force_snake_case: Keys are transformed to snake case to comply with PEP8 if True
:param force_cast: Cast forcibly if True
:param restrict: Prohibit extra parameters if True
:return: Instance
"""
return cls.from_dict(
util.load_json_url(url),
force_snake_case=force_snake_case,
force_cast=force_cast,
restrict=restrict,
        )
<|file_name|>check_i18n.py<|end_file_name|>
#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
"""check_i18n - compare texts in the source with the language files
Searches in the MoinMoin sources for calls of _() and tries to extract
the parameter. Then it checks the language modules if those parameters
are in the dictionary.
Usage: check_i18n.py [lang ...]
Without arguments, checks all languages in i18n or the specified
languages. Look into MoinMoin.i18n.__init__ for available language
names.
The script will run from the moin root directory, where the MoinMoin
package lives, or from MoinMoin/i18n where this script lives.
TextFinder class based on code by Seo Sanghyeon and the python compiler
package.
TODO: fix it for the changed i18n stuff of moin 1.6
@copyright: 2003 Florian Festi, Nir Soffer, Thomas Waldmann
@license: GNU GPL, see COPYING for details.
"""
output_encoding = 'utf-8'
# These lead to crashes (MemoryError - due to missing codecs?)
#blacklist_files = ["ja.py", "zh.py", "zh_tw.py"]
#blacklist_langs = ["ja", "zh", "zh-tw"]
# If you have cjkcodecs installed, use this:
blacklist_files = []
blacklist_langs = []
import sys, os, compiler
from compiler.ast import Name, Const, CallFunc, Getattr
class TextFinder:
""" Walk through AST tree and collect text from gettext calls
Find all calls to gettext function in the source tree and collect
the texts in a dict. Use compiler to create an abstract syntax tree
from each source file, then find the nodes for gettext function
call, and get the text from the call.
    Localized texts are usually translated during runtime by
    gettext functions and appear in the source as
_('text...'). TextFinder class finds calls to the '_' function in
any namespace, or your prefered gettext function.
Note that TextFinder will only retrieve text from function calls
with a constant argument like _('text'). Calls like _('text' % locals()),
    _('text 1' + 'text 2') are marked as bad calls in the report, and the
text is not retrieved into the dictionary.
Note also that texts in source can appear several times in the same
    file or different files, but they will only appear once in the
dictionary that this tool creates.
The dictionary value for each text is a dictionary of filenames each
    containing a list of (best guess) line numbers containing the text.
"""
def __init__(self, name='_'):
""" Init with the gettext function name or '_'"""
self._name = name # getText function name
self._dictionary = {} # Unique texts in the found texts
self._found = 0 # All good calls including duplicates
self._bad = 0 # Bad calls: _('%s' % var) or _('a' + 'b')
def setFilename(self, filename):
"""Remember the filename we are parsing"""
self._filename = filename
def visitModule(self, node):
""" Start the search from the top node of a module
This is the entry point into the search. When compiler.walk is
called it calls this method with the module node.
This is the place to initialize module specific data.
"""
self._visited = {} # init node cache - we will visit each node once
self._lineno = 'NA' # init line number
# Start walking in the module node
self.walk(node)
def walk(self, node):
""" Walk through all nodes """
if node in self._visited:
# We visited this node already
return
self._visited[node] = 1
if not self.parseNode(node):
for child in node.getChildNodes():
self.walk(child)
def parseNode(self, node):
""" Parse function call nodes and collect text """
# Get the current line number. Since not all nodes have a line number
# we save the last line number - it should be close to the gettext call
if node.lineno is not None:
self._lineno = node.lineno
if node.__class__ == CallFunc and node.args:
child = node.node
klass = child.__class__
if (# Standard call _('text')
(klass == Name and child.name == self._name) or
# A call to an object attribute: object._('text')
(klass == Getattr and child.attrname == self._name)):
if node.args[0].__class__ == Const:
# Good call with a constant _('text')
self.addText(node.args[0].value)
else:
self.addBadCall(node)
return 1
return 0
def addText(self, text):
""" Add text to dictionary and count found texts.
Note that number of texts in dictionary could be different from
the number of texts found, because some texts appear several
times in the code.
Each text value is a dictionary of filenames that contain the
text and each filename value is the list of line numbers with
the text. Missing line numbers are recorded as 'NA'.
self._lineno is the last line number we checked. It may be the line
number of the text, or near it.
"""
self._found = self._found + 1
# Create key for this text if needed
if text not in self._dictionary:
self._dictionary[text] = {}
# Create key for this filename if needed
textInfo = self._dictionary[text]
if self._filename not in textInfo:
textInfo[self._filename] = [self._lineno]
else:
textInfo[self._filename].append(self._lineno)
def addBadCall(self, node):
"""Called when a bad call like _('a' + 'b') is found"""
self._bad = self._bad + 1
print
print "<!> Warning: non-constant _ call:"
print " `%s`" % str(node)
print " `%s`:%s" % (self._filename, self._lineno)
# Accessors
def dictionary(self):
return self._dictionary
def bad(self):
return self._bad
def found(self):
return self._found
def visit(path, visitor):
visitor.setFilename(path)
tree = compiler.parseFile(path)
compiler.walk(tree, visitor)
# MoinMoin specific stuff follows
class Report:
"""Language status report"""
def __init__(self, lang, sourceDict):
self.__lang = lang
self.__sourceDict = sourceDict
self.__langDict = None
self.__missing = {}
self.__unused = {}
self.__error = None
self.__ready = 0
self.create()
def loadLanguage(self):
filename = i18n.filename(self.__lang)
self.__langDict = pysupport.importName("MoinMoin.i18n." + filename, "text")
def create(self):
"""Compare language text dict against source dict"""
self.loadLanguage()
if not self.__langDict:
self.__error = "Language %s not found!" % self.__lang
self.__ready = 1
return
# Collect missing texts
for text in self.__sourceDict:
if text not in self.__langDict:
self.__missing[text] = self.__sourceDict[text]
# Collect unused texts
for text in self.__langDict:
if text not in self.__sourceDict:
self.__unused[text] = self.__langDict[text]
self.__ready = 1
def summary(self):
"""Return summary dict"""
summary = {
'name': i18n.languages[self.__lang][i18n.ENAME].encode(output_encoding),
'maintainer': i18n.languages[self.__lang][i18n.MAINTAINER],
'total': len(self.__langDict),
'missing': len(self.__missing),
'unused': len(self.__unused),
'error': self.__error
}
return summary
def missing(self):
return self.__missing
def unused(self):
return self.__unused
if __name__ == '__main__':
import time
# Check that we run from the root directory where MoinMoin package lives
# or from the i18n directory when this script lives
if os.path.exists('MoinMoin/__init__.py'):
# Running from the root directory
MoinMoin_dir = os.curdir
elif os.path.exists(os.path.join(os.pardir, 'i18n')):
        # Running from i18n
MoinMoin_dir = os.path.join(os.pardir, os.pardir)
else:
print __doc__
sys.exit(1)
# Insert MoinMoin_dir into sys.path
sys.path.insert(0, MoinMoin_dir)
from MoinMoin import i18n
from MoinMoin.util import pysupport
textFinder = TextFinder()
found = 0
unique = 0
bad = 0
# Find gettext calls in the source
for root, dirs, files in os.walk(os.path.join(MoinMoin_dir, 'MoinMoin')):
for name in files:
if name.endswith('.py'):
if name in blacklist_files: continue
path = os.path.join(root, name)
#print '%(path)s:' % locals(),
visit(path, textFinder)
# Report each file's results
new_unique = len(textFinder.dictionary()) - unique
new_found = textFinder.found() - found
#print '%(new_unique)d (of %(new_found)d)' % locals()
# Warn about bad calls - these should be fixed!
new_bad = textFinder.bad() - bad
#if new_bad:
# print '### Warning: %(new_bad)d bad call(s)' % locals()
unique = unique + new_unique
bad = bad + new_bad
found = found + new_found
# Print report using wiki markup, so we can publish this on MoinDev
# !!! Todo:
# save executive summary for the wiki
# save separate report for each language to be sent to the
# language translator.
# Update the wiki using XML-RPC??
print "This page is generated by `MoinMoin/i18n/check_i18n.py`."
print "To recreate this report run `make check-i18n` and paste here"
print
print '----'
print
print '<<TableOfContents(2)>>'
print
print
print "= Translation Report ="
print
print "== Summary =="
print
print 'Created on %s' % time.asctime()
print
print ('\n%(unique)d unique texts in dictionary of %(found)d texts '
'in source.') % locals()
if bad:
print '\n%(bad)d bad calls.' % locals()
print
# Check languages from the command line or from moin.i18n against
# the source
if sys.argv[1:]:
languages = sys.argv[1:]
else:
languages = i18n.languages.keys()
for lang in blacklist_langs:
# problems, maybe due to encoding?
if lang in languages:
languages.remove(lang)
if 'en' in languages:
languages.remove('en') # there is no en lang file
languages.sort()
# Create report for all languages
report = {}
for lang in languages:
report[lang] = Report(lang, textFinder.dictionary())
# Print summary for all languages
print ("||<:>'''Language'''||<:>'''Texts'''||<:>'''Missing'''"
"||<:>'''Unused'''||")
for lang in languages:
print ("||%(name)s||<)>%(total)s||<)>%(missing)s||<)>%(unused)s||"
) % report[lang].summary()
# Print details
for lang in languages:
dict = report[lang].summary()
print
print "== %(name)s ==" % dict
print
print "Maintainer: <<MailTo(%(maintainer)s)>>" % dict
# Print missing texts, if any
if report[lang].missing():
print """
=== Missing texts ===
These items should ''definitely'' get fixed.
Maybe the corresponding english text in the source code was only changed
slightly, then you want to look for a similar text in the ''unused''
section below and modify i18n, so that it will match again.
"""
for text in report[lang].missing():
print " 1. `%r`" % text
# Print unused texts, if any
if report[lang].unused():
print """
=== Possibly unused texts ===
Be ''very careful'' and double-check before removing any of these
potentially unused items.
This program can't detect references done from wiki pages, from
userprefs options, from Icon titles etc.!
"""
for text in report[lang].unused():
print " 1. `%r`" % text<|fim▁end|>
| |
<|file_name|>helpers_sync.py<|end_file_name|>
import os
from geotrek.flatpages.models import FlatPage
from geotrek.flatpages.views import FlatPageViewSet, FlatPageMeta
from django.db.models import Q
class SyncRando:
def __init__(self, sync):
self.global_sync = sync
    def sync(self, lang):
        self.global_sync.sync_geojson(lang, FlatPageViewSet, 'flatpages.geojson', zipfile=self.global_sync.zipfile)
flatpages = FlatPage.objects.filter(published=True)
if self.global_sync.source:
flatpages = flatpages.filter(source__name__in=self.global_sync.source)
if self.global_sync.portal:
flatpages = flatpages.filter(Q(portal__name=self.global_sync.portal) | Q(portal=None))
for flatpage in flatpages:
name = os.path.join('meta', lang, flatpage.rando_url, 'index.html')
self.global_sync.sync_view(lang, FlatPageMeta.as_view(), name, pk=flatpage.pk,
                                       params={'rando_url': self.global_sync.rando_url})
<|file_name|>test_period_asfreq.py<|end_file_name|>
import pytest
from pandas._libs.tslibs.frequencies import get_freq
from pandas._libs.tslibs.period import period_asfreq, period_ordinal
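# The cases below spell out the fixed sub-daily conversion factors, e.g.
# 1 day == 24 hours == 1440 minutes == 86400 seconds == 86400000 ms.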
@pytest.mark.parametrize(
"freq1,freq2,expected",
[
("D", "H", 24),
("D", "T", 1440),
("D", "S", 86400),
("D", "L", 86400000),
("D", "U", 86400000000),
("D", "N", 86400000000000),
("H", "T", 60),
("H", "S", 3600),
("H", "L", 3600000),
("H", "U", 3600000000),
("H", "N", 3600000000000),
("T", "S", 60),
("T", "L", 60000),
("T", "U", 60000000),
("T", "N", 60000000000),
("S", "L", 1000),
("S", "U", 1000000),
("S", "N", 1000000000),
("L", "U", 1000),
("L", "N", 1000000),
("U", "N", 1000),
],
)
def test_intra_day_conversion_factors(freq1, freq2, expected):
assert period_asfreq(1, get_freq(freq1), get_freq(freq2), False) == expected
@pytest.mark.parametrize(
"freq,expected", [("A", 0), ("M", 0), ("W", 1), ("D", 0), ("B", 0)]
)
def test_period_ordinal_start_values(freq, expected):
# information for Jan. 1, 1970.
assert period_ordinal(1970, 1, 1, 0, 0, 0, 0, 0, get_freq(freq)) == expected
@pytest.mark.parametrize(
"dt,expected",
[
((1970, 1, 4, 0, 0, 0, 0, 0), 1),
((1970, 1, 5, 0, 0, 0, 0, 0), 2),
((2013, 10, 6, 0, 0, 0, 0, 0), 2284),
((2013, 10, 7, 0, 0, 0, 0, 0), 2285),
],
)
def test_period_ordinal_week(dt, expected):
args = dt + (get_freq("W"),)
assert period_ordinal(*args) == expected
@pytest.mark.parametrize(
"day,expected",
[
# Thursday (Oct. 3, 2013).
(3, 11415),
# Friday (Oct. 4, 2013).
(4, 11416),
        # Saturday (Oct. 5, 2013).
        (5, 11417),
        # Sunday (Oct. 6, 2013).
        (6, 11417),
        # Monday (Oct. 7, 2013).
        (7, 11417),
# Tuesday (Oct. 8, 2013).
(8, 11418),
],
)
def test_period_ordinal_business_day(day, expected):
args = (2013, 10, day, 0, 0, 0, 0, 0, get_freq("B"))
    assert period_ordinal(*args) == expected
<|file_name|>instances.py<|end_file_name|>
#!python
import re
import sys
import logging
import boto.ec2
from texttable import Texttable
from pprint import PrettyPrinter
from optparse import OptionParser
PP = PrettyPrinter( indent=2 )
###################
### Arg parsing
###################
parser = OptionParser("usage: %prog [options]" )
parser.add_option( "-v", "--verbose", default=None, action="store_true",
help="enable debug output" )
parser.add_option( "-H", "--no-header", default=None, action="store_true",
help="suppress table header" )
parser.add_option( "-r", "--region", default='us-east-1',
help="ec2 region to connect to" )
parser.add_option( "-g", "--group", default=None,
help="Include instances from these groups only (regex)" )
parser.add_option( "-G", "--exclude-group",default=None,
help="Exclude instances from these groups (regex)" )
parser.add_option( "-n", "--name", default=None,
help="Include instances with these names only (regex)" )
parser.add_option( "-N", "--exclude-name", default=None,
help="Exclude instances with these names (regex)" )
parser.add_option( "-t", "--type", default=None,
help="Include instances with these types only (regex)" )
parser.add_option( "-T", "--exclude-type", default=None,
help="Exclude instances with these types (regex)" )
parser.add_option( "-z", "--zone", default=None,
help="Include instances with these zones only (regex)" )
parser.add_option( "-Z", "--exclude-zone", default=None,
help="Exclude instances with these zones (regex)" )
parser.add_option( "-s", "--state", default=None,
help="Include instances with these states only (regex)" )
parser.add_option( "-S", "--exclude-state",default=None,
help="Exclude instances with these states (regex)" )
(options, args) = parser.parse_args()
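### Example (illustrative): list only running instances outside us-east-1a:
###   ./instances.py --state running --exclude-zone 'us-east-1a'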
###################
### Logging
###################
if options.verbose: log_level = logging.DEBUG
else: log_level = logging.INFO
logging.basicConfig(stream=sys.stdout, level=log_level)
logging.basicConfig(stream=sys.stderr, level=(logging.ERROR,logging.CRITICAL))
###################
### Connection
###################
conn = boto.ec2.connect_to_region( options.region )
###################
### Regexes
###################
regexes = {}
for opt in [ 'group', 'exclude_group', 'name', 'exclude_name',
'type', 'exclude_type', 'zone', 'exclude_zone',
'state', 'exclude_state' ]:
### we have a regex we should build
if options.__dict__.get( opt, None ):
regexes[ opt ] = re.compile( options.__dict__.get( opt ), re.IGNORECASE )
#PP.pprint( regexes )
def get_instances():
instances = [ i for r in conn.get_all_instances()
for i in r.instances ]
rv = [];
for i in instances:
### we will assume this node is one of the nodes we want
### to operate on, and we will unset this flag if any of
### the criteria fail
wanted_node = True
for re_name, regex in regexes.iteritems():
### What's the value we will be testing against?
if re.search( 'group', re_name ):
value = i.groups[0].name
elif re.search( 'name', re_name ):
value = i.tags.get( 'Name', '' )
elif re.search( 'type', re_name ):
value = i.instance_type
elif re.search( 'state', re_name ):
value = i.state
elif re.search( 'zone', re_name ):
### i.region is an object. i._placement is a string.
value = str(i._placement)
else:
logging.error( "Don't know what to do with: %s" % re_name )
continue
#PP.pprint( "name = %s value = %s pattern = %s" % ( re_name, value, regex.pattern ) )
### Should the regex match or not match?
if re.search( 'exclude', re_name ):
rv_value = None
else:
rv_value = True
### if the match is not what we expect, then clearly we
### don't care about the node
result = regex.search( value )
### we expected to get no results, excellent
if result == None and rv_value == None:
pass
### we expected to get some match, excellent
elif result is not None and rv_value is not None:
pass
### we don't care about this node
else:
wanted_node = False
break
if wanted_node:
rv.append( i )
return rv
def list_instances():
table = Texttable( max_width=0 )
table.set_deco( Texttable.HEADER )
table.set_cols_dtype( [ 't', 't', 't', 't', 't', 't', 't', 't' ] )
table.set_cols_align( [ 'l', 'l', 'l', 'l', 'l', 'l', 'l', 't' ] )
if not options.no_header:
### using add_row, so the headers aren't being centered, for easier grepping
table.add_row(
[ '# id', 'Name', 'Type', 'Zone', 'Group', 'State', 'Root', 'Volumes' ] )
instances = get_instances()
for i in instances:
### XXX there's a bug where you can't get the size of the volumes, it's
### always reported as None :(
volumes = ", ".join( [ ebs.volume_id for ebs in i.block_device_mapping.values()
if ebs.delete_on_termination == False ] )
        ### you can use i.region instead of i._placement, but it pretty
        ### prints to RegionInfo:us-east-1. For now, use the private version
        ### XXX EVERY column in this output had better have a non-zero length
        ### or texttable blows up with 'width must be greater than 0' error
table.add_row( [ i.id, i.tags.get( 'Name', ' ' ), i.instance_type,
i._placement , i.groups[0].name, i.state,
i.root_device_type, volumes or '-' ] )
#PP.pprint( i.__dict__ )
### table.draw() blows up if there is nothing to print
if instances or not options.no_header:
print table.draw()
if __name__ == '__main__':
    list_instances()
<|file_name|>swap-1.rs<|end_file_name|>
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::util;
pub fn main() {
let mut x = 3; let mut y = 7;
util::swap(&mut x, &mut y);
    assert!((x == 7)); assert!((y == 3));
}
<|file_name|>TrackNavigator.cpp<|end_file_name|>
/****************************************************************************************
* Copyright (c) 2007 Ian Monroe <[email protected]> *
 * Copyright (c) 2008 Soren Harward <[email protected]> *
 * Copyright (c) 2009 Téo Mrnjavac <[email protected]> *
* *
* This program is free software; you can redistribute it and/or modify it under *
* the terms of the GNU General Public License as published by the Free Software *
* Foundation; either version 2 of the License, or (at your option) version 3 or *
* any later version accepted by the membership of KDE e.V. (or its successor approved *
* by the membership of KDE e.V.), which shall act as a proxy defined in Section 14 of *
* version 3 of the license. *
* *
* This program is distributed in the hope that it will be useful, but WITHOUT ANY *
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A *
* PARTICULAR PURPOSE. See the GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License along with *
* this program. If not, see <http://www.gnu.org/licenses/>. *
****************************************************************************************/
#include "TrackNavigator.h"
#include "playlist/PlaylistModelStack.h"
#include "core/support/Amarok.h"
#include "amarokconfig.h"
#include "core/support/Debug.h"
#include <QQueue>
Playlist::TrackNavigator::TrackNavigator()
{
m_model = Playlist::ModelStack::instance()->top();
// Connect to the QAbstractItemModel signals of the source model.
// Ignore SIGNAL dataChanged: we don't need to know when a playlist item changes.
// Ignore SIGNAL layoutChanged: we don't need to know when rows are moved around.
connect( m_model->qaim(), SIGNAL( modelReset() ), this, SLOT( slotModelReset() ) );
connect( m_model->qaim(), SIGNAL( rowsAboutToBeRemoved( QModelIndex, int, int ) ), this, SLOT( slotRowsAboutToBeRemoved( QModelIndex, int, int ) ) );
// Ignore SIGNAL rowsInserted.
}
void
Playlist::TrackNavigator::queueId( const quint64 id )
{
QList<quint64> ids;
ids << id;
queueIds( ids );
}
void
Playlist::TrackNavigator::queueIds( const QList<quint64> &ids )
{
foreach( quint64 id, ids )
{
if( !m_queue.contains( id ) )
m_queue.enqueue( id );
}
}
void
Playlist::TrackNavigator::dequeueId( const quint64 id )
{
m_queue.removeAll( id );
}
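// Move the given queued id one slot toward the head of the queue; returns
// false when the id is not queued or is already first.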
bool
Playlist::TrackNavigator::queueMoveUp( const quint64 id )
{
const int idx = m_queue.indexOf( id );
if ( idx < 1 )
return false;
quint64 temp = m_queue[ idx - 1 ];
m_queue[ idx - 1 ] = m_queue[ idx ];
m_queue[ idx ] = temp;
return true;
}
bool
Playlist::TrackNavigator::queueMoveDown( const quint64 id )
{
const int idx = m_queue.indexOf( id );
if ( idx == -1 || idx == m_queue.count() - 1 )
return false;
quint64 temp = m_queue[ idx + 1 ];
m_queue[ idx + 1 ] = m_queue[ idx ];
m_queue[ idx ] = temp;
return true;
}
int
Playlist::TrackNavigator::queuePosition( const quint64 id ) const
{
return m_queue.indexOf( id );
}
QQueue<quint64> Playlist::TrackNavigator::queue()
{
return m_queue;
}
void
Playlist::TrackNavigator::slotModelReset()
{
DEBUG_BLOCK
m_queue.clear(); // We should check 'm_model's new contents, but this is unlikely to bother anyone.
}
void
Playlist::TrackNavigator::slotRowsAboutToBeRemoved(const QModelIndex& parent, int start, int end)
{
Q_UNUSED( parent );
for ( int row = start; row <= end; ++row )
{
const quint64 itemId = m_model->idAt( row );
m_queue.removeAll( itemId );
}
}
quint64
Playlist::TrackNavigator::bestFallbackItem()
{
quint64 item = m_model->activeId();
if ( !item )
if ( m_model->qaim()->rowCount() > 0 )
item = m_model->idAt( 0 );
return item;
}<|fim▁end|>
<|file_name|>frontend_spec.js<|end_file_name|><|fim▁begin|>// # Frontend Route tests
// As it stands, these tests depend on the database, and as such are integration tests.
// Mocking out the models to not touch the DB would turn these into unit tests, and should probably be done in future,
// But then again testing real code, rather than mock code, might be more useful...
const should = require('should');
const sinon = require('sinon');
const supertest = require('supertest');
const moment = require('moment');
const cheerio = require('cheerio');
const _ = require('lodash');
const testUtils = require('../../utils');
const configUtils = require('../../utils/configUtils');
const urlUtils = require('../../utils/urlUtils');
const config = require('../../../core/shared/config');
const settingsCache = require('../../../core/server/services/settings/cache');
const origCache = _.cloneDeep(settingsCache);
const ghost = testUtils.startGhost;
let request;
describe('Frontend Routing', function () {
function doEnd(done) {
return function (err, res) {
if (err) {
return done(err);
}
should.not.exist(res.headers['x-cache-invalidate']);
should.not.exist(res.headers['X-CSRF-Token']);
should.not.exist(res.headers['set-cookie']);
should.exist(res.headers.date);
done();
};
}
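    // addPosts resets the database and inserts the post/tag fixtures used
    // by the "Test with added posts" suite below.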
function addPosts(done) {
testUtils.clearData().then(function () {
return testUtils.initData();
}).then(function () {
return testUtils.fixtures.insertPostsAndTags();
}).then(function () {
done();
});
}
afterEach(function () {
sinon.restore();
});
before(function () {
return ghost()
.then(function () {
request = supertest.agent(config.get('url'));
});
});
describe('Test with Initial Fixtures', function () {
describe('Error', function () {
it('should 404 for unknown post with invalid characters', function (done) {
request.get('/$pec+acular~/')
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(404)
.expect(/Page not found/)
.end(doEnd(done));
});
it('should 404 for unknown frontend route', function (done) {
request.get('/spectacular/marvellous/')
.set('Accept', 'application/json')
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(404)
.expect(/Page not found/)
.end(doEnd(done));
});
it('should 404 for encoded char not 301 from uncapitalise', function (done) {
request.get('/|/')
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(404)
.expect(/Page not found/)
.end(doEnd(done));
});
});
describe('Default Redirects (clean URLS)', function () {
it('Single post should redirect without slash', function (done) {
request.get('/welcome')
.expect('Location', '/welcome/')
.expect('Cache-Control', testUtils.cacheRules.year)
.expect(301)
.end(doEnd(done));
});
it('Single post should redirect uppercase', function (done) {
request.get('/Welcome/')
.expect('Location', '/welcome/')
.expect('Cache-Control', testUtils.cacheRules.year)
.expect(301)
.end(doEnd(done));
});
it('Single post should sanitize double slashes when redirecting uppercase', function (done) {
request.get('///Google.com/')
.expect('Location', '/google.com/')
.expect('Cache-Control', testUtils.cacheRules.year)
.expect(301)
.end(doEnd(done));
});
it('AMP post should redirect without slash', function (done) {
request.get('/welcome/amp')
.expect('Location', '/welcome/amp/')
.expect('Cache-Control', testUtils.cacheRules.year)
.expect(301)
.end(doEnd(done));
});
it('AMP post should redirect uppercase', function (done) {
request.get('/Welcome/AMP/')
.expect('Location', '/welcome/amp/')
.expect('Cache-Control', testUtils.cacheRules.year)
.expect(301)
.end(doEnd(done));
});
});
});
describe('Test with added posts', function () {
before(addPosts);
describe('Static page', function () {
it('should respond with html', function (done) {
request.get('/static-page-test/')
.expect('Content-Type', /html/)
.expect('Cache-Control', testUtils.cacheRules.public)
.expect(200)
.end(function (err, res) {
const $ = cheerio.load(res.text);
should.not.exist(res.headers['x-cache-invalidate']);
should.not.exist(res.headers['X-CSRF-Token']);
should.not.exist(res.headers['set-cookie']);
should.exist(res.headers.date);
$('title').text().should.equal('This is a static page');
$('body.page-template').length.should.equal(1);
$('article.post').length.should.equal(1);
doEnd(done)(err, res);
});
});
it('should redirect without slash', function (done) {
request.get('/static-page-test')
.expect('Location', '/static-page-test/')
.expect('Cache-Control', testUtils.cacheRules.year)
.expect(301)
.end(doEnd(done));
});
describe('edit', function () {
it('should redirect without slash', function (done) {
request.get('/static-page-test/edit')
.expect('Location', '/static-page-test/edit/')
.expect('Cache-Control', testUtils.cacheRules.year)
.expect(301)
.end(doEnd(done));
});
it('should redirect to editor', function (done) {
request.get('/static-page-test/edit/')
.expect('Location', /ghost\/#\/editor\/\w+/)
.expect('Cache-Control', testUtils.cacheRules.public)
.expect(302)
.end(doEnd(done));
});
it('should 404 for non-edit parameter', function (done) {
request.get('/static-page-test/notedit/')
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(404)
.expect(/Page not found/)
.end(doEnd(done));
});
});
describe('edit with admin redirects disabled', function () {
before(function (done) {
configUtils.set('admin:redirects', false);
ghost({forceStart: true})
.then(function () {
request = supertest.agent(config.get('url'));
addPosts(done);
});
});
after(function (done) {
configUtils.restore();
ghost({forceStart: true})
.then(function () {
request = supertest.agent(config.get('url'));
addPosts(done);
});
});
it('should redirect without slash', function (done) {
request.get('/static-page-test/edit')
.expect('Location', '/static-page-test/edit/')
.expect('Cache-Control', testUtils.cacheRules.year)
.expect(301)
.end(doEnd(done));
});
it('should not redirect to editor', function (done) {
request.get('/static-page-test/edit/')
.expect(404)
.expect('Cache-Control', testUtils.cacheRules.private)
.end(doEnd(done));
});
});
describe('amp', function () {
it('should 404 for amp parameter', function (done) {
                    // NOTE: AMP is only supported for posts, so the router cannot
                    // distinguish whether the request targeted an AMP 'Page' or 'Post'
request.get('/static-page-test/amp/')
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(404)
.expect(/Post not found/)
.end(doEnd(done));
});
});
});
describe('Post preview', function () {
it('should display draft posts accessed via uuid', function (done) {
request.get('/p/d52c42ae-2755-455c-80ec-70b2ec55c903/')
.expect('Content-Type', /html/)
.expect(200)
.end(function (err, res) {
if (err) {
return done(err);
}
const $ = cheerio.load(res.text);
should.not.exist(res.headers['x-cache-invalidate']);
should.not.exist(res.headers['X-CSRF-Token']);
should.not.exist(res.headers['set-cookie']);
should.exist(res.headers.date);
$('title').text().should.equal('Not finished yet');
// @TODO: use theme from fixtures and don't rely on content/themes/casper
// $('.content .post').length.should.equal(1);
// $('.poweredby').text().should.equal('Proudly published with Ghost');
// $('body.post-template').length.should.equal(1);
// $('article.post').length.should.equal(1);
done();
});
});
it('should redirect published posts to their live url', function (done) {
request.get('/p/2ac6b4f6-e1f3-406c-9247-c94a0496d39d/')
.expect(301)
.expect('Location', '/short-and-sweet/')
.expect('Cache-Control', testUtils.cacheRules.year)
.end(doEnd(done));
});
it('404s unknown uuids', function (done) {
request.get('/p/aac6b4f6-e1f3-406c-9247-c94a0496d39f/')
.expect(404)
.end(doEnd(done));
});
});
describe('Post with Ghost in the url', function () {
// All of Ghost's admin depends on the /ghost/ in the url to work properly
            // Badly formed regexes can cause breakage if a post slug starts with the 5 letters ghost
it('should retrieve a blog post with ghost at the start of the url', function (done) {
request.get('/ghostly-kitchen-sink/')
.expect('Cache-Control', testUtils.cacheRules.public)
.expect(200)
.end(doEnd(done));
});
});
});
describe('Subdirectory (no slash)', function () {
let ghostServer;
before(function () {
configUtils.set('url', 'http://localhost/blog');
urlUtils.stubUrlUtilsFromConfig();
return ghost({forceStart: true, subdir: true})
.then(function (_ghostServer) {
ghostServer = _ghostServer;
request = supertest.agent(config.get('server:host') + ':' + config.get('server:port'));
});
});
after(function () {
configUtils.restore();
urlUtils.restore();
});
it('http://localhost should 404', function (done) {
request.get('/')
.expect(404)
.end(doEnd(done));
});
it('http://localhost/ should 404', function (done) {
request.get('/')
.expect(404)
.end(doEnd(done));
});
it('http://localhost/blog should 301 to http://localhost/blog/', function (done) {
request.get('/blog')
.expect(301)
.expect('Location', '/blog/')
.end(doEnd(done));
});
it('http://localhost/blog/ should 200', function (done) {
request.get('/blog/')
.expect(200)
.end(doEnd(done));
});
it('http://localhost/blog/welcome should 301 to http://localhost/blog/welcome/', function (done) {
request.get('/blog/welcome')
.expect(301)
.expect('Location', '/blog/welcome/')
.expect('Cache-Control', testUtils.cacheRules.year)
.end(doEnd(done));
});<|fim▁hole|>
it('http://localhost/blog/welcome/ should 200', function (done) {
request.get('/blog/welcome/')
.expect(200)
.end(doEnd(done));
});
it('/blog/tag/getting-started should 301 to /blog/tag/getting-started/', function (done) {
request.get('/blog/tag/getting-started')
.expect(301)
.expect('Location', '/blog/tag/getting-started/')
.expect('Cache-Control', testUtils.cacheRules.year)
.end(doEnd(done));
});
it('/blog/tag/getting-started/ should 200', function (done) {
request.get('/blog/tag/getting-started/')
.expect(200)
.end(doEnd(done));
});
it('/blog/welcome/amp/ should 200', function (done) {
request.get('/blog/welcome/amp/')
.expect(200)
.end(doEnd(done));
});
});
describe('Subdirectory (with slash)', function () {
let ghostServer;
before(function () {
configUtils.set('url', 'http://localhost/blog/');
urlUtils.stubUrlUtilsFromConfig();
return ghost({forceStart: true, subdir: true})
.then(function (_ghostServer) {
ghostServer = _ghostServer;
request = supertest.agent(config.get('server:host') + ':' + config.get('server:port'));
});
});
after(function () {
configUtils.restore();
urlUtils.restore();
});
it('http://localhost should 404', function (done) {
request.get('/')
.expect(404)
.end(doEnd(done));
});
it('http://localhost/ should 404', function (done) {
request.get('/')
.expect(404)
.end(doEnd(done));
});
it('/blog should 301 to /blog/', function (done) {
request.get('/blog')
.expect(301)
.expect('Location', '/blog/')
.end(doEnd(done));
});
it('/blog/ should 200', function (done) {
request.get('/blog/')
.expect(200)
.end(doEnd(done));
});
it('/blog/welcome should 301 to /blog/welcome/', function (done) {
request.get('/blog/welcome')
.expect(301)
.expect('Location', '/blog/welcome/')
.expect('Cache-Control', testUtils.cacheRules.year)
.end(doEnd(done));
});
it('/blog/welcome/ should 200', function (done) {
request.get('/blog/welcome/')
.expect(200)
.end(doEnd(done));
});
it('/blog/tag/getting-started should 301 to /blog/tag/getting-started/', function (done) {
request.get('/blog/tag/getting-started')
.expect(301)
.expect('Location', '/blog/tag/getting-started/')
.expect('Cache-Control', testUtils.cacheRules.year)
.end(doEnd(done));
});
it('/blog/tag/getting-started/ should 200', function (done) {
request.get('/blog/tag/getting-started/')
.expect(200)
.end(doEnd(done));
});
it('/blog/welcome/amp/ should 200', function (done) {
request.get('/blog/welcome/amp/')
.expect(200)
.end(doEnd(done));
});
it('should uncapitalise correctly with 301 to subdir', function (done) {
request.get('/blog/AAA/')
.expect('Location', '/blog/aaa/')
.expect('Cache-Control', testUtils.cacheRules.year)
.expect(301)
.end(doEnd(done));
});
});
// we'll use X-Forwarded-Proto: https to simulate an 'https://' request behind a proxy
describe('HTTPS', function () {
let ghostServer;
before(function () {
configUtils.set('url', 'http://localhost:2370/');
urlUtils.stubUrlUtilsFromConfig();
return ghost({forceStart: true})
.then(function (_ghostServer) {
ghostServer = _ghostServer;
request = supertest.agent(config.get('server:host') + ':' + config.get('server:port'));
});
});
after(function () {
configUtils.restore();
urlUtils.restore();
});
it('should set links to url over non-HTTPS', function (done) {
request.get('/')
.expect(200)
.expect(/<link rel="canonical" href="http:\/\/localhost:2370\/" \/\>/)
.expect(/<a href="http:\/\/localhost:2370">Ghost<\/a\>/)
.end(doEnd(done));
});
it('should set links over HTTPS besides canonical', function (done) {
request.get('/')
.set('X-Forwarded-Proto', 'https')
.expect(200)
.expect(/<link rel="canonical" href="http:\/\/localhost:2370\/" \/\>/)
.expect(/<a href="https:\/\/localhost:2370">Ghost<\/a\>/)
.end(doEnd(done));
});
});
// TODO: convert to unit tests
describe('Redirects (use redirects.json from test/utils/fixtures/data)', function () {
let ghostServer;
before(function () {
configUtils.set('url', 'http://localhost:2370/');
urlUtils.stubUrlUtilsFromConfig();
return ghost({forceStart: true})
.then(function (_ghostServer) {
ghostServer = _ghostServer;
request = supertest.agent(config.get('server:host') + ':' + config.get('server:port'));
});
});
after(function () {
configUtils.restore();
urlUtils.restore();
});
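        // These cases are driven by entries in the fixture redirects.json;
        // a typical entry has the shape {from: '/topic', to: '/',
        // permanent: false}, where permanent entries redirect with 301 and
        // year-long caching and temporary ones with 302 (illustrative shape).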
describe('1 case', function () {
it('with trailing slash', function (done) {
request.get('/post/10/a-nice-blog-post')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/a-nice-blog-post');
doEnd(done)(err, res);
});
});
it('without trailing slash', function (done) {
request.get('/post/10/a-nice-blog-post/')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/a-nice-blog-post');
doEnd(done)(err, res);
});
});
it('with query params', function (done) {
request.get('/topic?something=good')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/?something=good');
doEnd(done)(err, res);
});
});
it('with query params', function (done) {
request.get('/post/10/a-nice-blog-post?a=b')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/a-nice-blog-post?a=b');
doEnd(done)(err, res);
});
});
it('with case insensitive', function (done) {
request.get('/CaSe-InSeNsItIvE')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/redirected-insensitive');
doEnd(done)(err, res);
});
});
it('with case sensitive', function (done) {
request.get('/Case-Sensitive')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/redirected-sensitive');
doEnd(done)(err, res);
});
});
it('defaults to case sensitive', function (done) {
request.get('/Default-Sensitive')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/redirected-default');
doEnd(done)(err, res);
});
});
it('should not redirect with case sensitive', function (done) {
request.get('/casE-sensitivE')
.end(function (err, res) {
res.headers.location.should.not.eql('/redirected-sensitive');
res.statusCode.should.not.eql(302);
doEnd(done)(err, res);
});
});
it('should not redirect with default case sensitive', function (done) {
request.get('/defaulT-sensitivE')
.end(function (err, res) {
res.headers.location.should.not.eql('/redirected-default');
res.statusCode.should.not.eql(302);
doEnd(done)(err, res);
});
});
});
describe('2 case', function () {
it('with trailing slash', function (done) {
request.get('/my-old-blog-post/')
.expect(301)
.expect('Cache-Control', testUtils.cacheRules.year)
.end(function (err, res) {
res.headers.location.should.eql('/revamped-url/');
doEnd(done)(err, res);
});
});
it('without trailing slash', function (done) {
request.get('/my-old-blog-post')
.expect(301)
.expect('Cache-Control', testUtils.cacheRules.year)
.end(function (err, res) {
res.headers.location.should.eql('/revamped-url/');
doEnd(done)(err, res);
});
});
});
describe('3 case', function () {
it('with trailing slash', function (done) {
request.get('/what/')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/what-does-god-say');
doEnd(done)(err, res);
});
});
it('without trailing slash', function (done) {
request.get('/what')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/what-does-god-say');
doEnd(done)(err, res);
});
});
});
describe('4 case', function () {
it('with trailing slash', function (done) {
request.get('/search/label/&&&/')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/tag/&&&/');
doEnd(done)(err, res);
});
});
it('without trailing slash', function (done) {
request.get('/search/label/&&&/')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/tag/&&&/');
doEnd(done)(err, res);
});
});
});
describe('5 case', function () {
it('with trailing slash', function (done) {
request.get('/topic/')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/');
doEnd(done)(err, res);
});
});
it('without trailing slash', function (done) {
request.get('/topic')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/');
doEnd(done)(err, res);
});
});
});
describe('6 case', function () {
it('with trailing slash', function (done) {
request.get('/resources/download/')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/shubal-stearns');
doEnd(done)(err, res);
});
});
it('without trailing slash', function (done) {
request.get('/resources/download')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/shubal-stearns');
doEnd(done)(err, res);
});
});
});
describe('7 case', function () {
it('with trailing slash', function (done) {
request.get('/2016/11/welcome.html')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/welcome');
doEnd(done)(err, res);
});
});
});
describe('last case', function () {
it('default', function (done) {
request.get('/prefix/')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/blog/');
doEnd(done)(err, res);
});
});
it('with a custom path', function (done) {
request.get('/prefix/expect-redirect')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/blog/expect-redirect');
doEnd(done)(err, res);
});
});
});
describe('external url redirect', function () {
it('with trailing slash', function (done) {
request.get('/external-url/')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('https://ghost.org/');
doEnd(done)(err, res);
});
});
it('without trailing slash', function (done) {
request.get('/external-url')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('https://ghost.org/');
doEnd(done)(err, res);
});
});
it('with capturing group', function (done) {
request.get('/external-url/docs')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('https://ghost.org/docs');
doEnd(done)(err, res);
});
});
});
});
describe('Subdirectory redirects (use redirects.json from test/utils/fixtures/data)', function () {
        let ghostServer;
before(function () {
configUtils.set('url', 'http://localhost:2370/blog/');
urlUtils.stubUrlUtilsFromConfig();
return ghost({forceStart: true, subdir: true})
.then(function (_ghostServer) {
ghostServer = _ghostServer;
request = supertest.agent(config.get('server:host') + ':' + config.get('server:port'));
});
});
after(function () {
configUtils.restore();
urlUtils.restore();
});
describe('internal url redirect', function () {
it('should include the subdirectory', function (done) {
request.get('/blog/my-old-blog-post/')
.expect(301)
.expect('Cache-Control', testUtils.cacheRules.year)
.end(function (err, res) {
res.headers.location.should.eql('/blog/revamped-url/');
doEnd(done)(err, res);
});
});
it('should work with regex "from" redirects', function (done) {
request.get('/blog/capture1/whatever')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('/blog/whatever');
doEnd(done)(err, res);
});
});
});
describe('external url redirect', function () {
it('should not include the subdirectory', function (done) {
request.get('/blog/external-url/docs')
.expect(302)
.expect('Cache-Control', testUtils.cacheRules.public)
.end(function (err, res) {
res.headers.location.should.eql('https://ghost.org/docs');
doEnd(done)(err, res);
});
});
});
});
});<|fim▁end|>
<|file_name|>asr1k_routertype_driver.py<|end_file_name|><|fim▁begin|># Copyright 2015 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import uuidutils
from sqlalchemy.orm import exc
from sqlalchemy.sql import expression as expr
from neutron.db import models_v2
from neutron.extensions import l3
from neutron_lib import constants as l3_constants
from neutron_lib import exceptions as n_exc
from networking_cisco._i18n import _
from networking_cisco import backwards_compatibility as bc
from networking_cisco.plugins.cisco.common import cisco_constants
from networking_cisco.plugins.cisco.db.l3 import ha_db
from networking_cisco.plugins.cisco.db.l3 import l3_models
from networking_cisco.plugins.cisco.db.l3.l3_router_appliance_db import (
L3RouterApplianceDBMixin)
from networking_cisco.plugins.cisco.extensions import routerhostingdevice
from networking_cisco.plugins.cisco.extensions import routerrole
from networking_cisco.plugins.cisco.extensions import routertype
from networking_cisco.plugins.cisco.extensions import routertypeawarescheduler
from networking_cisco.plugins.cisco.l3 import drivers
LOG = logging.getLogger(__name__)
DEVICE_OWNER_GLOBAL_ROUTER_GW = cisco_constants.DEVICE_OWNER_GLOBAL_ROUTER_GW
HOSTING_DEVICE_ATTR = routerhostingdevice.HOSTING_DEVICE_ATTR
ROUTER_ROLE_GLOBAL = cisco_constants.ROUTER_ROLE_GLOBAL
ROUTER_ROLE_LOGICAL_GLOBAL = cisco_constants.ROUTER_ROLE_LOGICAL_GLOBAL
ROUTER_ROLE_HA_REDUNDANCY = cisco_constants.ROUTER_ROLE_HA_REDUNDANCY
TENANT_HSRP_GRP_RANGE = 1
TENANT_HSRP_GRP_OFFSET = 1064
EXT_HSRP_GRP_RANGE = 1
EXT_HSRP_GRP_OFFSET = 1064
N_ROUTER_PREFIX = 'nrouter-'
DEV_NAME_LEN = 14
class TopologyNotSupportedByRouterError(n_exc.Conflict):
message = _("Requested topology cannot be supported by router.")
class ASR1kL3RouterDriver(drivers.L3RouterBaseDriver):
def create_router_precommit(self, context, router_context):
pass
def create_router_postcommit(self, context, router_context):
pass
def update_router_precommit(self, context, router_context):
pass
def update_router_postcommit(self, context, router_context):
# Whenever a gateway is added to, or removed from, a router hosted on
# a hosting device, we must ensure that a global router is running
# (for add operation) or not running (for remove operation) on that
# hosting device.
current = router_context.current
if current[HOSTING_DEVICE_ATTR] is None:
return
e_context = context.elevated()
if current['gw_port_id']:
self._conditionally_add_global_router(e_context, current)
else:
self._conditionally_remove_global_router(
e_context, router_context.original, True)
def delete_router_precommit(self, context, router_context):
pass
def delete_router_postcommit(self, context, router_context):
pass
def schedule_router_precommit(self, context, router_context):
pass
def schedule_router_postcommit(self, context, router_context):
# When the hosting device hosts a Neutron router with external
# connectivity, a "global" router (modeled as a Neutron router) must
# also run on the hosting device (outside of any VRF) to enable the
# connectivity.
current = router_context.current
if current['gw_port_id'] and current[HOSTING_DEVICE_ATTR] is not None:
self._conditionally_add_global_router(context.elevated(), current)
def unschedule_router_precommit(self, context, router_context):
pass
def unschedule_router_postcommit(self, context, router_context):
# When there is no longer any router with external gateway hosted on
# a hosting device, the global router on that hosting device can also
# be removed.
current = router_context.current
hd_id = current[HOSTING_DEVICE_ATTR]
if current['gw_port_id'] and hd_id is not None:
self._conditionally_remove_global_router(context.elevated(),
current)
def add_router_interface_precommit(self, context, r_port_context):
# Inside an ASR1k, VLAN sub-interfaces are used to connect to internal
# neutron networks. Only one such sub-interface can be created for each
# VLAN. As the VLAN sub-interface is added to the VRF representing the
# Neutron router, we must only allow one Neutron router to attach to a
# particular Neutron subnet/network.
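        # Illustrative example: if router R1 is already attached to a
        # subnet on network N1, attaching a different router R2 to any
        # subnet on N1 is rejected below, since both attachments would
        # need the same VLAN sub-interface on the device.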
if (r_port_context.router_context.current[routerrole.ROUTER_ROLE_ATTR]
== ROUTER_ROLE_HA_REDUNDANCY):
# redundancy routers can be exempt as we check the user visible
# routers and the request will be rejected there.
return
e_context = context.elevated()
if r_port_context.current is None:
sn = self._core_plugin.get_subnet(e_context,
r_port_context.current_subnet_id)
net_id = sn['network_id']
else:
net_id = r_port_context.current['network_id']
router_id = r_port_context.router_context.current['id']
filters = {'network_id': [net_id],
'device_owner': [bc.constants.DEVICE_OWNER_ROUTER_INTF]}
for port in self._core_plugin.get_ports(e_context, filters=filters):
device_id = port['device_id']
if device_id is None:
continue
try:
router = self._l3_plugin.get_router(e_context, device_id)
if (router[routerrole.ROUTER_ROLE_ATTR] is None and
router['id'] != router_id):
# only a single router can connect to multiple subnets
# on the same internal network
raise TopologyNotSupportedByRouterError()
except n_exc.NotFound:
if self._l3_plugin.get_ha_group(e_context, device_id):
# Since this is a port for the HA VIP address, we can
# safely ignore it
continue
else:
LOG.warning(
                        'Spurious router port %s prevents attachment from'
' being performed. Try attaching again later, and '
'if the operation then fails again, remove the '
'spurious port', port['id'])
raise TopologyNotSupportedByRouterError()
def add_router_interface_postcommit(self, context, r_port_context):
pass
def remove_router_interface_precommit(self, context, r_port_context):
pass
def remove_router_interface_postcommit(self, context, r_port_context):
pass
def create_floatingip_precommit(self, context, fip_context):
pass
def create_floatingip_postcommit(self, context, fip_context):
pass
def update_floatingip_precommit(self, context, fip_context):
pass
def update_floatingip_postcommit(self, context, fip_context):
pass
def delete_floatingip_precommit(self, context, fip_context):
pass
def delete_floatingip_postcommit(self, context, fip_context):
pass
def ha_interface_ip_address_needed(self, context, router, port,
ha_settings_db, ha_group_uuid):
if port['device_owner'] == bc.constants.DEVICE_OWNER_ROUTER_GW:
return False
else:
return True
def generate_ha_group_id(self, context, router, port, ha_settings_db,
ha_group_uuid):
if port['device_owner'] in {bc.constants.DEVICE_OWNER_ROUTER_GW,
DEVICE_OWNER_GLOBAL_ROUTER_GW}:
ri_name = self._router_name(router['id'])[8:DEV_NAME_LEN]
group_id = int(ri_name, 16) % TENANT_HSRP_GRP_RANGE
group_id += TENANT_HSRP_GRP_OFFSET
return group_id
else:
net_id_digits = port['network_id'][:6]
group_id = int(net_id_digits, 16) % EXT_HSRP_GRP_RANGE
group_id += EXT_HSRP_GRP_OFFSET
return group_id
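    # Worked example (illustrative): for a router id starting with
    # 'd52c42ae', ri_name is 'd52c42' (characters 8..13 of
    # 'nrouter-d52c42ae...'), so group_id = int('d52c42', 16) %
    # TENANT_HSRP_GRP_RANGE + TENANT_HSRP_GRP_OFFSET; with the module
    # defaults above (range 1, offset 1064) every group id collapses
    # to 1064.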
def pre_backlog_processing(self, context):
LOG.info('Performing pre-backlog processing')
filters = {routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL]}
global_routers = self._l3_plugin.get_routers(context, filters=filters)
if not global_routers:
LOG.debug("There are no global routers")
return
for gr in global_routers:
filters = {
HOSTING_DEVICE_ATTR: [gr[HOSTING_DEVICE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_HA_REDUNDANCY, None]
}
invert_filters = {'gw_port_id': [None]}
num_rtrs = self._l3_plugin.get_routers_count_extended(
context, filters=filters, invert_filters=invert_filters)
LOG.debug("Global router %(name)s[%(id)s] with hosting_device "
"%(hd)s has %(num)d routers with gw_port set on that "
"device",
{'name': gr['name'], 'id': gr['id'],
'hd': gr[HOSTING_DEVICE_ATTR], 'num': num_rtrs, })
if num_rtrs == 0:
LOG.info(
"Global router %(name)s[id:%(id)s] is present for "
"hosting device %(hd)s but there are no tenant or "
"redundancy routers with gateway set on that hosting "
"device. Proceeding to delete global router.",
{'name': gr['name'], 'id': gr['id'],
'hd': gr[HOSTING_DEVICE_ATTR]})
self._delete_global_router(context, gr['id'])
filters = {
#TODO(bmelande): Filter on routertype of global router
#routertype.TYPE_ATTR: [routertype_id],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
log_global_routers = self._l3_plugin.get_routers(
context, filters=filters)
if log_global_routers:
log_global_router_id = log_global_routers[0]['id']
self._delete_global_router(context, log_global_router_id,
logical=True)
def post_backlog_processing(self, context):
pass
# ---------------- Create workflow functions -----------------
def _conditionally_add_global_router(self, context, tenant_router):
# We could filter on hosting device id but we don't so we get all
# global routers for this router type. We can then use that count to
# determine which ha priority a new global router should get.
filters = {
routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL]}
global_routers = self._l3_plugin.get_routers(
context, filters=filters)
hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers}
hosting_device_id = tenant_router[HOSTING_DEVICE_ATTR]
ext_nw_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id']
global_router = hd_to_gr_dict.get(hosting_device_id)
logical_global_router = self._get_logical_global_router(context,
tenant_router)
self._conditionally_add_auxiliary_external_gateway_port(
context, logical_global_router, ext_nw_id, tenant_router, True)
if global_router is None:
# must create global router on hosting device
global_router = self._create_global_router(
context, hosting_device_id, hd_to_gr_dict, tenant_router,
logical_global_router)
self._conditionally_add_auxiliary_external_gateway_port(
context, global_router, ext_nw_id, tenant_router)
self._l3_plugin.add_type_and_hosting_device_info(context,
global_router)
for ni in self._l3_plugin.get_notifiers(context, [global_router]):
if ni['notifier']:
ni['notifier'].routers_updated(context, ni['routers'])
def _conditionally_add_auxiliary_external_gateway_port(
self, context, global_router, ext_net_id, tenant_router,
provision_ha=False, port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
        # the global router may or may not have an interface on the
        # external network that the tenant router uses
filters = {
'device_id': [global_router['id']],
'device_owner': [port_type]}
ext_net_port = {
p['network_id']: p for p in
self._core_plugin.get_ports(context, filters=filters)}
if ext_net_id in ext_net_port:
            # already connected to the external network; this branch also
            # handles new subnets being added to that network
aux_gw_port = self._update_auxiliary_external_gateway_port(
context, global_router, ext_net_id, ext_net_port)
if provision_ha:
for subnet in aux_gw_port[ext_net_id]['fixed_ips']:
self._provision_port_ha(context, aux_gw_port[ext_net_id],
subnet, global_router)
else:
# not connected to the external network, so let's fix that
aux_gw_port = self._create_auxiliary_external_gateway_port(
context, global_router, ext_net_id, tenant_router, port_type)
if provision_ha:
for subnet in aux_gw_port['fixed_ips']:
self._provision_port_ha(context, aux_gw_port, subnet,
global_router)
def _update_auxiliary_external_gateway_port(
self, context, global_router, ext_net_id, port):
        # When a new subnet is added to an external network, the auxiliary
# gateway port in the global router must be updated with the new
# subnet_id so an ip from that subnet is assigned to the gateway port
ext_network = self._core_plugin.get_network(context, ext_net_id)
fixed_ips = port[ext_net_id]['fixed_ips']
# fetch the subnets the port is currently connected to
subnet_id_list = [fixedip['subnet_id'] for fixedip in fixed_ips]
# add the new subnet
for subnet_id in ext_network['subnets']:
if subnet_id not in subnet_id_list:
fixed_ip = {'subnet_id': subnet_id}
fixed_ips.append(fixed_ip)
self._core_plugin.update_port(context, port[ext_net_id]['id'],
({'port': {'fixed_ips':
fixed_ips}}))
return port
def _create_auxiliary_external_gateway_port(
self, context, global_router, ext_net_id, tenant_router,
port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
        # When a global router is connected to an external network, a
        # special type of gateway port is created on that network. Such a
        # port is called an auxiliary gateway port. It has an ip address on
# each subnet of the external network. A (logical) global router
# never has a traditional Neutron gateway port.
filters = {
'device_id': [tenant_router['id']],
'device_owner': [l3_constants.DEVICE_OWNER_ROUTER_GW]}
# fetch the gateway port of the *tenant* router so we can determine
# the CIDR of that port's subnet
gw_port = self._core_plugin.get_ports(context,
filters=filters)[0]
fixed_ips = self._get_fixed_ips_subnets(context, gw_port)
global_router_id = global_router['id']
aux_gw_port = self._core_plugin.create_port(context, {
'port': {
'tenant_id': '', # intentionally not set
'network_id': ext_net_id,
'mac_address': bc.constants.ATTR_NOT_SPECIFIED,
'fixed_ips': fixed_ips,
'device_id': global_router_id,
'device_owner': port_type,
'admin_state_up': True,
'name': ''}})
router_port = bc.RouterPort(
port_id=aux_gw_port['id'],
router_id=global_router_id,
port_type=port_type)
context.session.add(router_port)
return aux_gw_port
def _create_global_router(
self, context, hosting_device_id, hd_to_gr_dict, tenant_router,
logical_global_router):
r_spec = {'router': {
# global routers are not tied to any tenant
'tenant_id': '',
'name': self._global_router_name(hosting_device_id),
'admin_state_up': True}}
global_router, r_hd_b_db = self._l3_plugin.do_create_router(
context, r_spec, tenant_router[routertype.TYPE_ATTR], False,
True, hosting_device_id, ROUTER_ROLE_GLOBAL)
# make the global router a redundancy router for the logical
# global router (which we treat as a hidden "user visible
# router" (how's that for a contradiction of terms! :-) )
with context.session.begin(subtransactions=True):
ha_priority = (
ha_db.DEFAULT_MASTER_PRIORITY -
len(hd_to_gr_dict) * ha_db.PRIORITY_INCREASE_STEP)
r_b_b = ha_db.RouterRedundancyBinding(
redundancy_router_id=global_router['id'],
priority=ha_priority,
user_router_id=logical_global_router['id'])
context.session.add(r_b_b)
return global_router
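    # Note on the priority arithmetic (illustrative values, assuming
    # ha_db.DEFAULT_MASTER_PRIORITY is 100 and PRIORITY_INCREASE_STEP is
    # 10): the first global router gets priority 100, the next 90, and so
    # on, so earlier routers win the HSRP master election.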
def _get_logical_global_router(self, context, tenant_router):
# Since HA is also enabled on the global routers on each hosting device
# those global routers need HA settings and VIPs. We represent that
# using a Neutron router that is never instantiated/hosted. That
# Neutron router is referred to as the "logical global" router.
filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
logical_global_routers = self._l3_plugin.get_routers(
context, filters=filters)
if not logical_global_routers:
# must create logical global router
logical_global_router = self._create_logical_global_router(
context, tenant_router)
else:
logical_global_router = logical_global_routers[0]
self._update_ha_redundancy_level(context, logical_global_router, 1)
return logical_global_router
def _create_logical_global_router(self, context, tenant_router):
r_spec = {'router': {
# global routers are not tied to any tenant
'tenant_id': '',
'name': self._global_router_name('', logical=True),
'admin_state_up': True,
# set auto-schedule to false to keep this router un-hosted
routertypeawarescheduler.AUTO_SCHEDULE_ATTR: False}}
# notifications should never be sent for this logical router!
logical_global_router, r_hd_b_db = (
self._l3_plugin.do_create_router(
context, r_spec, tenant_router[routertype.TYPE_ATTR],
False, True, None, ROUTER_ROLE_LOGICAL_GLOBAL))
with context.session.begin(subtransactions=True):
r_ha_s_db = ha_db.RouterHASetting(
router_id=logical_global_router['id'],
ha_type=cfg.CONF.ha.default_ha_mechanism,
redundancy_level=1,
priority=ha_db.DEFAULT_MASTER_PRIORITY,
probe_connectivity=False,
probe_target=None,
probe_interval=None)
context.session.add(r_ha_s_db)
return logical_global_router
def _get_fixed_ips_subnets(self, context, gw_port):
nw = self._core_plugin.get_network(context, gw_port['network_id'])
subnets = [{'subnet_id': s} for s in nw['subnets']]
return subnets
def _provision_port_ha(self, context, ha_port, subnet, router,
ha_binding_db=None):
ha_group_uuid = uuidutils.generate_uuid()
router_id = router['id']
with context.session.begin(subtransactions=True):
ha_subnet_group = self._get_ha_group_by_ha_port_subnet_id(
context, ha_port['id'], subnet['subnet_id'])
if ha_subnet_group is not None:
return
if ha_binding_db is None:
ha_binding_db = self._get_ha_binding(context, router_id)
group_id = self.generate_ha_group_id(
context, router,
{'device_owner': DEVICE_OWNER_GLOBAL_ROUTER_GW}, ha_binding_db,
ha_group_uuid)
r_ha_g = ha_db.RouterHAGroup(
id=ha_group_uuid,
tenant_id='',
ha_type=ha_binding_db.ha_type,
group_identity=group_id,
ha_port_id=ha_port['id'],
extra_port_id=None,
subnet_id=subnet['subnet_id'],
user_router_id=router_id,
timers_config='',
tracking_config='',
other_config='')
context.session.add(r_ha_g)
def _get_ha_binding(self, context, router_id):
with context.session.begin(subtransactions=True):
query = context.session.query(ha_db.RouterHASetting)
query = query.filter(
ha_db.RouterHASetting.router_id == router_id)
return query.first()
def _get_ha_group_by_ha_port_subnet_id(self, context, port_id, subnet_id):
with context.session.begin(subtransactions=True):
query = context.session.query(ha_db.RouterHAGroup)
query = query.filter(ha_db.RouterHAGroup.ha_port_id == port_id,
ha_db.RouterHAGroup.subnet_id == subnet_id)
try:
r_ha_g = query.one()
except (exc.NoResultFound, exc.MultipleResultsFound):
return
return r_ha_g
# ---------------- Remove workflow functions -----------------
def _conditionally_remove_global_router(self, context, tenant_router,
update_operation=False):
filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL],
HOSTING_DEVICE_ATTR: [tenant_router[HOSTING_DEVICE_ATTR]]}
global_routers = self._l3_plugin.get_routers(context,
filters=filters)
hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers}
if global_routers:
global_router_id = global_routers[0]['id']
if not tenant_router or not tenant_router[l3.EXTERNAL_GW_INFO]:
# let l3 plugin's periodic backlog processing take care of the
# clean up of the global router<|fim▁hole|> ext_net_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id']
routertype_id = tenant_router[routertype.TYPE_ATTR]
hd_id = tenant_router[HOSTING_DEVICE_ATTR]
global_router = hd_to_gr_dict.get(hd_id)
port_deleted = self._conditionally_remove_auxiliary_gateway_port(
context, global_router_id, ext_net_id, routertype_id, hd_id,
update_operation)
if port_deleted is False:
                # since no auxiliary gateway port was deleted we can abort
                # now, as the auxiliary gateway port count cannot have
                # reached zero
return
filters = {
'device_id': [global_router_id],
'device_owner': [DEVICE_OWNER_GLOBAL_ROUTER_GW]}
num_aux_gw_ports = self._core_plugin.get_ports_count(
context, filters=filters)
if num_aux_gw_ports == 0:
# global router not needed any more so we delete it
self._delete_global_router(context, global_router_id)
do_notify = False
else:
do_notify = True
# process logical global router to remove its port
self._conditionally_remove_auxiliary_gateway_vip_port(
context, ext_net_id, routertype_id)
self._l3_plugin.add_type_and_hosting_device_info(context,
global_router)
if do_notify is True:
for ni in self._l3_plugin.get_notifiers(context,
[global_router]):
if ni['notifier']:
ni['notifier'].routers_updated(context, ni['routers'])
def _conditionally_remove_auxiliary_gateway_port(
self, context, router_id, ext_net_id, routertype_id,
hosting_device_id, update_operation=False):
num_rtrs = self._get_gateway_routers_count(
context, ext_net_id, routertype_id, None, hosting_device_id)
if ((num_rtrs <= 1 and update_operation is False) or
(num_rtrs == 0 and update_operation is True)):
# there are no tenant routers *on ext_net_id* that are serviced by
            # this global router so its aux gw port can be deleted
self._delete_auxiliary_gateway_ports(context, router_id,
ext_net_id)
return True
return False
def _conditionally_remove_auxiliary_gateway_vip_port(
self, context, ext_net_id, routertype_id):
filters = {routertype.TYPE_ATTR: [routertype_id],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
log_global_routers = self._l3_plugin.get_routers(context,
filters=filters)
if not log_global_routers:
return
self._update_ha_redundancy_level(context, log_global_routers[0], -1)
log_global_router_id = log_global_routers[0]['id']
num_global_rtrs = self._get_gateway_routers_count(
context, ext_net_id, routertype_id, ROUTER_ROLE_GLOBAL)
if num_global_rtrs == 0:
# there are no global routers *on ext_net_id* that are serviced by
            # this logical global router so its aux gw VIP port can be deleted
self._delete_auxiliary_gateway_ports(context, log_global_router_id,
ext_net_id)
filters[routerrole.ROUTER_ROLE_ATTR] = [ROUTER_ROLE_GLOBAL]
total_num_global_rtrs = self._l3_plugin.get_routers_count(
context, filters=filters)
if total_num_global_rtrs == 0:
# there are no global routers left that are serviced by this
# logical global router so it can be deleted
self._delete_global_router(context, log_global_router_id, True)
return False
def _delete_auxiliary_gateway_ports(
self, context, router_id, net_id=None,
port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
filters = {
'device_id': [router_id],
'device_owner': [port_type]}
if net_id is not None:
filters['network_id'] = [net_id]
for port in self._core_plugin.get_ports(context, filters=filters):
try:
self._core_plugin.delete_port(context, port['id'],
l3_port_check=False)
except (exc.ObjectDeletedError, n_exc.PortNotFound) as e:
LOG.info('Unable to delete port for Global router '
'%(r_id)s. It has likely been concurrently '
'deleted. %(err)s', {'r_id': router_id,
'err': e})
def _delete_global_router(self, context, global_router_id, logical=False):
# ensure we clean up any stale auxiliary gateway ports
self._delete_auxiliary_gateway_ports(context, global_router_id)
try:
if logical is True:
# We use parent class method as no special operations beyond
                # what the base implementation does are needed for logical
# global router
super(L3RouterApplianceDBMixin, self._l3_plugin).delete_router(
context, global_router_id)
else:
self._l3_plugin.delete_router(
context, global_router_id, unschedule=False)
except (exc.ObjectDeletedError, l3.RouterNotFound) as e:
g_r_type = 'Logical Global' if logical is True else 'Global'
LOG.info('Unable to delete %(g_r_type)s router %(r_id)s. It '
'has likely been concurrently deleted. %(err)s',
{'g_r_type': g_r_type, 'r_id': global_router_id,
'err': e})
except Exception as e:
g_r_type = 'Logical Global' if logical is True else 'Global'
            LOG.debug('Failed to delete %(g_r_type)s router %(r_id)s. It may '
'have been deleted concurrently. Error details: '
'%(err)s',
{'g_r_type': g_r_type, 'r_id': global_router_id,
'err': e})
def _get_gateway_routers_count(self, context, ext_net_id, routertype_id,
router_role, hosting_device_id=None):
# Determine number of routers (with routertype_id and router_role)
# that act as gateway to ext_net_id and that are hosted on
# hosting_device_id (if specified).
query = context.session.query(bc.Router)
if router_role in [None, ROUTER_ROLE_HA_REDUNDANCY]:
# tenant router roles
query = query.join(models_v2.Port,
models_v2.Port.id == bc.Router.gw_port_id)
role_filter = expr.or_(
l3_models.RouterHostingDeviceBinding.role == expr.null(),
l3_models.RouterHostingDeviceBinding.role ==
ROUTER_ROLE_HA_REDUNDANCY)
else:
# global and logical global routers
query = query.join(models_v2.Port,
models_v2.Port.device_owner == bc.Router.id)
role_filter = (
l3_models.RouterHostingDeviceBinding.role == router_role)
query = query.join(
l3_models.RouterHostingDeviceBinding,
l3_models.RouterHostingDeviceBinding.router_id == bc.Router.id)
query = query.filter(
role_filter,
models_v2.Port.network_id == ext_net_id,
l3_models.RouterHostingDeviceBinding.router_type_id ==
routertype_id)
if hosting_device_id is not None:
query = query.filter(
l3_models.RouterHostingDeviceBinding.hosting_device_id ==
hosting_device_id)
return query.count()
# ---------------- General support functions -----------------
def _update_ha_redundancy_level(self, context, logical_global_router,
delta):
with context.session.begin(subtransactions=True):
log_g_router_db = self._l3_plugin._get_router(
context, logical_global_router['id'])
log_g_router_db.ha_settings.redundancy_level += delta
context.session.add(log_g_router_db.ha_settings)
def _router_name(self, router_id):
return N_ROUTER_PREFIX + router_id
def _global_router_name(self, hosting_device_id, logical=False):
if logical is True:
return cisco_constants.LOGICAL_ROUTER_ROLE_NAME
else:
return '%s-%s' % (cisco_constants.ROUTER_ROLE_NAME_PREFIX,
hosting_device_id[-cisco_constants.ROLE_ID_LEN:])
@property
def _core_plugin(self):
return bc.get_plugin()
@property
def _l3_plugin(self):
return bc.get_plugin(bc.constants.L3)<|fim▁end|>
return
<|file_name|>BenchmarkTest06598.java<|end_file_name|><|fim▁begin|>/**
* OWASP Benchmark Project v1.1
*
* This file is part of the Open Web Application Security Project (OWASP)
* Benchmark Project. For details, please see
* <a href="https://www.owasp.org/index.php/Benchmark">https://www.owasp.org/index.php/Benchmark</a>.
*
* The Benchmark is free software: you can redistribute it and/or modify it under the terms
* of the GNU General Public License as published by the Free Software Foundation, version 2.
*
* The Benchmark is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details
*
* @author Nick Sanidas <a href="https://www.aspectsecurity.com">Aspect Security</a>
* @created 2015
*/
package org.owasp.benchmark.testcode;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
@WebServlet("/BenchmarkTest06598")
public class BenchmarkTest06598 extends HttpServlet {
private static final long serialVersionUID = 1L;
@Override
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doPost(request, response);
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
org.owasp.benchmark.helpers.SeparateClassRequest scr = new org.owasp.benchmark.helpers.SeparateClassRequest( request );
String param = scr.getTheValue("foo");
java.util.List<String> valuesList = new java.util.ArrayList<String>( );
valuesList.add("safe");
valuesList.add( param );
valuesList.add( "moresafe" );
valuesList.remove(0); // remove the 1st safe value
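		// after the removal the attacker-controlled "param" value sits at
		// index 0 of the list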
String bar = valuesList.get(0); // get the param value<|fim▁hole|>
try {
java.util.Properties Benchmarkprops = new java.util.Properties();
Benchmarkprops.load(this.getClass().getClassLoader().getResourceAsStream("Benchmark.properties"));
String algorithm = Benchmarkprops.getProperty("cryptoAlg2", "AES/ECB/PKCS5Padding");
javax.crypto.Cipher c = javax.crypto.Cipher.getInstance(algorithm);
} catch (java.security.NoSuchAlgorithmException e) {
System.out.println("Problem executing crypto - javax.crypto.Cipher.getInstance(java.lang.String) Test Case");
throw new ServletException(e);
} catch (javax.crypto.NoSuchPaddingException e) {
System.out.println("Problem executing crypto - javax.crypto.Cipher.getInstance(java.lang.String) Test Case");
throw new ServletException(e);
}
response.getWriter().println("Crypto Test javax.crypto.Cipher.getInstance(java.lang.String) executed");
}
}<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# generated from catkin/cmake/template/__init__.py.in
# keep symbol table as clean as possible by deleting all unnecessary symbols
from os import path as os_path
from sys import path as sys_path
from pkgutil import extend_path
__extended_path = "/home/pi/Documents/desenvolvimentoRos/src/tf2_ros/src".split(";")
for p in reversed(__extended_path):
sys_path.insert(0, p)
del p
del sys_path
__path__ = extend_path(__path__, __name__)
del extend_path
__execfiles = []
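# collect the source-space __init__.py files that this generated stub
# must execute to expose the real package contents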
for p in __extended_path:
src_init_file = os_path.join(p, __name__ + '.py')
if os_path.isfile(src_init_file):
__execfiles.append(src_init_file)
else:
src_init_file = os_path.join(p, __name__, '__init__.py')<|fim▁hole|> __execfiles.append(src_init_file)
del src_init_file
del p
del os_path
del __extended_path
for __execfile in __execfiles:
with open(__execfile, 'r') as __fh:
exec(__fh.read())
del __fh
del __execfile
del __execfiles<|fim▁end|>
if os_path.isfile(src_init_file):
<|file_name|>sessions.go<|end_file_name|><|fim▁begin|>/*
Real-time Online/Offline Charging System (OCS) for Telecom & ISP environments
Copyright (C) ITsysCOM GmbH
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package services
import (
"fmt"
"sync"
"github.com/cgrates/cgrates/engine"
v1 "github.com/cgrates/cgrates/apier/v1"
"github.com/cgrates/cgrates/config"
"github.com/cgrates/cgrates/servmanager"
"github.com/cgrates/cgrates/sessions"
"github.com/cgrates/cgrates/utils"
"github.com/cgrates/rpcclient"
)
// NewSessionService returns the Session Service
func NewSessionService(cfg *config.CGRConfig, dm *DataDBService,
server *utils.Server, internalChan chan rpcclient.ClientConnector,
exitChan chan bool, connMgr *engine.ConnManager) servmanager.Service {
return &SessionService{
connChan: internalChan,
cfg: cfg,
dm: dm,
server: server,
exitChan: exitChan,
connMgr: connMgr,
}
}
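// Illustrative wiring sketch (not part of the original file; the names of
// the surrounding services are assumed):
//
//	smg := NewSessionService(cfg, dmService, server,
//		make(chan rpcclient.ClientConnector, 1), exitChan, connMgr)
//	if smg.ShouldRun() {
//		if err := smg.Start(); err != nil {
//			utils.Logger.Err(err.Error())
//		}
//	}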
// SessionService implements Service interface
type SessionService struct {
sync.RWMutex
cfg *config.CGRConfig
dm *DataDBService
server *utils.Server
exitChan chan bool
sm *sessions.SessionS
rpc *v1.SMGenericV1
rpcv1 *v1.SessionSv1
connChan chan rpcclient.ClientConnector
	// used to stop the BiRPC server if necessary
bircpEnabled bool
connMgr *engine.ConnManager
}
// Start handles the service startup
func (smg *SessionService) Start() (err error) {
if smg.IsRunning() {
return fmt.Errorf("service aleady running")
}
var datadb *engine.DataManager
if smg.dm.IsRunning() {
dbchan := smg.dm.GetDMChan()
datadb = <-dbchan
dbchan <- datadb
}
smg.Lock()
defer smg.Unlock()
smg.sm = sessions.NewSessionS(smg.cfg, datadb, smg.connMgr)
	// start session syncing in a separate goroutine
go func(sm *sessions.SessionS) {
		if err := sm.ListenAndServe(smg.exitChan); err != nil {
utils.Logger.Err(fmt.Sprintf("<%s> error: %s!", utils.SessionS, err))
}
}(smg.sm)
// Pass internal connection via BiRPCClient
smg.connChan <- smg.sm
// Register RPC handler
smg.rpc = v1.NewSMGenericV1(smg.sm)
smg.rpcv1 = v1.NewSessionSv1(smg.sm) // methods with multiple options
if !smg.cfg.DispatcherSCfg().Enabled {
smg.server.RpcRegister(smg.rpc)
smg.server.RpcRegister(smg.rpcv1)
}
// Register BiRpc handlers
if smg.cfg.SessionSCfg().ListenBijson != "" {
smg.bircpEnabled = true
for method, handler := range smg.rpc.Handlers() {
smg.server.BiRPCRegisterName(method, handler)
}
for method, handler := range smg.rpcv1.Handlers() {
smg.server.BiRPCRegisterName(method, handler)
}
		// run this in its own goroutine
go func() {
if err := smg.server.ServeBiJSON(smg.cfg.SessionSCfg().ListenBijson, smg.sm.OnBiJSONConnect, smg.sm.OnBiJSONDisconnect); err != nil {
utils.Logger.Err(fmt.Sprintf("<%s> serve BiRPC error: %s!", utils.SessionS, err))
smg.Lock()
smg.bircpEnabled = false
smg.Unlock()
}
}()
}
return
}
// GetIntenternalChan returns the internal connection channel
func (smg *SessionService) GetIntenternalChan() (conn chan rpcclient.ClientConnector) {
return smg.connChan
}
// Reload handles the change of config
func (smg *SessionService) Reload() (err error) {
return
}<|fim▁hole|>func (smg *SessionService) Shutdown() (err error) {
smg.Lock()
defer smg.Unlock()
if err = smg.sm.Shutdown(); err != nil {
return
}
if smg.bircpEnabled {
smg.server.StopBiRPC()
smg.bircpEnabled = false
}
smg.sm = nil
smg.rpc = nil
smg.rpcv1 = nil
<-smg.connChan
return
}
// IsRunning returns if the service is running
func (smg *SessionService) IsRunning() bool {
smg.RLock()
defer smg.RUnlock()
return smg != nil && smg.sm != nil
}
// ServiceName returns the service name
func (smg *SessionService) ServiceName() string {
return utils.SessionS
}
// ShouldRun returns if the service should be running
func (smg *SessionService) ShouldRun() bool {
return smg.cfg.SessionSCfg().Enabled
}<|fim▁end|>
|
// Shutdown stops the service
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os
from distutils.sysconfig import get_python_inc
from distutils.core import Extension, setup
try:
from Cython.Build import cythonize
except ImportError:
print("Please install cython and try again.")
raise SystemExit
PACKAGES = [
'pdsa',
'pdsa.cardinality',
'pdsa.frequency',
'pdsa.helpers',
'pdsa.helpers.hashing',
'pdsa.helpers.storage',
'pdsa.membership',
'pdsa.rank',
]
def setup_package():
root = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(root, 'pdsa', '__about__.py')) as f:
about = {}
exec(f.read(), about)
with open(os.path.join(root, 'README.rst')) as f:
readme = f.read()
extensions = []
extensions.append(
Extension(
"pdsa.membership.bloom_filter",
language='c++',
sources=['pdsa/membership/bloom_filter.pyx'],
include_dirs=[
get_python_inc(plat_specific=True),
]
)
)
extensions.append(
Extension(
"pdsa.membership.counting_bloom_filter",
language='c++',
sources=['pdsa/membership/counting_bloom_filter.pyx'],
include_dirs=[
get_python_inc(plat_specific=True),
]
)
)
extensions.append(
Extension(
"pdsa.cardinality.linear_counter",
language='c++',
sources=['pdsa/cardinality/linear_counter.pyx'],
include_dirs=[
get_python_inc(plat_specific=True),
]
)
)
extensions.append(
Extension(
"pdsa.cardinality.probabilistic_counter",
language='c++',
sources=['pdsa/cardinality/probabilistic_counter.pyx'],
include_dirs=[
get_python_inc(plat_specific=True),
]
)
)
extensions.append(
Extension(
"pdsa.cardinality.hyperloglog",
language='c++',
sources=['pdsa/cardinality/hyperloglog.pyx'],
include_dirs=[
get_python_inc(plat_specific=True),
]
)
)
extensions.append(
Extension(
"pdsa.helpers.hashing.mmh",
language='c++',
sources=[
'pdsa/helpers/hashing/mmh.pyx',
os.path.join('pdsa/helpers/hashing', 'src', 'MurmurHash3.cpp')
],
include_dirs=[<|fim▁hole|> )
)
extensions.append(
Extension(
"pdsa.helpers.storage.bitvector",
language='c++',
sources=[
'pdsa/helpers/storage/bitvector.pyx',
os.path.join('pdsa/helpers/storage', 'src', 'BitField.cpp')
],
include_dirs=[
get_python_inc(plat_specific=True),
os.path.join('pdsa/helpers/storage', 'src')
]
)
)
extensions.append(
Extension(
"pdsa.helpers.storage.bitvector_counter",
language='c++',
sources=[
'pdsa/helpers/storage/bitvector_counter.pyx',
os.path.join('pdsa/helpers/storage', 'src', 'BitCounter.cpp')
],
include_dirs=[
get_python_inc(plat_specific=True),
os.path.join('pdsa/helpers/storage', 'src')
]
)
)
extensions.append(
Extension(
"pdsa.frequency.count_min_sketch",
language='c++',
sources=['pdsa/frequency/count_min_sketch.pyx'],
include_dirs=[
get_python_inc(plat_specific=True),
]
)
)
extensions.append(
Extension(
"pdsa.frequency.count_sketch",
language='c++',
sources=['pdsa/frequency/count_sketch.pyx'],
include_dirs=[
get_python_inc(plat_specific=True),
]
)
)
extensions.append(
Extension(
"pdsa.rank.random_sampling",
language='c++',
sources=['pdsa/rank/random_sampling.pyx'],
include_dirs=[
get_python_inc(plat_specific=True),
]
)
)
extensions.append(
Extension(
"pdsa.rank.qdigest",
language='c++',
sources=['pdsa/rank/qdigest.pyx'],
include_dirs=[
get_python_inc(plat_specific=True),
]
)
)
setup(
name="pdsa",
packages=PACKAGES,
package_data={'': ['*.pyx', '*.pxd', '*.cpp', '*.h']},
description=about['__summary__'],
long_description=readme,
keywords=about['__keywords__'],
author=about['__author__'],
author_email=about['__email__'],
version=about['__version__'],
url=about['__uri__'],
license=about['__license__'],
ext_modules=cythonize(
extensions,
compiler_directives={"language_level": "3str"}
),
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Cython',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Scientific/Engineering'
],
python_requires='>=3.5',
install_requires=["cython>=0.28"]
)
if __name__ == '__main__':
setup_package()<|fim▁end|>
|
get_python_inc(plat_specific=True),
os.path.join('pdsa/helpers/hashing', 'src')
]
|
<|file_name|>Buffalo.js<|end_file_name|><|fim▁begin|>{
"metadata" :
{
"formatVersion" : 3.1,
"sourceFile" : "Buffalo.obj",
"generatedBy" : "OBJConverter",
"vertices" : 2202,
"faces" : 4134,
"normals" : 2113,
"uvs" : 0,
"materials" : 4
},
"materials": [ {
"DbgColor" : 15658734,
"DbgIndex" : 0,
"DbgName" : "Material.006",
"colorDiffuse" : [0.287072, 0.214367, 0.076018],
"colorSpecular" : [0.5, 0.5, 0.5],
"illumination" : 2,
"opacity" : 1.0,
"opticalDensity" : 1.0,
"specularCoef" : 96.078431
},
{
"DbgColor" : 15597568,
"DbgIndex" : 1,
"DbgName" : "hoof",
"colorDiffuse" : [0.017583, 0.008414, 0.0],
"colorSpecular" : [0.0, 0.0, 0.0],
"illumination" : 2,
"opacity" : 1.0,
"opticalDensity" : 1.0,
"specularCoef" : 96.078431
},
{
"DbgColor" : 60928,
"DbgIndex" : 2,
"DbgName" : "horse_body",
"colorDiffuse" : [0.098968, 0.055305, 0.002045],
"colorSpecular" : [0.0, 0.0, 0.0],
"illumination" : 2,
"opacity" : 1.0,
"opticalDensity" : 1.0,
"specularCoef" : 96.078431
},
{
"DbgColor" : 238,
"DbgIndex" : 3,
"DbgName" : "tail",
"colorDiffuse" : [0.0, 0.0, 0.0],
"colorSpecular" : [0.0, 0.0, 0.0],
"illumination" : 2,
"opacity" : 1.0,
"opticalDensity" : 1.0,
"specularCoef" : 96.078431<|fim▁hole|>
"buffers": "Buffalo.bin"
}<|fim▁end|>
|
}],
|
<|file_name|>enginectrl.py<|end_file_name|><|fim▁begin|>from multiprocessing import Process, JoinableQueue, Manager, Lock, Value, Event
import wiringpi as wp
import RPi.GPIO as rpio
from slaveprocess import SlaveProcess
import time
rpio.setmode(rpio.BCM)
class PWMProcess(Process):
def __init__(self,**kwargs):
super(PWMProcess, self).__init__(**kwargs)
self.event_enable_pwm = kwargs['event_enable_pwm']
self.event_terminate = kwargs['event_terminate']
self.pwm_freq = kwargs['pwm_freq']
self.pwm_duty = kwargs['pwm_duty']
self.lock_freq = kwargs['lock_freq']
self.pin = kwargs['pin']
    def run(self):
        while self.event_enable_pwm.is_set():
            start_clock = time.time()
            with self.lock_freq:
                pwm_freq = self.pwm_freq.value
                pwm_duty = self.pwm_duty.value
            period = 1. / pwm_freq
            # Completion sketch (added): bit-bang one PWM period, assuming
            # pwm_duty is a percentage (0-100) and self.pin is an output pin.
            wp.digitalWrite(self.pin, 1)
            time.sleep(period * pwm_duty / 100.)
            wp.digitalWrite(self.pin, 0)
            time.sleep(max(0., period - (time.time() - start_clock)))
class DriveCtrl():
def __init__(self, **kwargs):
self.cfg = kwargs['config']
self.queues = kwargs['queues']
## motor parameters :
self.speeds = (10,20,50,100)
if self.cfg.lookup('drive.speeds') is not None:
            self.speeds = tuple([min(100, x) for x in self.cfg.lookup('drive.speeds')])  # cap at 100% duty cycle
self.max_speed = max(self.speeds)
self.nb_speeds = len(self.speeds)
self.current_speed = self.speeds[0]
self.queues['log'].put('drive:nb speeds : %d'%(self.nb_speeds))
## pins :
self.power_pins={'L':0,'R':0}
self.direction_pins = {'L':0,'R':0}
self.monitor_pins={'LF':0,'LB':0,'RB':0,'RF':0}
        self.pin_power_left = 0
        self.pin_power_right = 0
        self.pin_direction_left_forward = 0
        self.pin_direction_right_forward = 0
        self.pin_direction_left_rear = 0
        self.pin_direction_right_rear = 0
## PWM options :
if self.cfg.lookup('gpio.pwm_freq'):
self.pwm_freq = float(self.cfg.gpio.pwm_freq)
else:
self.pwm_freq = 50.0
###################### DEFAULT DRIVE VECTORS #######################
#################################
# COMMANDS
#################################
## Drive commands :
# North :
# _ _
# ^ | |_____| | ^ | |x| |
# | | | ^ | | | | | | |
# 1.0 | | |__^__| | | 1.0 | | | |
# | |_| |_| |
#
# North East :
# _ _
# ^ | |_ _ _| | | | |x|
# | | | ^ | | ^ | | | |
# 0.8 | | |__^__| | | 0.2 | | | |
# | |_| |_|
#
# East :
# _ _
# ^ | |_____| | | | | | |
# | | | ^ | | | | | |x|
# 1.0 | | |__^__| | | 1.0 | | | |
# | |_| |_| v
#
# South East :
# _ _
# | | |_____| | | | | |
# | | | ^ | | | | | | |
# 1.0 | | |__^__| | v 0.8 | | |x|
# v |_| |_|
#
# South :
# _ _
# | | |_____| | | | | | |
# | | | ^ | | | | | | |
# 1.0 | | |__^__| | | 1.0 | |x| |
# v |_| |_| v
#
# South West :
# _ _
# | |_____| | | | | | |
# | | ^ | | | | | | |
# 0.2 | | |__^__| | | 0.8 |x| | |
# v |_| |_| v
#
# West :
# _ _
# | | |_____| | ^ | | | |
# | | | ^ | | | |x| | |
# 1.0 | | |__^__| | | 1.0 | | | |
# v |_| |_| |
#
# North West :
# _ _
# ^ | |_____| | ^ |x| | |
# | | | ^ | | | | | | |
# 0.2 | |__^__| | | 0.8 | | | |
# |_| |_| |
#
# Full stop :
# _ _
# | |_____| | | | | |
# | | ^ | | | |x| |
# 0.0 | |__^__| | 0.0 | | | |
# |_| |_|
#
self.vec_north = (1.0,1.0,1,1,0,0)
self.vec_north_east = (0.8,0.2,1,1,0,0)
self.vec_east = (1.0,1.0,1,0,0,1)
self.vec_south_east = (0.8,0.2,0,0,1,1)
self.vec_south = (1.0,1.0,0,0,1,1)
self.vec_south_west = (0.2,0.8,0,0,1,1)
self.vec_west = (1.0,1.0,0,1,1,0)
self.vec_north_west = (0.2,0.8,1,1,0,0)
self.vec_full_stop = (0,0,0,0,0,0)
self.load_drive_vectors()
self.current_vector = self.vec_full_stop
## read the mapping of GPIO pins
self.read_gpio_map_from_config()
self.gpio_init()
self.dict_steer = {'8':self.vec_north, \
'9':self.vec_north_east, \
'6':self.vec_east,\
'3':self.vec_south_east,\
'2':self.vec_south,\
'1':self.vec_south_west,\
'4':self.vec_west,\
'7':self.vec_north_west,\
'5':self.vec_full_stop}
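        # Illustrative example (added comment): with current_speed == 50,
        # steering north applies vec_north == (1.0, 1.0, 1, 1, 0, 0), i.e.
        #   softPwmWrite(pin_power_left, 50)    # int(1.0 * 50)
        #   softPwmWrite(pin_power_right, 50)   # int(1.0 * 50)
        #   digitalWrite(pin_direction_left_forward, 1)
        #   digitalWrite(pin_direction_right_forward, 1)
        #   digitalWrite(pin_direction_left_rear, 0)
        #   digitalWrite(pin_direction_right_rear, 0)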
def load_drive_vectors(self):
for vecname in ['north','north_east','east','south_east','south','south_west','west','north_west']:
vecpath = 'drive.vectors.'+vecname
#self.queues['log'].put('drive: loading drive vector %s'%vecpath)
if self.cfg.lookup(vecpath) is not None:
vecarray = self.cfg.lookup(vecpath)
if len(vecarray) != 6:
self.queues['log'].put('drive:error: drive vector %s in config file'%(vecname))
setattr(self,'vec_'+vecname, tuple([x for x in vecarray]))
def read_gpio_map_from_config(self):
self.pin_power_left = self.cfg.gpio.pin_pwm_left
self.pin_power_right = self.cfg.gpio.pin_pwm_right
self.pin_direction_left_forward = self.cfg.gpio.pin_direction_left_forward
self.pin_direction_right_forward = self.cfg.gpio.pin_direction_right_forward
self.pin_direction_left_rear = self.cfg.gpio.pin_direction_left_rear
self.pin_direction_right_rear = self.cfg.gpio.pin_direction_right_rear
def gpio_init(self):
wp.wiringPiSetupSys()
# Set output for those pins :
wp.pinMode(self.pin_power_left, wp.OUTPUT)
wp.pinMode(self.pin_power_right, wp.OUTPUT)
wp.pinMode(self.pin_direction_left_forward, wp.OUTPUT)
wp.pinMode(self.pin_direction_right_forward, wp.OUTPUT)
wp.pinMode(self.pin_direction_left_rear, wp.OUTPUT)
wp.pinMode(self.pin_direction_right_rear, wp.OUTPUT)
## create the SoftPwm on power pins :
wp.softPwmCreate(self.pin_power_left, 0, self.max_speed)
wp.softPwmCreate(self.pin_power_right, 0, self.max_speed)
## reset everyone :
self.gpio_zero()
<|fim▁hole|> ## open pins for output :
rpio.setup(self.pin_power_left, rpio.OUT)
rpio.setup(self.pin_power_right, rpio.OUT)
rpio.setup(self.pin_direction_left_forward, rpio.OUT)
rpio.setup(self.pin_direction_right_forward, rpio.OUT)
rpio.setup(self.pin_direction_left_rear, rpio.OUT)
rpio.setup(self.pin_direction_right_rear, rpio.OUT)
## open pins for input :
# disabled for now
## setup software pwm
self.pwm_left = rpio.PWM(self.pin_power_left, self.pwm_freq)
self.pwm_right = rpio.PWM(self.pin_power_right, self.pwm_freq)
self.pwm_left.start(0)
self.pwm_right.start(0)
def gpio_zero(self):
# set everyone to 0
wp.softPwmWrite(self.pin_power_left, 0)
wp.softPwmWrite(self.pin_power_right, 0)
wp.digitalWrite(self.pin_direction_left_forward, 0)
wp.digitalWrite(self.pin_direction_right_forward, 0)
wp.digitalWrite(self.pin_direction_left_rear, 0)
wp.digitalWrite(self.pin_direction_right_rear, 0)
def rpio_zero(self):
self.pwm_left.ChangeDutyCycle(0)
self.pwm_right.ChangeDutyCycle(0)
rpio.output(self.pin_direction_left_forward, 0)
rpio.output(self.pin_direction_right_forward, 0)
rpio.output(self.pin_direction_left_rear, 0)
rpio.output(self.pin_direction_right_rear, 0)
def gpio_steer(self, drive_vector):
wp.softPwmWrite(self.pin_power_left, int(self.current_speed*drive_vector[0]))
wp.softPwmWrite(self.pin_power_right, int(self.current_speed*drive_vector[1]))
wp.digitalWrite(self.pin_direction_left_forward, drive_vector[2])
wp.digitalWrite(self.pin_direction_right_forward, drive_vector[3])
wp.digitalWrite(self.pin_direction_left_rear, drive_vector[4])
wp.digitalWrite(self.pin_direction_right_rear, drive_vector[5])
actual_vec = (int(self.current_speed*drive_vector[0]), int(self.current_speed*drive_vector[1]),drive_vector[2], drive_vector[3], drive_vector[4], drive_vector[5])
msg='drive:steering, drive vector: %s, ppl %d ppr %d pdlf %d pdrf %d pdlr %d pdrr %d'%(str(actual_vec),self.pin_power_left, self.pin_power_right, self.pin_direction_left_forward, self.pin_direction_right_forward, self.pin_direction_left_rear, self.pin_direction_right_rear)
self.queues['tx_msg'].put(msg)
self.queues['log'].put(msg)
def rpio_steer(self,drive_vector):
self.pwm_left.ChangeDutyCycle(self.current_speed*drive_vector[0])
self.pwm_right.ChangeDutyCycle(self.current_speed*drive_vector[1])
rpio.output(self.pin_direction_left_forward, drive_vector[2])
rpio.output(self.pin_direction_right_forward, drive_vector[3])
rpio.output(self.pin_direction_left_rear, drive_vector[4])
rpio.output(self.pin_direction_right_rear, drive_vector[5])
actual_vec = (int(self.current_speed*drive_vector[0]), int(self.current_speed*drive_vector[1]),drive_vector[2], drive_vector[3], drive_vector[4], drive_vector[5])
msg='drive:steering, drive vector: %s, ppl %d ppr %d pdlf %d pdrf %d pdlr %d pdrr %d\n'%(str(actual_vec),self.pin_power_left, self.pin_power_right, self.pin_direction_left_forward, self.pin_direction_right_forward, self.pin_direction_left_rear, self.pin_direction_right_rear)
self.current_vector = drive_vector
self.queues['tx_msg'].put(msg)
self.queues['log'].put(msg)
def rpio_cleanup(self):
self.pwm_left.stop()
self.pwm_right.stop()
rpio.cleanup()
def execute_drive_cmd(self,raw_cmd):
self.queues['log'].put("drive:executing cmd :%s"%raw_cmd)
if len(raw_cmd)>2:
if raw_cmd[1] == 'G':
## command 'DG[1-9]' : steering command
if raw_cmd[2] in self.dict_steer:
self.gpio_steer(self.dict_steer[raw_cmd[2]])
else:
self.queues['tx_msg'].put('drive:unknown steering command key \"%s\" (available : [1-9]).\n'%(raw_cmd[2]))
elif raw_cmd[1] == 'S':
## command 'DS[0-9]' : change speed
speed_setting = int(raw_cmd[2:])
if speed_setting >= 0:
self.current_speed = self.speeds[min(self.nb_speeds-1,speed_setting)]
self.gpio_steer(self.current_vector)
self.queues['log'].put('drive:current speed set to %s'%(str(self.current_speed)))
else:
self.queues['tx_msg'].put('drive:could not change speed setting to %d, must be positive'%(speed_setting))
elif raw_cmd[1] == 'M':
## command 'DM' : requesting monitoring data
pass
else:
self.queues['tx_msg'].put('drive:discarding malformed speed setting command \"%s\"\n'%raw_cmd)
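    # Illustrative command examples (added comment):
    #   execute_drive_cmd('DG8')  -> steer north (vec_north)
    #   execute_drive_cmd('DG5')  -> full stop (vec_full_stop)
    #   execute_drive_cmd('DS2')  -> select speed self.speeds[2]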
def checks(self, remote=False):
## check drive vectors :
for vecname in ['north','north_east','east','south_east','south','south_west','west','north_west']:
msg = 'drive:checking drive vector %s:%s'%(vecname,getattr(self,'vec_'+vecname).__repr__())
self.queues['log'].put(msg)
if remote:
self.queues['tx_msg'].put(msg)
## check speed settings
msg='drive:checking available speeds: %s'%(str(self.speeds))
self.queues['log'].put(msg)
if remote:
self.queues['tx_msg'].put(msg)
def shutdown(self):
self.gpio_zero()
#self.gpio_cleanup()
self.queues['log'].put('drive:stop.')
if __name__ == "__main__":
pwm_freq = 100
pin_power_left = 16
pin_power_right = 20
pin_direction_left_forward = 6
pin_direction_right_forward = 13
pin_direction_left_rear = 19
pin_direction_right_rear = 26
rpio.setmode(rpio.BCM)
## open pins for output :
rpio.setup(pin_power_left, rpio.OUT)
rpio.setup(pin_power_right, rpio.OUT)
    rpio.setup(pin_direction_left_forward, rpio.OUT)
    rpio.setup(pin_direction_right_forward, rpio.OUT)
    rpio.setup(pin_direction_left_rear, rpio.OUT)
    rpio.setup(pin_direction_right_rear, rpio.OUT)
## open pins for input :
# disabled for now
## setup software pwm
pwm_left = rpio.PWM(pin_power_left, pwm_freq)
pwm_right = rpio.PWM(pin_power_right, pwm_freq)
pwm_left.start(50)
pwm_right.start(50)
current_cycle_up = 50
current_cycle_down = 50
goon=True
periode=0.01
step=1
while goon:
try:
pwm_left.ChangeDutyCycle(current_cycle_up)
pwm_right.ChangeDutyCycle(current_cycle_down)
            print(current_cycle_up, current_cycle_down)
current_cycle_up = abs((current_cycle_up + step)%100)
current_cycle_down = abs((current_cycle_down - step)%100)
time.sleep(periode)
except KeyboardInterrupt as e:
goon=False
rpio.cleanup()<|fim▁end|>
|
def rpio_init(self):
|
<|file_name|>mobile.js<|end_file_name|><|fim▁begin|>/*
* VITacademics
* Copyright (C) 2014-2016 Aneesh Neelam <[email protected]>
* Copyright (C) 2014-2016 Ayush Agarwal <[email protected]>
*
* This file is part of VITacademics.
*
* VITacademics is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* VITacademics is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with VITacademics. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
const path = require('path');
const status = require(path.join(__dirname, '..', 'status'));
const handler = function (app) {
const onJob = function (job, ack) {
job.status = status.toDo;
console.log(JSON.stringify(job));
ack();
};
  app.rabbit.queue(app.rabbit.queues.mobile)
    .consume(onJob, { noAck:<|fim▁hole|> });
};
module.exports = handler;<|fim▁end|>
|
false
|
<|file_name|>contentscript.js<|end_file_name|><|fim▁begin|>/*******************************************************************************
uBlock Origin - a browser extension to block requests.
Copyright (C) 2014-present Raymond Hill
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
Home: https://github.com/gorhill/uBlock
*/
'use strict';
//console.log('contentscript.js: '+location.href, (window.self !== window.top?'[iframe]':''));
/*******************************************************************************
+--> domCollapser
|
|
domWatcher--+
| +-- domSurveyor
| |
+--> domFilterer --+-- [domLogger]
| |
| +-- [domInspector]
|
[domProceduralFilterer]
domWatcher:
Watches for changes in the DOM, and notify the other components about these
changes.
domCollapser:
Enforces the collapsing of DOM elements for which a corresponding
resource was blocked through network filtering.
domFilterer:
Enforces the filtering of DOM elements, by feeding it cosmetic filters.
domProceduralFilterer:
Enforce the filtering of DOM elements through procedural cosmetic filters.
Loaded on demand, only when needed.
domSurveyor:
Surveys the DOM to find new cosmetic filters to apply to the current page.
domLogger:
Surveys the page to find and report the injected cosmetic filters blocking
actual elements on the current page. This component is dynamically loaded
IF AND ONLY IF uBO's logger is opened.
If page is whitelisted:
- domWatcher: off
- domCollapser: off
- domFilterer: off
- domSurveyor: off
- domLogger: off
I verified that the code in this file is completely flushed out of memory
when a page is whitelisted.
If cosmetic filtering is disabled:
- domWatcher: on
- domCollapser: on
- domFilterer: off
- domSurveyor: off
- domLogger: off
If generic cosmetic filtering is disabled:
- domWatcher: on
- domCollapser: on
- domFilterer: on
- domSurveyor: off
- domLogger: on if uBO logger is opened
If generic cosmetic filtering is enabled:
- domWatcher: on
- domCollapser: on
- domFilterer: on
- domSurveyor: on
- domLogger: on if uBO logger is opened
Additionally, the domSurveyor can turn itself off once it decides that
it has become pointless (repeatedly not finding new cosmetic filters).
The domFilterer makes use of platform-dependent user stylesheets[1].
[1] "user stylesheets" refer to local CSS rules which have priority over,
and can't be overriden by a web page's own CSS rules.
*/
// Abort execution if our global vAPI object does not exist.
// https://github.com/chrisaljoudi/uBlock/issues/456
// https://github.com/gorhill/uBlock/issues/2029
// >>>>>>>> start of HUGE-IF-BLOCK
if ( typeof vAPI === 'object' && !vAPI.contentScript ) {
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
vAPI.contentScript = true;
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
// https://github.com/uBlockOrigin/uBlock-issues/issues/688#issuecomment-663657508
{
let context = self;
try {
while (
context !== self.top &&
context.location.href.startsWith('about:blank') &&
context.parent.location.href
) {
context = context.parent;
}
} catch(ex) {
}
vAPI.effectiveSelf = context;
}
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
vAPI.userStylesheet = {
added: new Set(),
removed: new Set(),
apply: function(callback) {
if ( this.added.size === 0 && this.removed.size === 0 ) { return; }
vAPI.messaging.send('vapi', {
what: 'userCSS',
add: Array.from(this.added),
remove: Array.from(this.removed),
}).then(( ) => {
if ( callback instanceof Function === false ) { return; }
callback();
});
this.added.clear();
this.removed.clear();
},
add: function(cssText, now) {
if ( cssText === '' ) { return; }
this.added.add(cssText);
if ( now ) { this.apply(); }
},
remove: function(cssText, now) {
if ( cssText === '' ) { return; }
this.removed.add(cssText);
if ( now ) { this.apply(); }
}
};
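// Illustrative usage sketch (added; not part of the original source):
// add()/remove() only batch changes; apply() flushes the batch to the
// background page as a single 'userCSS' message.
//
//   vAPI.userStylesheet.add('.ad\n{display:none!important;}');
//   vAPI.userStylesheet.apply();  // flush the batch
//   vAPI.userStylesheet.add('.ad2\n{display:none!important;}', true);  // add and flush now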
/******************************************************************************/
/******************************************************************************/
/*******************************************************************************
The purpose of SafeAnimationFrame is to take advantage of the behavior of
window.requestAnimationFrame[1]. If we use an animation frame as a timer,
then this timer is described as follow:
- time events are throttled by the browser when the viewport is not visible --
there is no point for uBO to play with the DOM if the document is not
visible.
- time events are micro tasks[2].
- time events are synchronized to monitor refresh, meaning that they can fire
  at most once per monitor refresh, i.e. every 1/60th of a second (typically).
If a delay value is provided, a plain timer is first used. Plain timers are
macro-tasks, so this is good when uBO wants to yield to more important tasks
on a page. Once the plain timer elapse, an animation frame is used to trigger
the next time at which to execute the job.
[1] https://developer.mozilla.org/en-US/docs/Web/API/window/requestAnimationFrame
[2] https://jakearchibald.com/2015/tasks-microtasks-queues-and-schedules/
*/
vAPI.useShadowDOM = false; // ADN
// https://github.com/gorhill/uBlock/issues/2147
vAPI.SafeAnimationFrame = class {
constructor(callback) {
this.fid = this.tid = undefined;
this.callback = callback;
}
start(delay) {
if ( self.vAPI instanceof Object === false ) { return; }
if ( delay === undefined ) {
if ( this.fid === undefined ) {
this.fid = requestAnimationFrame(( ) => { this.onRAF(); } );
}
if ( this.tid === undefined ) {
this.tid = vAPI.setTimeout(( ) => { this.onSTO(); }, 20000);
}
return;
}
if ( this.fid === undefined && this.tid === undefined ) {
this.tid = vAPI.setTimeout(( ) => { this.macroToMicro(); }, delay);
}
}
clear() {
if ( this.fid !== undefined ) {
cancelAnimationFrame(this.fid);
this.fid = undefined;
}
if ( this.tid !== undefined ) {
clearTimeout(this.tid);
this.tid = undefined;
}
}
macroToMicro() {
this.tid = undefined;
this.start();
}
onRAF() {
if ( this.tid !== undefined ) {
clearTimeout(this.tid);
this.tid = undefined;
}
this.fid = undefined;
this.callback();
}
onSTO() {
if ( this.fid !== undefined ) {
cancelAnimationFrame(this.fid);
this.fid = undefined;
}
this.tid = undefined;
this.callback();
}
};
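// Illustrative usage sketch (added; not part of the original source):
//
//   const job = new vAPI.SafeAnimationFrame(( ) => { /* work */ });
//   job.start(50);   // schedule: plain timer first, then animation frame
//   job.start(50);   // no-op while a timer/frame is already pending
//   job.clear();     // cancel both the timeout and the animation frame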
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
// https://github.com/uBlockOrigin/uBlock-issues/issues/552
// Listen and report CSP violations so that blocked resources through CSP
// are properly reported in the logger.
{
const newEvents = new Set();
const allEvents = new Set();
let timer;
const send = function() {
vAPI.messaging.send('scriptlets', {
what: 'securityPolicyViolation',
type: 'net',
docURL: document.location.href,
violations: Array.from(newEvents),
}).then(response => {
if ( response === true ) { return; }
stop();
});
for ( const event of newEvents ) {
allEvents.add(event);
}
newEvents.clear();
};
const sendAsync = function() {
if ( timer !== undefined ) { return; }
timer = self.requestIdleCallback(
( ) => { timer = undefined; send(); },
{ timeout: 2063 }
);
};
const listener = function(ev) {
if ( ev.isTrusted !== true ) { return; }
if ( ev.disposition !== 'enforce' ) { return; }
const json = JSON.stringify({
url: ev.blockedURL || ev.blockedURI,
policy: ev.originalPolicy,
directive: ev.effectiveDirective || ev.violatedDirective,
});
if ( allEvents.has(json) ) { return; }
newEvents.add(json);
sendAsync();
};
const stop = function() {
newEvents.clear();
allEvents.clear();
if ( timer !== undefined ) {
self.cancelIdleCallback(timer);
timer = undefined;
}
document.removeEventListener('securitypolicyviolation', listener);
vAPI.shutdown.remove(stop);
};
document.addEventListener('securitypolicyviolation', listener);
vAPI.shutdown.add(stop);
// We need to call at least once to find out whether we really need to
// listen to CSP violations.
sendAsync();
}
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
// vAPI.domWatcher
{
vAPI.domMutationTime = Date.now();
const addedNodeLists = [];
const removedNodeLists = [];
const addedNodes = [];
const ignoreTags = new Set([ 'br', 'head', 'link', 'meta', 'script', 'style' ]);
const listeners = [];
let domLayoutObserver;
let listenerIterator = [];
let listenerIteratorDirty = false;
let removedNodes = false;
let safeObserverHandlerTimer;
const safeObserverHandler = function() {
let i = addedNodeLists.length;
while ( i-- ) {
const nodeList = addedNodeLists[i];
let iNode = nodeList.length;
while ( iNode-- ) {
const node = nodeList[iNode];
if ( node.nodeType !== 1 ) { continue; }
if ( ignoreTags.has(node.localName) ) { continue; }
if ( node.parentElement === null ) { continue; }
addedNodes.push(node);
}
}
addedNodeLists.length = 0;
i = removedNodeLists.length;
while ( i-- && removedNodes === false ) {
const nodeList = removedNodeLists[i];
let iNode = nodeList.length;
while ( iNode-- ) {
if ( nodeList[iNode].nodeType !== 1 ) { continue; }
removedNodes = true;
break;
}
}
removedNodeLists.length = 0;
if ( addedNodes.length === 0 && removedNodes === false ) { return; }
for ( const listener of getListenerIterator() ) {
try { listener.onDOMChanged(addedNodes, removedNodes); }
catch (ex) { }
}
addedNodes.length = 0;
removedNodes = false;
vAPI.domMutationTime = Date.now();
};
// https://github.com/chrisaljoudi/uBlock/issues/205
// Do not handle added node directly from within mutation observer.
const observerHandler = function(mutations) {
let i = mutations.length;
while ( i-- ) {
const mutation = mutations[i];
let nodeList = mutation.addedNodes;
if ( nodeList.length !== 0 ) {
addedNodeLists.push(nodeList);
}
nodeList = mutation.removedNodes;
if ( nodeList.length !== 0 ) {
removedNodeLists.push(nodeList);
}
}
if ( addedNodeLists.length !== 0 || removedNodeLists.length !== 0 ) {
safeObserverHandlerTimer.start(
addedNodeLists.length < 100 ? 1 : undefined
);
}
};
const startMutationObserver = function() {
if ( domLayoutObserver !== undefined ) { return; }
domLayoutObserver = new MutationObserver(observerHandler);
domLayoutObserver.observe(document.documentElement, {
//attributeFilter: [ 'class', 'id' ],
//attributes: true,
childList: true,
subtree: true
});
safeObserverHandlerTimer = new vAPI.SafeAnimationFrame(safeObserverHandler);
vAPI.shutdown.add(cleanup);
};
const stopMutationObserver = function() {
if ( domLayoutObserver === undefined ) { return; }
cleanup();
vAPI.shutdown.remove(cleanup);
};
const getListenerIterator = function() {
if ( listenerIteratorDirty ) {
listenerIterator = listeners.slice();
listenerIteratorDirty = false;
}
return listenerIterator;
};
const addListener = function(listener) {
if ( listeners.indexOf(listener) !== -1 ) { return; }
listeners.push(listener);
listenerIteratorDirty = true;
if ( domLayoutObserver === undefined ) { return; }
try { listener.onDOMCreated(); }
catch (ex) { }
startMutationObserver();
};
const removeListener = function(listener) {
const pos = listeners.indexOf(listener);
if ( pos === -1 ) { return; }
listeners.splice(pos, 1);
listenerIteratorDirty = true;
if ( listeners.length === 0 ) {
stopMutationObserver();
}
};
const cleanup = function() {
if ( domLayoutObserver !== undefined ) {
domLayoutObserver.disconnect();
domLayoutObserver = undefined;
}
if ( safeObserverHandlerTimer !== undefined ) {
safeObserverHandlerTimer.clear();
safeObserverHandlerTimer = undefined;
}
};
const start = function() {
for ( const listener of getListenerIterator() ) {
try { listener.onDOMCreated(); }
catch (ex) { }
}
startMutationObserver();
};
vAPI.domWatcher = { start, addListener, removeListener };
}
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
vAPI.injectScriptlet = function(doc, text) {
if ( !doc ) { return; }
let script;
try {
script = doc.createElement('script');
script.appendChild(doc.createTextNode(text));
(doc.head || doc.documentElement).appendChild(script);
} catch (ex) {
}
if ( script ) {
if ( script.parentNode ) {
script.parentNode.removeChild(script);
}
script.textContent = '';
}
};
/******************************************************************************/
/******************************************************************************/
/*******************************************************************************
The DOM filterer is the heart of uBO's cosmetic filtering.
DOMFilterer: adds procedural cosmetic filtering
*/
vAPI.hideStyle = 'display:none!important;';
vAPI.DOMFilterer = class {
constructor() {
this.commitTimer = new vAPI.SafeAnimationFrame(
( ) => { this.commitNow(); }
);
this.domIsReady = document.readyState !== 'loading';
this.disabled = false;
this.listeners = [];
this.stylesheets = [];
this.exceptedCSSRules = [];
this.exceptions = [];
this.proceduralFilterer = null;
// https://github.com/uBlockOrigin/uBlock-issues/issues/167
// By the time the DOMContentLoaded is fired, the content script might
// have been disconnected from the background page. Unclear why this
// would happen, so far seems to be a Chromium-specific behavior at
// launch time.
if ( this.domIsReady !== true ) {
document.addEventListener('DOMContentLoaded', ( ) => {
if ( vAPI instanceof Object === false ) { return; }
this.domIsReady = true;
this.commit();
});
}
}
explodeCSS(css) {
const out = [];
const reBlock = /^\{(.*)\}$/m;
const blocks = css.trim().split(/\n\n+/);
for ( const block of blocks ) {
const match = reBlock.exec(block);
out.push([ block.slice(0, match.index).trim(), match[1] ]);
}
return out;
}
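    // Worked example (added comment): explodeCSS of the two-block sheet
    //   '.a\n{display:none!important;}\n\n.b\n{color:red;}'
    // returns
    //   [ [ '.a', 'display:none!important;' ], [ '.b', 'color:red;' ] ]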
addCSS(css, details = {}) {
if ( typeof css !== 'string' || css.length === 0 ) { return; }
if ( this.stylesheets.includes(css) ) { return; }
this.stylesheets.push(css);
if ( details.mustInject && this.disabled === false ) {
vAPI.userStylesheet.add(css);
}
if ( this.hasListeners() === false ) { return; }
if ( details.silent ) { return; }
this.triggerListeners({ declarative: this.explodeCSS(css) });
}
exceptCSSRules(exceptions) {
if ( exceptions.length === 0 ) { return; }
this.exceptedCSSRules.push(...exceptions);
if ( this.hasListeners() ) {
this.triggerListeners({ exceptions });
}
}
addListener(listener) {
if ( this.listeners.indexOf(listener) !== -1 ) { return; }
this.listeners.push(listener);
}
removeListener(listener) {
const pos = this.listeners.indexOf(listener);
if ( pos === -1 ) { return; }
this.listeners.splice(pos, 1);
}
hasListeners() {
return this.listeners.length !== 0;
}
triggerListeners(changes) {
for ( const listener of this.listeners ) {
listener.onFiltersetChanged(changes);
}
}
toggle(state, callback) {
if ( state === undefined ) { state = this.disabled; }
if ( state !== this.disabled ) { return; }
this.disabled = !state;
const uss = vAPI.userStylesheet;
for ( const css of this.stylesheets ) {
if ( this.disabled ) {
uss.remove(css);
} else {
uss.add(css);
}
}
uss.apply(callback);
}
// Here we will deal with:
// - Injecting low priority user styles;
// - Notifying listeners about changed filterset.
// https://www.reddit.com/r/uBlockOrigin/comments/9jj0y1/no_longer_blocking_ads/
// Ensure vAPI is still valid -- it can go away by the time we are
// called, since the port could be force-disconnected from the main
// process. Another approach would be to have vAPI.SafeAnimationFrame
// register a shutdown job: to evaluate. For now I will keep the fix
// trivial.
commitNow() {
this.commitTimer.clear();
if ( vAPI instanceof Object === false ) { return; }
vAPI.userStylesheet.apply();
if ( this.proceduralFilterer instanceof Object ) {
this.proceduralFilterer.commitNow();
}
}
commit(commitNow) {
if ( commitNow ) {
this.commitTimer.clear();
this.commitNow();
} else {
this.commitTimer.start();
}
}
proceduralFiltererInstance() {
if ( this.proceduralFilterer instanceof Object === false ) {
if ( vAPI.DOMProceduralFilterer instanceof Object === false ) {
return null;
}
this.proceduralFilterer = new vAPI.DOMProceduralFilterer(this);
}
return this.proceduralFilterer;
}
addProceduralSelectors(selectors) {
if ( Array.isArray(selectors) === false || selectors.length === 0 ) {
return;
}
const procedurals = [];
for ( const raw of selectors ) {
procedurals.push(JSON.parse(raw));
}
if ( procedurals.length === 0 ) { return; }
const pfilterer = this.proceduralFiltererInstance();
if ( pfilterer !== null ) {
pfilterer.addProceduralSelectors(procedurals);
}
}
createProceduralFilter(o) {
const pfilterer = this.proceduralFiltererInstance();
if ( pfilterer === null ) { return; }
return pfilterer.createProceduralFilter(o);
}
getAllSelectors(bits = 0) {
const out = {
declarative: [],
exceptions: this.exceptedCSSRules,
};
const hasProcedural = this.proceduralFilterer instanceof Object;
const includePrivateSelectors = (bits & 0b01) !== 0;
const masterToken = hasProcedural
? `[${this.proceduralFilterer.masterToken}]`
: undefined;
for ( const css of this.stylesheets ) {
const blocks = this.explodeCSS(css);
for ( const block of blocks ) {
if (
includePrivateSelectors === false &&
masterToken !== undefined &&
block[0].startsWith(masterToken)
) {
continue;
}
out.declarative.push([ block[0], block[1] ]);
}
}
const excludeProcedurals = (bits & 0b10) !== 0;
if ( excludeProcedurals !== true ) {
out.procedural = hasProcedural
? Array.from(this.proceduralFilterer.selectors.values())
: [];
}
return out;
}
getAllExceptionSelectors() {
return this.exceptions.join(',\n');
}
};
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
// vAPI.domCollapser
{
const messaging = vAPI.messaging;
const toCollapse = new Map();
const src1stProps = {
audio: 'currentSrc',
embed: 'src',
iframe: 'src',
img: 'currentSrc',
object: 'data',
video: 'currentSrc',
};
const src2ndProps = {
audio: 'src',
img: 'src',
video: 'src',
};
const tagToTypeMap = {
audio: 'media',
embed: 'object',
iframe: 'sub_frame',
img: 'image',
object: 'object',
video: 'media',
};
    let requestIdGenerator = 1,
processTimer,
cachedBlockedSet,
cachedBlockedSetHash,
cachedBlockedSetTimer,
toProcess = [],
toFilter = [],
netSelectorCacheCount = 0;
const cachedBlockedSetClear = function() {
cachedBlockedSet =
cachedBlockedSetHash =
cachedBlockedSetTimer = undefined;
};
// https://github.com/chrisaljoudi/uBlock/issues/399
// https://github.com/gorhill/uBlock/issues/2848
// Use a user stylesheet to collapse placeholders.
const getCollapseToken = ( ) => {
if ( collapseToken === undefined ) {
collapseToken = vAPI.randomToken();
vAPI.userStylesheet.add(
`[${collapseToken}]\n{display:none!important;}`,
true
);
}
return collapseToken;
};
let collapseToken;
// https://github.com/chrisaljoudi/uBlock/issues/174
// Do not remove fragment from src URL
const onProcessed = function(response) {
// This happens if uBO is disabled or restarted.
if ( response instanceof Object === false ) {
toCollapse.clear();
return;
}
const targets = toCollapse.get(response.id);
if ( targets === undefined ) { return; }
toCollapse.delete(response.id);
if ( cachedBlockedSetHash !== response.hash ) {
cachedBlockedSet = new Set(response.blockedResources);
cachedBlockedSetHash = response.hash;
if ( cachedBlockedSetTimer !== undefined ) {
clearTimeout(cachedBlockedSetTimer);
}
cachedBlockedSetTimer = vAPI.setTimeout(cachedBlockedSetClear, 30000);
}
if ( cachedBlockedSet === undefined || cachedBlockedSet.size === 0 ) {
return;
}
const selectors = [];
let netSelectorCacheCountMax = response.netSelectorCacheCountMax;
for ( const target of targets ) {
const tag = target.localName;
let prop = src1stProps[tag];
if ( prop === undefined ) { continue; }
let src = target[prop];
if ( typeof src !== 'string' || src.length === 0 ) {
prop = src2ndProps[tag];
if ( prop === undefined ) { continue; }
src = target[prop];
if ( typeof src !== 'string' || src.length === 0 ) { continue; }
}
if ( cachedBlockedSet.has(tagToTypeMap[tag] + ' ' + src) === false ) {
continue;
}
target.setAttribute(getCollapseToken(), '');
// https://github.com/chrisaljoudi/uBlock/issues/1048
// Use attribute to construct CSS rule
if ( netSelectorCacheCount > netSelectorCacheCountMax ) { continue; }
const value = target.getAttribute(prop);
if ( value ) {
selectors.push(`${tag}[${prop}="${CSS.escape(value)}"]`);
netSelectorCacheCount += 1;
}
}
if ( selectors.length === 0 ) { return; }
messaging.send('contentscript', {
what: 'cosmeticFiltersInjected',
type: 'net',
hostname: window.location.hostname,
selectors,
});
};
const send = function() {
processTimer = undefined;
        toCollapse.set(requestIdGenerator, toProcess);
messaging.send('contentscript', {
what: 'getCollapsibleBlockedRequests',
            id: requestIdGenerator,
frameURL: window.location.href,
resources: toFilter,
hash: cachedBlockedSetHash,
}).then(response => {
onProcessed(response);
});
toProcess = [];
toFilter = [];
        requestIdGenerator += 1;
};
<|fim▁hole|> clearTimeout(processTimer);
}
send();
} else if ( processTimer === undefined ) {
processTimer = vAPI.setTimeout(send, delay || 20);
}
};
const add = function(target) {
toProcess[toProcess.length] = target;
};
const addMany = function(targets) {
for ( const target of targets ) {
add(target);
}
};
const iframeSourceModified = function(mutations) {
for ( const mutation of mutations ) {
addIFrame(mutation.target, true);
}
process();
};
const iframeSourceObserver = new MutationObserver(iframeSourceModified);
const iframeSourceObserverOptions = {
attributes: true,
attributeFilter: [ 'src' ]
};
// https://github.com/gorhill/uBlock/issues/162
// Be prepared to deal with possible change of src attribute.
const addIFrame = function(iframe, dontObserve) {
if ( dontObserve !== true ) {
iframeSourceObserver.observe(iframe, iframeSourceObserverOptions);
}
const src = iframe.src;
if ( typeof src !== 'string' || src === '' ) { return; }
if ( src.startsWith('http') === false ) { return; }
toFilter.push({ type: 'sub_frame', url: iframe.src });
add(iframe);
};
const addIFrames = function(iframes) {
for ( const iframe of iframes ) {
addIFrame(iframe);
}
};
const onResourceFailed = function(ev) {
if ( tagToTypeMap[ev.target.localName] !== undefined ) {
add(ev.target);
process();
}
};
const stop = function() {
document.removeEventListener('error', onResourceFailed, true);
if ( processTimer !== undefined ) {
clearTimeout(processTimer);
}
if ( vAPI.domWatcher instanceof Object ) {
vAPI.domWatcher.removeListener(domWatcherInterface);
}
vAPI.shutdown.remove(stop);
vAPI.domCollapser = null;
};
const start = function() {
if ( vAPI.domWatcher instanceof Object ) {
vAPI.domWatcher.addListener(domWatcherInterface);
}
};
const domWatcherInterface = {
onDOMCreated: function() {
if ( self.vAPI instanceof Object === false ) { return; }
if ( vAPI.domCollapser instanceof Object === false ) {
if ( vAPI.domWatcher instanceof Object ) {
vAPI.domWatcher.removeListener(domWatcherInterface);
}
return;
}
// Listener to collapse blocked resources.
// - Future requests not blocked yet
// - Elements dynamically added to the page
// - Elements which resource URL changes
// https://github.com/chrisaljoudi/uBlock/issues/7
// Preferring getElementsByTagName over querySelectorAll:
// http://jsperf.com/queryselectorall-vs-getelementsbytagname/145
const elems = document.images ||
document.getElementsByTagName('img');
for ( const elem of elems ) {
if ( elem.complete ) {
add(elem);
}
}
addMany(document.embeds || document.getElementsByTagName('embed'));
addMany(document.getElementsByTagName('object'));
addIFrames(document.getElementsByTagName('iframe'));
process(0);
document.addEventListener('error', onResourceFailed, true);
vAPI.shutdown.add(stop);
},
onDOMChanged: function(addedNodes) {
if ( addedNodes.length === 0 ) { return; }
for ( const node of addedNodes ) {
if ( node.localName === 'iframe' ) {
addIFrame(node);
}
if ( node.firstElementChild === null ) { continue; }
const iframes = node.getElementsByTagName('iframe');
if ( iframes.length !== 0 ) {
addIFrames(iframes);
}
}
process();
}
};
vAPI.domCollapser = { start };
}
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
// vAPI.domSurveyor
{
const messaging = vAPI.messaging;
const queriedIds = new Set();
const queriedClasses = new Set();
const maxSurveyNodes = 65536;
const maxSurveyTimeSlice = 4;
const maxSurveyBuffer = 64;
let domFilterer,
hostname = '',
surveyCost = 0;
const pendingNodes = {
nodeLists: [],
buffer: [
null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null,
],
j: 0,
accepted: 0,
iterated: 0,
stopped: false,
add: function(nodes) {
if ( nodes.length === 0 || this.accepted >= maxSurveyNodes ) {
return;
}
this.nodeLists.push(nodes);
this.accepted += nodes.length;
},
next: function() {
if ( this.nodeLists.length === 0 || this.stopped ) { return 0; }
const nodeLists = this.nodeLists;
let ib = 0;
do {
const nodeList = nodeLists[0];
let j = this.j;
let n = j + maxSurveyBuffer - ib;
if ( n > nodeList.length ) {
n = nodeList.length;
}
for ( let i = j; i < n; i++ ) {
this.buffer[ib++] = nodeList[j++];
}
if ( j !== nodeList.length ) {
this.j = j;
break;
}
this.j = 0;
this.nodeLists.shift();
} while ( ib < maxSurveyBuffer && nodeLists.length !== 0 );
this.iterated += ib;
if ( this.iterated >= maxSurveyNodes ) {
this.nodeLists = [];
this.stopped = true;
//console.info(`domSurveyor> Surveyed a total of ${this.iterated} nodes. Enough.`);
}
return ib;
},
hasNodes: function() {
return this.nodeLists.length !== 0;
},
};
// Extract all classes/ids: these will be passed to the cosmetic
// filtering engine, and in return we will obtain only the relevant
// CSS selectors.
const reWhitespace = /\s/;
// https://github.com/gorhill/uBlock/issues/672
// http://www.w3.org/TR/2014/REC-html5-20141028/infrastructure.html#space-separated-tokens
// http://jsperf.com/enumerate-classes/6
const surveyPhase1 = function() {
// console.log('dom surveyor/surveying');
const t0 = performance.now();
const rews = reWhitespace;
const ids = [];
const classes = [];
const nodes = pendingNodes.buffer;
const deadline = t0 + maxSurveyTimeSlice;
let qids = queriedIds;
let qcls = queriedClasses;
let processed = 0;
for (;;) {
const n = pendingNodes.next();
if ( n === 0 ) { break; }
for ( let i = 0; i < n; i++ ) {
const node = nodes[i]; nodes[i] = null;
let v = node.id;
if ( typeof v === 'string' && v.length !== 0 ) {
v = v.trim();
if ( qids.has(v) === false && v.length !== 0 ) {
ids.push(v); qids.add(v);
}
}
let vv = node.className;
if ( typeof vv === 'string' && vv.length !== 0 ) {
if ( rews.test(vv) === false ) {
if ( qcls.has(vv) === false ) {
classes.push(vv); qcls.add(vv);
}
} else {
vv = node.classList;
let j = vv.length;
while ( j-- ) {
const v = vv[j];
if ( qcls.has(v) === false ) {
classes.push(v); qcls.add(v);
}
}
}
}
}
processed += n;
if ( performance.now() >= deadline ) { break; }
}
const t1 = performance.now();
surveyCost += t1 - t0;
//console.info(`domSurveyor> Surveyed ${processed} nodes in ${(t1-t0).toFixed(2)} ms`);
// Phase 2: Ask main process to lookup relevant cosmetic filters.
if ( ids.length !== 0 || classes.length !== 0 ) {
messaging.send('contentscript', {
what: 'retrieveGenericCosmeticSelectors',
hostname,
ids,
classes,
exceptions: domFilterer.exceptions,
cost: surveyCost,
}).then(response => {
surveyPhase3(response);
});
} else {
surveyPhase3(null);
}
//console.timeEnd('dom surveyor/surveying');
};
const surveyTimer = new vAPI.SafeAnimationFrame(surveyPhase1);
// This is to shutdown the surveyor if result of surveying keeps being
// fruitless. This is useful on long-lived web page. I arbitrarily
// picked 5 minutes before the surveyor is allowed to shutdown. I also
// arbitrarily picked 256 misses before the surveyor is allowed to
// shutdown.
let canShutdownAfter = Date.now() + 300000,
surveyingMissCount = 0;
// Handle main process' response.
const surveyPhase3 = function(response) {
const result = response && response.result;
let mustCommit = false;
if ( result ) {
const css = result.injectedCSS;
if ( typeof css === 'string' && css.length !== 0 ) {
domFilterer.addCSS(css);
//ADN tmp fix: hiding - local iframe without src
/* old adn solution
const isSpecialLocalIframes = (location.href=="about:blank" || location.href=="") && (window.self !== window.top)
domFilterer.addCSSRule(
selectors,
vAPI.hideStyle,
{ mustInject: isSpecialLocalIframes ? true : false } // ADN
);
*/
mustCommit = true;
}
const selectors = result.excepted;
if ( Array.isArray(selectors) && selectors.length !== 0 ) {
domFilterer.exceptCSSRules(selectors);
}
// ADN: ad check on new elements found
let allSelectors = "";
for(const key in result) {
if(result[key] != "") {
let injected = result[key];
                let selectors = injected.split("\n{display:none!important;}")[0];
allSelectors += (allSelectors == "" ? "" : ",") + selectors;
}
}
let nodes;
if (allSelectors != "") {
nodes = document.querySelectorAll(allSelectors);
for ( const node of nodes ) {
vAPI.adCheck && vAPI.adCheck(node);
}
}
}
if ( pendingNodes.stopped === false ) {
if ( pendingNodes.hasNodes() ) {
surveyTimer.start(1);
bootstrapAdnTimer.start(1); // ADN
}
if ( mustCommit ) {
surveyingMissCount = 0;
canShutdownAfter = Date.now() + 300000;
return;
}
surveyingMissCount += 1;
if ( surveyingMissCount < 256 || Date.now() < canShutdownAfter ) {
return;
}
}
//console.info('dom surveyor shutting down: too many misses');
surveyTimer.clear();
bootstrapAdnTimer.clear(); // ADN
vAPI.domWatcher.removeListener(domWatcherInterface);
vAPI.domSurveyor = null;
};
const domWatcherInterface = {
onDOMCreated: function() {
if (
self.vAPI instanceof Object === false ||
vAPI.domSurveyor instanceof Object === false ||
vAPI.domFilterer instanceof Object === false
) {
if ( self.vAPI instanceof Object ) {
if ( vAPI.domWatcher instanceof Object ) {
vAPI.domWatcher.removeListener(domWatcherInterface);
}
vAPI.domSurveyor = null;
}
return;
}
//console.time('dom surveyor/dom layout created');
domFilterer = vAPI.domFilterer;
pendingNodes.add(document.querySelectorAll('[id],[class]'));
surveyTimer.start();
bootstrapAdnTimer.start(); // ADN
//console.timeEnd('dom surveyor/dom layout created');
},
onDOMChanged: function(addedNodes) {
if ( addedNodes.length === 0 ) { return; }
//console.time('dom surveyor/dom layout changed');
let i = addedNodes.length;
while ( i-- ) {
const node = addedNodes[i];
pendingNodes.add([ node ]);
if ( node.firstElementChild === null ) { continue; }
pendingNodes.add(node.querySelectorAll('[id],[class]'));
}
if ( pendingNodes.hasNodes() ) {
surveyTimer.start(1);
bootstrapAdnTimer.start(1); // ADN
}
}
};
const start = function(details) {
if ( vAPI.domWatcher instanceof Object === false ) { return; }
hostname = details.hostname;
vAPI.domWatcher.addListener(domWatcherInterface);
};
vAPI.domSurveyor = { start };
}
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
// ADN function to go through the selectors from bootstrapPhase2 and run the ad check on the detected ad nodes
// https://github.com/dhowe/AdNauseam/issues/1838
// avoid running too many times;
const intervalTime = window.self === window.top ? 4000 : 8000;
const maxTimesRunBootstrapPhaseAdn = window.self === window.top ? 64 : 8;
let lastRunBootstrapPhaseAdn = null;
let bootstrapPhaseAdnCounter = 0;
const bootstrapPhaseAdn = function () {
    // check when this last ran
    let now = Date.now();
    // run on the first invocation, or if the last run was more than intervalTime ago
    if (lastRunBootstrapPhaseAdn === null || (lastRunBootstrapPhaseAdn && now - lastRunBootstrapPhaseAdn > intervalTime)) {
// avoid it running too many times;
if (bootstrapPhaseAdnCounter >= maxTimesRunBootstrapPhaseAdn) {
bootstrapAdnTimer.clear();
return;
}
        lastRunBootstrapPhaseAdn = Date.now();
        // get all the selectors
        let allSelectors = vAPI.domFilterer.getAllSelectors();
if (allSelectors.declarative) {
allSelectors.declarative.forEach(function([selectors, styles]){
let nodes = document.querySelectorAll(selectors);
for ( const node of nodes ) {
vAPI.adCheck && vAPI.adCheck(node);
}
            });
}
bootstrapPhaseAdnCounter++;
}
};
// vAPI.bootstrap:
// Bootstrapping allows all components of the content script
// to be launched if/when needed.
// create bootstrapAdnTimer, using the "SafeAnimationFrame" class for the delays
const bootstrapAdnTimer = new vAPI.SafeAnimationFrame(bootstrapPhaseAdn);
const bootstrapPhase2 = function() {
/*
    ADN: catch ads that load with a delay: https://github.com/dhowe/AdNauseam/issues/1838
    This is a workaround to catch ads that appear after a delay but do not
    trigger the domWatcher, e.g. on duckduckgo.com
*/
if (vAPI.domFilterer) {
        bootstrapPhaseAdn();
        bootstrapAdnTimer.start(2000);
}
// This can happen on Firefox. For instance:
// https://github.com/gorhill/uBlock/issues/1893
if ( window.location === null ) { return; }
if ( self.vAPI instanceof Object === false ) { return; }
vAPI.messaging.send('contentscript', {
what: 'shouldRenderNoscriptTags',
});
if ( vAPI.domWatcher instanceof Object ) {
vAPI.domWatcher.start();
}
// Element picker works only in top window for now.
if (
window !== window.top ||
vAPI.domFilterer instanceof Object === false
) {
return;
}
// To be used by element picker/zapper.
vAPI.mouseClick = { x: -1, y: -1 };
const onMouseClick = function(ev) {
if ( ev.isTrusted === false ) { return; }
vAPI.mouseClick.x = ev.clientX;
vAPI.mouseClick.y = ev.clientY;
// https://github.com/chrisaljoudi/uBlock/issues/1143
// Find a link under the mouse, to try to avoid confusing new tabs
// as nuisance popups.
// https://github.com/uBlockOrigin/uBlock-issues/issues/777
// Mind that href may not be a string.
const elem = ev.target.closest('a[href]');
if ( elem === null || typeof elem.href !== 'string' ) { return; }
vAPI.messaging.send('contentscript', {
what: 'maybeGoodPopup',
url: elem.href || '',
});
};
document.addEventListener('mousedown', onMouseClick, true);
// https://github.com/gorhill/uMatrix/issues/144
vAPI.shutdown.add(function() {
document.removeEventListener('mousedown', onMouseClick, true);
});
};
// https://github.com/uBlockOrigin/uBlock-issues/issues/403
// If there was a spurious port disconnection -- in which case the
// response is expressly set to `null`, rather than undefined or
// an object -- let's stay around, we may be given the opportunity
// to try bootstrapping again later.
const bootstrapPhase1 = function(response) {
if ( response instanceof Object === false ) { return; }
vAPI.bootstrap = undefined;
if (response && response.prefs) vAPI.prefs = response.prefs; // ADN
// cosmetic filtering engine aka 'cfe'
const cfeDetails = response && response.specificCosmeticFilters;
if ( !cfeDetails || !cfeDetails.ready ) {
vAPI.domWatcher = vAPI.domCollapser = vAPI.domFilterer =
vAPI.domSurveyor = vAPI.domIsLoaded = null;
return;
}
vAPI.domCollapser.start();
if ( response.noCosmeticFiltering || response.prefs.hidingDisabled) { // ADN
vAPI.domFilterer = null;
vAPI.domSurveyor = null;
} else {
const domFilterer = vAPI.domFilterer = new vAPI.DOMFilterer();
if ( response.noGenericCosmeticFiltering || cfeDetails.noDOMSurveying ) {
vAPI.domSurveyor = null;
}
domFilterer.exceptions = cfeDetails.exceptionFilters;
domFilterer.addCSS(cfeDetails.injectedCSS);
domFilterer.addProceduralSelectors(cfeDetails.proceduralFilters);
domFilterer.exceptCSSRules(cfeDetails.exceptedFilters);
}
vAPI.userStylesheet.apply();
// Library of resources is located at:
// https://github.com/gorhill/uBlock/blob/master/assets/ublock/resources.txt
if ( response.scriptlets ) {
vAPI.injectScriptlet(document, response.scriptlets);
vAPI.injectedScripts = response.scriptlets;
}
if ( vAPI.domSurveyor instanceof Object ) {
vAPI.domSurveyor.start(cfeDetails);
}
// https://github.com/chrisaljoudi/uBlock/issues/587
// If no filters were found, maybe the script was injected before
// uBlock's process was fully initialized. When this happens, pages
// won't be cleaned right after browser launch.
if (
typeof document.readyState === 'string' &&
document.readyState !== 'loading'
) {
bootstrapPhase2();
} else {
document.addEventListener(
'DOMContentLoaded',
bootstrapPhase2,
{ once: true }
);
}
};
vAPI.bootstrap = function() {
vAPI.messaging.send('contentscript', {
what: 'retrieveContentScriptParameters',
url: vAPI.effectiveSelf.location.href,
}).then(response => {
bootstrapPhase1(response);
});
};
// })()
// This starts bootstrap process.
vAPI.bootstrap();
/******************************************************************************/
/******************************************************************************/
/******************************************************************************/
}
// <<<<<<<< end of HUGE-IF-BLOCK<|fim▁end|>
|
const process = function(delay) {
if ( toProcess.length === 0 ) { return; }
if ( delay === 0 ) {
if ( processTimer !== undefined ) {
|
<|file_name|>fdiv_ST1_ST1.java<|end_file_name|><|fim▁begin|>/*
JPC: An x86 PC Hardware Emulator for a pure Java Virtual Machine
Copyright (C) 2012-2013 Ian Preston
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as published by
the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Details (including contact information) can be found at:
jpc.sourceforge.net
or the developer website
sourceforge.net/projects/jpc/
End of licence header
*/
package org.jpc.emulator.execution.opcodes.vm;
import org.jpc.emulator.execution.*;
import org.jpc.emulator.execution.decoder.*;
import org.jpc.emulator.processor.*;
import org.jpc.emulator.processor.fpu64.*;
import static org.jpc.emulator.processor.Processor.*;
public class fdiv_ST1_ST1 extends Executable
{
public fdiv_ST1_ST1(int blockStart, int eip, int prefices, PeekableInputStream input)
{
super(blockStart, eip);
int modrm = input.readU8();
}
public Branch execute(Processor cpu)
{
double freg0 = cpu.fpu.ST(1);
double freg1 = cpu.fpu.ST(1);
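        // Both operands alias ST(1), so freg0 and freg1 always hold the same
        // value here; the checks below raise x87 invalid-operation for 0/0 or
        // inf/inf and zero-divide for a 0.0 operand before the quotient is
        // written back into ST(1).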
if (((freg0 == 0.0) && (freg1 == 0.0)) || (Double.isInfinite(freg0) && Double.isInfinite(freg1)))
cpu.fpu.setInvalidOperation();
if ((freg0 == 0.0) && !Double.isNaN(freg1) && !Double.isInfinite(freg1))
cpu.fpu.setZeroDivide();
cpu.fpu.setST(1, freg0/freg1);
return Branch.None;
}
public boolean isBranch()
{
return false;
}
<|fim▁hole|>}<|fim▁end|>
|
public String toString()
{
return this.getClass().getName();
}
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
from .async import TelnetServer
|
<|file_name|>OEngineLocalPaginated.java<|end_file_name|><|fim▁begin|>/*
*
* * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com)
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *<|fim▁hole|> * * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
* *
* * For more information: http://www.orientechnologies.com
*
*/
package com.orientechnologies.orient.core.engine.local;
import java.util.Map;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.config.OGlobalConfiguration;
import com.orientechnologies.orient.core.engine.OEngineAbstract;
import com.orientechnologies.orient.core.exception.ODatabaseException;
import com.orientechnologies.orient.core.storage.OStorage;
import com.orientechnologies.orient.core.storage.cache.local.O2QCache;
import com.orientechnologies.orient.core.storage.impl.local.paginated.OLocalPaginatedStorage;
/**
* @author Andrey Lomakin
* @since 28.03.13
*/
public class OEngineLocalPaginated extends OEngineAbstract {
public static final String NAME = "plocal";
private final O2QCache readCache;
public OEngineLocalPaginated() {
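    // The read cache gets the share of DISK_CACHE_SIZE (megabytes, converted
    // to bytes) left after the write-cache percentage is subtracted; e.g.
    // with hypothetical settings of 4096 MB and a 15% write part, reads get
    // roughly 3482 MB.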
readCache = new O2QCache(
(long) (OGlobalConfiguration.DISK_CACHE_SIZE.getValueAsLong() * 1024 * 1024 * ((100 - OGlobalConfiguration.DISK_WRITE_CACHE_PART
.getValueAsInteger()) / 100.0)), OGlobalConfiguration.DISK_CACHE_PAGE_SIZE.getValueAsInteger() * 1024, true);
try {
readCache.registerMBean();
} catch (Exception e) {
OLogManager.instance().error(this, "MBean for read cache cannot be registered", e);
}
}
public OStorage createStorage(final String dbName, final Map<String, String> configuration) {
try {
// GET THE STORAGE
return new OLocalPaginatedStorage(dbName, dbName, getMode(configuration), generateStorageId(), readCache);
} catch (Throwable t) {
OLogManager.instance().error(this,
"Error on opening database: " + dbName + ". Current location is: " + new java.io.File(".").getAbsolutePath(), t,
ODatabaseException.class);
}
return null;
}
public String getName() {
return NAME;
}
public boolean isShared() {
return true;
}
@Override
public void shutdown() {
super.shutdown();
readCache.clear();
try {
readCache.unregisterMBean();
} catch (Exception e) {
OLogManager.instance().error(this, "MBean for read cache cannot be unregistered", e);
}
}
}<|fim▁end|>
| |
<|file_name|>nine.py<|end_file_name|><|fim▁begin|>from itertools import combinations
def is_good(n):
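    # Digital root: 1 + ((n - 1) % 9) maps a positive integer to 1..9, and it
    # equals 9 exactly when n is divisible by 9, which this problem counts as
    # "good".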
return 1 + ((int(n) - 1) % 9) == 9
def generate_subsequences(n):
subsequences = []
combinations_list = []
index = 4
#Generate all combinations
while index > 0:
combinations_list.append(list(combinations(str(n), index)))
index -= 1
#Formatting combinations
for index in combinations_list:
for combination in index:
subsequences.append(''.join(combination))
return subsequences<|fim▁hole|> modulo = ((10 ** 9) + 7)
#Get number of cases
cases = int(raw_input())
while cases > 0:
value = raw_input()
good_subsequences = 0
for sub in generate_subsequences(value):
if is_good(sub):
good_subsequences += 1
print (good_subsequences % modulo)-1
cases -= 1<|fim▁end|>
|
if __name__ == '__main__':
#The modulo
|
<|file_name|>SqlConnectionDialog.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2009 - 2014 Detlev Offenbach <[email protected]>
#
"""
Module implementing a dialog to enter the connection parameters.
"""
from __future__ import unicode_literals
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QDialog, QDialogButtonBox
from PyQt5.QtSql import QSqlDatabase
from E5Gui.E5Completers import E5FileCompleter
from E5Gui import E5FileDialog
from .Ui_SqlConnectionDialog import Ui_SqlConnectionDialog
import Utilities
import UI.PixmapCache
class SqlConnectionDialog(QDialog, Ui_SqlConnectionDialog):
"""
Class implementing a dialog to enter the connection parameters.
"""
def __init__(self, parent=None):
"""
Constructor
@param parent reference to the parent widget (QWidget)
"""
super(SqlConnectionDialog, self).__init__(parent)
self.setupUi(self)
self.databaseFileButton.setIcon(UI.PixmapCache.getIcon("open.png"))
self.databaseFileCompleter = E5FileCompleter()
self.okButton = self.buttonBox.button(QDialogButtonBox.Ok)
drivers = QSqlDatabase.drivers()
# remove compatibility names
if "QMYSQL3" in drivers:
drivers.remove("QMYSQL3")
if "QOCI8" in drivers:
drivers.remove("QOCI8")<|fim▁hole|> if "QODBC3" in drivers:
drivers.remove("QODBC3")
if "QPSQL7" in drivers:
drivers.remove("QPSQL7")
if "QTDS7" in drivers:
drivers.remove("QTDS7")
self.driverCombo.addItems(drivers)
self.__updateDialog()
msh = self.minimumSizeHint()
self.resize(max(self.width(), msh.width()), msh.height())
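    # Typical usage (sketch): exec_() the dialog and, if it is accepted, feed
    # getData() into QSqlDatabase.addDatabase()/setDatabaseName()/open().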
def __updateDialog(self):
"""
Private slot to update the dialog depending on its contents.
"""
driver = self.driverCombo.currentText()
if driver.startswith("QSQLITE"):
self.databaseEdit.setCompleter(self.databaseFileCompleter)
self.databaseFileButton.setEnabled(True)
else:
self.databaseEdit.setCompleter(None)
self.databaseFileButton.setEnabled(False)
if self.databaseEdit.text() == "" or driver == "":
self.okButton.setEnabled(False)
else:
self.okButton.setEnabled(True)
@pyqtSlot(str)
def on_driverCombo_activated(self, txt):
"""
Private slot handling the selection of a database driver.
@param txt text of the driver combo (string)
"""
self.__updateDialog()
@pyqtSlot(str)
def on_databaseEdit_textChanged(self, txt):
"""
Private slot handling the change of the database name.
@param txt text of the edit (string)
"""
self.__updateDialog()
@pyqtSlot()
def on_databaseFileButton_clicked(self):
"""
Private slot to open a database file via a file selection dialog.
"""
startdir = self.databaseEdit.text()
dbFile = E5FileDialog.getOpenFileName(
self,
self.tr("Select Database File"),
startdir,
self.tr("All Files (*)"))
if dbFile:
self.databaseEdit.setText(Utilities.toNativeSeparators(dbFile))
def getData(self):
"""
Public method to retrieve the connection data.
@return tuple giving the driver name (string), the database name
(string), the user name (string), the password (string), the
host name (string) and the port (integer)
"""
return (
self.driverCombo.currentText(),
self.databaseEdit.text(),
self.usernameEdit.text(),
self.passwordEdit.text(),
self.hostnameEdit.text(),
self.portSpinBox.value(),
)<|fim▁end|>
| |
<|file_name|>convert.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from lxml import etree
import os, sys, logging, subprocess
from os.path import realpath, dirname
import xapian
sys.path.append('/home/liza/threepress')
from threepress import settings
db_dir = 'db/'
main_db = 'threepress'
logging.basicConfig(level=logging.WARNING)
indexer = xapian.TermGenerator()
stemmer = xapian.Stem("english")
indexer.set_stemmer(stemmer)
if len(sys.argv) < 3:
logging.error("Usage: convert.py xml-file xslt [re-index]")
sys.exit(1)
xml = sys.argv[1]
xsl = sys.argv[2]
reindex = False
if len(sys.argv) > 3:
reindex = True
tei_xsl = 'xsl/tei-xsl-5.9/p5/xhtml/tei.xsl'
fo_xsl = 'xsl/tei-xsl-5.9/p5/fo/tei.xsl'
fop = '/usr/local/bin/fop'
out_file = xml.replace('src', 'tei')
out = open(out_file, 'w')
schema = 'src/teilite.xsd'
xmlschema_doc = etree.parse(schema)
xmlschema = etree.XMLSchema(xmlschema_doc)
tree = etree.parse(xml)
xslt = etree.parse(xsl)
root = tree.xslt(xslt)
# Check the document
try:
xmlschema.assertValid(root)
except etree.DocumentInvalid, e:
logging.error(e)
for element in root.iter():
if element.text:
element.text = element.text.replace('--', u'—')
element.text = element.text.replace("'", u'’')
element.text = element.text.replace('`', u'‘')
words = []
is_open = False
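        # Walk the words, alternating straight double quotes between opening
        # and closing curly quotes; is_open tracks whether a quotation is
        # currently open across words.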
for index, word in enumerate(element.text.split(' ')):
if index == 0 and '"' in word:
# Definitely an open quote at the beginning
word = word.replace('"', u'“')
is_open = True
else:
if '"' in word and is_open:
word = word.replace('"', u'”')
is_open = False
elif '"' in word and not is_open:
word = word.replace('"', u'“')
is_open = True
words.append(word)
element.text = ' '.join([word for word in words])
main_database = xapian.WritableDatabase('%s/%s' % (db_dir, main_db), xapian.DB_CREATE_OR_OPEN)
id = root.xpath('/tei:TEI/@xml:id', namespaces={'tei':settings.TEI})[0]
if reindex:
# Open the database for update, creating a new database if necessary.
# Delete the old database
os.system('rm -rf %s/%s' % (db_dir, id))
database = xapian.WritableDatabase('%s/%s' % (db_dir, id), xapian.DB_CREATE_OR_OPEN)
body = root.xpath('//tei:body', namespaces={'tei':settings.TEI})[0]
for element in body.iter(tag='{%s}p' % settings.TEI):
para = element.text
doc = xapian.Document()
doc.set_data(para)
indexer.set_document(doc)
indexer.index_text(para)
# Add the document to the database.
para_id = element.xpath('@xml:id')[0].replace('id', '')
chapter_id = element.xpath('parent::tei:div[@type="chapter"]/@xml:id', namespaces={'tei':settings.TEI})[0]
# Chapter ID is value 0
doc.add_value(settings.SEARCH_CHAPTER_ID, chapter_id)
# Document title is value 2
doc.add_value(settings.SEARCH_DOCUMENT_TITLE, element.xpath('//tei:titleStmt/tei:title/text()', namespaces={'tei':settings.TEI})[0])
# Document ID is value 3
doc.add_value(settings.SEARCH_DOCUMENT_ID, id)
# Create the document with the paragraph ID from the XML
database.replace_document(int(para_id), doc)
main_database.replace_document(int(para_id), doc)
else:
logging.debug("Skipping re-index...")
out.write(etree.tostring(root, encoding='utf-8', pretty_print=True, xml_declaration=True))
out.close()
# Also transform it to FO<|fim▁hole|>fo_file = fo_file.replace('xml', 'fo')
logging.debug("Writing out to %s" % fo_file)
fo_out = open(fo_file, 'w')
xslt = etree.parse(fo_xsl)
fo = root.xslt(xslt)
fo_out.write(etree.tostring(fo, encoding='utf-8', pretty_print=True, xml_declaration=True))
fo_out.close()
pdf_file = "pdf/%s.pdf" % id
path = "%s/.." % realpath(dirname(sys.argv[0]))
pdf_file = "%s/%s" % (path, pdf_file)
fo_file = "%s/%s" % (path, fo_file)
logging.debug("Converting from FO %s to PDF as %s" % (fo_file, pdf_file))
subprocess.check_call([fop, '-r', fo_file, '-pdf', pdf_file])
logging.debug("Done.")<|fim▁end|>
|
fo_file = out_file
fo_file = fo_file.replace('tei/', 'fo/')
|
<|file_name|>factory.go<|end_file_name|><|fim▁begin|>package deployment
import (
"fmt"
"time"
kapi "github.com/GoogleCloudPlatform/kubernetes/pkg/api"
kclient "github.com/GoogleCloudPlatform/kubernetes/pkg/client"
"github.com/GoogleCloudPlatform/kubernetes/pkg/client/cache"
"github.com/GoogleCloudPlatform/kubernetes/pkg/fields"
"github.com/GoogleCloudPlatform/kubernetes/pkg/labels"
"github.com/GoogleCloudPlatform/kubernetes/pkg/runtime"
kutil "github.com/GoogleCloudPlatform/kubernetes/pkg/util"
"github.com/GoogleCloudPlatform/kubernetes/pkg/watch"
controller "github.com/openshift/origin/pkg/controller"
deployapi "github.com/openshift/origin/pkg/deploy/api"
deployutil "github.com/openshift/origin/pkg/deploy/util"
)
// DeploymentControllerFactory can create a DeploymentController that creates
// deployer pods in a configurable way.
type DeploymentControllerFactory struct {
// KubeClient is a Kubernetes client.
KubeClient kclient.Interface
// Codec is used for encoding/decoding.
Codec runtime.Codec
// Environment is a set of environment which should be injected into all deployer pod containers.
Environment []kapi.EnvVar
// RecreateStrategyImage specifies which Docker image which should implement the Recreate strategy.
RecreateStrategyImage string
}
// Create creates a DeploymentController.
func (factory *DeploymentControllerFactory) Create() controller.RunnableController {
deploymentLW := &deployutil.ListWatcherImpl{
ListFunc: func() (runtime.Object, error) {
return factory.KubeClient.ReplicationControllers(kapi.NamespaceAll).List(labels.Everything())
},
WatchFunc: func(resourceVersion string) (watch.Interface, error) {
return factory.KubeClient.ReplicationControllers(kapi.NamespaceAll).Watch(labels.Everything(), fields.Everything(), resourceVersion)
},
}
deploymentQueue := cache.NewFIFO(cache.MetaNamespaceKeyFunc)
cache.NewReflector(deploymentLW, &kapi.ReplicationController{}, deploymentQueue, 2*time.Minute).Run()
deployController := &DeploymentController{
deploymentClient: &deploymentClientImpl{
getDeploymentFunc: func(namespace, name string) (*kapi.ReplicationController, error) {
return factory.KubeClient.ReplicationControllers(namespace).Get(name)
},
updateDeploymentFunc: func(namespace string, deployment *kapi.ReplicationController) (*kapi.ReplicationController, error) {
return factory.KubeClient.ReplicationControllers(namespace).Update(deployment)
},
},
podClient: &podClientImpl{
createPodFunc: func(namespace string, pod *kapi.Pod) (*kapi.Pod, error) {
return factory.KubeClient.Pods(namespace).Create(pod)
},
deletePodFunc: func(namespace, name string) error {
return factory.KubeClient.Pods(namespace).Delete(name)
},
},
makeContainer: func(strategy *deployapi.DeploymentStrategy) (*kapi.Container, error) {
return factory.makeContainer(strategy)
},
decodeConfig: func(deployment *kapi.ReplicationController) (*deployapi.DeploymentConfig, error) {
return deployutil.DecodeDeploymentConfig(deployment, factory.Codec)
},
}
return &controller.RetryController{
Queue: deploymentQueue,
RetryManager: controller.NewQueueRetryManager(
deploymentQueue,
cache.MetaNamespaceKeyFunc,
func(obj interface{}, err error, count int) bool {
if _, isFatal := err.(fatalError); isFatal {
kutil.HandleError(err)
return false<|fim▁hole|> return true
},
kutil.NewTokenBucketRateLimiter(1, 10),
),
Handle: func(obj interface{}) error {
deployment := obj.(*kapi.ReplicationController)
return deployController.Handle(deployment)
},
}
}
// makeContainer creates containers in the following way:
//
// 1. For the Recreate strategy, use the factory's RecreateStrategyImage as
// the container image, and the factory's Environment as the container
// environment.
// 2. For the Custom strategy, use the strategy's image for the container
// image, and use the combination of the factory's Environment and the
// strategy's environment as the container environment.
//
// An error is returned if the deployment strategy type is not supported.
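// For illustration (hypothetical values): a Custom strategy whose image is
// "example/deployer:latest" and which adds one environment variable yields a
// container with that image and with the factory environment followed by the
// strategy environment.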
func (factory *DeploymentControllerFactory) makeContainer(strategy *deployapi.DeploymentStrategy) (*kapi.Container, error) {
// Set default environment values
environment := []kapi.EnvVar{}
for _, env := range factory.Environment {
environment = append(environment, env)
}
// Every strategy type should be handled here.
switch strategy.Type {
case deployapi.DeploymentStrategyTypeRecreate:
// Use the factory-configured image.
return &kapi.Container{
Image: factory.RecreateStrategyImage,
Env: environment,
}, nil
case deployapi.DeploymentStrategyTypeCustom:
// Use user-defined values from the strategy input.
for _, env := range strategy.CustomParams.Environment {
environment = append(environment, env)
}
return &kapi.Container{
Image: strategy.CustomParams.Image,
Env: environment,
}, nil
default:
return nil, fmt.Errorf("unsupported deployment strategy type: %s", strategy.Type)
}
}<|fim▁end|>
|
}
if count > 1 {
return false
}
|
<|file_name|>registration.py<|end_file_name|><|fim▁begin|>class FieldRegistry(object):
_registry = {}
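    # _registry is a class attribute, so every FieldRegistry instance shares
    # one model -> [fields] mapping; the methods below read and write it
    # through self.__class__.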
def add_field(self, model, field):
reg = self.__class__._registry.setdefault(model, [])
reg.append(field)
<|fim▁hole|> return self.__class__._registry.get(model, [])
def __contains__(self, model):
return model in self.__class__._registry<|fim▁end|>
|
def get_fields(self, model):
|
<|file_name|>field_data.py<|end_file_name|><|fim▁begin|>"""
:class:`~xblock.field_data.FieldData` subclasses used by the LMS
"""
from xblock.field_data import ReadOnlyFieldData, SplitFieldData
from xblock.fields import Scope
def lms_field_data(authored_data, student_data):
"""<|fim▁hole|> and all UserScope.NONE fields from `authored_data`. It also prevents
writing to `authored_data`.
"""
authored_data = ReadOnlyFieldData(authored_data)
return SplitFieldData({
Scope.content: authored_data,
Scope.settings: authored_data,
Scope.parent: authored_data,
Scope.children: authored_data,
Scope.user_state_summary: student_data,
Scope.user_state: student_data,
Scope.user_info: student_data,
Scope.preferences: student_data,
})<|fim▁end|>
|
Returns a new :class:`~xblock.field_data.FieldData` that
reads all UserScope.ONE and UserScope.ALL fields from `student_data`
|
<|file_name|>issue-79690.rs<|end_file_name|><|fim▁begin|>// ignore-32bit
// This test gives a different error on 32-bit architectures.
// stderr-per-bitwidth
union Transmute<T: Copy, U: Copy> {
t: T,
u: U,
}
trait Bar {
fn bar(&self) -> u32;
}
struct Foo {
foo: u32,
bar: bool,
}
impl Bar for Foo {
fn bar(&self) -> u32 {
self.foo
}
}
#[derive(Copy, Clone)]
struct Fat<'a>(&'a Foo, &'static VTable);
struct VTable {
size: Foo,
}
const FOO: &dyn Bar = &Foo {
foo: 128,
bar: false,
};
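// `FOO` is a wide pointer (data pointer plus vtable pointer); the transmute
// below reinterprets its vtable half as `&'static VTable`, which the
// compile-time validity check rejects (see the ERROR annotation).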
const G: Fat = unsafe { Transmute { t: FOO }.u };<|fim▁hole|>//~^ ERROR it is undefined behavior to use this value
fn main() {}<|fim▁end|>
| |
<|file_name|>ed25519_keys.py<|end_file_name|><|fim▁begin|>"""
<Program Name>
ed25519_keys.py
<Author>
Vladimir Diaz <[email protected]>
<Started>
September 24, 2013.
<Copyright>
See LICENSE for licensing information.
<Purpose>
The goal of this module is to support ed25519 signatures. ed25519 is an
elliptic-curve public key signature scheme, its main strength being small
signatures (64 bytes) and small public keys (32 bytes).
http://ed25519.cr.yp.to/
'ssl_crypto/ed25519_keys.py' calls 'ed25519.py', which is the pure Python
implementation of ed25519 optimized for a faster runtime. The Python
reference implementation is concise, but very slow (verifying signatures
takes ~9 seconds on an Intel Core 2 Duo @ 2.2 GHz x 2). The optimized
version can verify signatures in ~2 seconds.
http://ed25519.cr.yp.to/software.html
https://github.com/pyca/ed25519
Optionally, ed25519 cryptographic operations may be executed by PyNaCl, which
is a Python binding to the NaCl library and is faster than the pure python
implementation. Verifying signatures can take approximately 0.0009 seconds.
PyNaCl relies on the libsodium C library. PyNaCl is required for key and
signature generation. Verifying signatures may be done in pure Python.
https://github.com/pyca/pynacl
https://github.com/jedisct1/libsodium
http://nacl.cr.yp.to/
https://github.com/pyca/ed25519
The ed25519-related functions included here are generate(), create_signature()
and verify_signature(). The 'ed25519' and PyNaCl (i.e., 'nacl') modules used
by ed25519_keys.py perform the actual ed25519 computations and the functions
listed above can be viewed as an easy-to-use public interface.
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
# 'binascii' required for hexadecimal conversions. Signatures and
# public/private keys are hexlified.
import binascii
# TODO: The 'warnings' module needed to temporarily suppress user warnings
# raised by 'pynacl' (as of version 0.2.3). Warnings temporarily suppressed
# here to avoid confusing users with an unexpected error message that gives
# no indication of its source. These warnings are printed when using
# the repository tools, including for clients that request an update.
# http://docs.python.org/2/library/warnings.html#temporarily-suppressing-warnings
import warnings
# 'os' required to generate OS-specific randomness (os.urandom) suitable for
# cryptographic use.
# http://docs.python.org/2/library/os.html#miscellaneous-functions
import os
# Import the python implementation of the ed25519 algorithm provided by pyca,
# which is an optimized version of the one provided by ed25519's authors.
# Note: The pure Python version does not include protection against side-channel
# attacks. Verifying signatures can take approximately 2 seconds on an Intel
# Core 2 Duo @ 2.2 GHz x 2. Optionally, the PyNaCl module may be used to
# speed up ed25519 cryptographic operations.
# http://ed25519.cr.yp.to/software.html
# https://github.com/pyca/ed25519
# https://github.com/pyca/pynacl
#
# Import the PyNaCl library, if available. It is recommended this library be
# used over the pure python implementation of ed25519, due to its speedier
# routines and side-channel protections available in the libsodium library.
#
# TODO: Version 0.2.3 of 'pynacl' prints: "UserWarning: reimporting '...' might
# overwrite older definitions." when importing 'nacl.signing'. Suppress user
# warnings temporarily (at least until this issue is fixed by PyNaCl).
#
# Note: A 'pragma: no cover' comment is intended for test 'coverage'. Lines
# or code blocks with this comment should not be flagged as uncovered.
# pynacl will always be installed prior to running the unit tests.
with warnings.catch_warnings():
warnings.simplefilter('ignore')
try:
import nacl.signing
import nacl.encoding
# PyNaCl's 'cffi' dependency may raise an 'IOError' exception when importing
# 'nacl.signing'.
except (ImportError, IOError): # pragma: no cover
pass
# The optimized pure Python implementation of ed25519 provided by TUF. If
# PyNaCl cannot be imported and an attempt is made to use it in this module, a
# 'ssl_commons__exceptions.UnsupportedLibraryError' exception is raised.
from ._vendor.ed25519 import ed25519 as _vendor__ed25519__ed25519
# Digest objects needed to generate hashes.
from . import hash as ssl_crypto__hash
# Perform object format-checking.
from . import formats as ssl_crypto__formats
from ..ssl_commons import exceptions as ssl_commons__exceptions
# Supported ed25519 signing method: 'ed25519'. The pure Python implementation
# (i.e., 'ed25519') and PyNaCl (i.e., 'nacl', libsodium+Python bindings) modules
# are currently supported in the creation of 'ed25519' signatures.
# Previously, a distinction was made between signatures made by the pure Python
# implementation and PyNaCl.
_SUPPORTED_ED25519_SIGNING_METHODS = ['ed25519']
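# End-to-end flow (mirrors the doctests below):
#   public, seed = generate_public_and_private()
#   signature, method = create_signature(public, seed, b'data')
#   verify_signature(public, method, signature, b'data')  # -> True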
def generate_public_and_private():
"""
<Purpose>
Generate a pair of ed25519 public and private keys with PyNaCl. The public
    and private keys returned conform to 'ssl_crypto__formats.ED25519PUBLIC_SCHEMA' and
'ssl_crypto__formats.ED25519SEED_SCHEMA', respectively, and have the form:
'\xa2F\x99\xe0\x86\x80%\xc8\xee\x11\xb95T\xd9\...'
An ed25519 seed key is a random 32-byte string. Public keys are also 32
bytes.
>>> public, private = generate_public_and_private()
>>> ssl_crypto__formats.ED25519PUBLIC_SCHEMA.matches(public)
True
>>> ssl_crypto__formats.ED25519SEED_SCHEMA.matches(private)
True
<Arguments>
None.
<Exceptions>
ssl_commons__exceptions.UnsupportedLibraryError, if the PyNaCl ('nacl') module is unavailable.
NotImplementedError, if a randomness source is not found by 'os.urandom'.
<Side Effects>
The ed25519 keys are generated by first creating a random 32-byte seed
with os.urandom() and then calling PyNaCl's nacl.signing.SigningKey().
<Returns>
A (public, private) tuple that conform to 'ssl_crypto__formats.ED25519PUBLIC_SCHEMA'
and 'ssl_crypto__formats.ED25519SEED_SCHEMA', respectively.
"""
# Generate ed25519's seed key by calling os.urandom(). The random bytes
# returned should be suitable for cryptographic use and is OS-specific.
# Raise 'NotImplementedError' if a randomness source is not found.
# ed25519 seed keys are fixed at 32 bytes (256-bit keys).
# http://blog.mozilla.org/warner/2011/11/29/ed25519-keys/
seed = os.urandom(32)
public = None
# Generate the public key. PyNaCl (i.e., 'nacl' module) performs the actual
# key generation.
try:
nacl_key = nacl.signing.SigningKey(seed)
public = nacl_key.verify_key.encode(encoder=nacl.encoding.RawEncoder())
except NameError: # pragma: no cover
message = 'The PyNaCl library and/or its dependencies unavailable.'
raise ssl_commons__exceptions.UnsupportedLibraryError(message)
return public, seed
def create_signature(public_key, private_key, data):
"""
<Purpose>
Return a (signature, method) tuple, where the method is 'ed25519' and is
always generated by PyNaCl (i.e., 'nacl'). The signature returned conforms
to 'ssl_crypto__formats.ED25519SIGNATURE_SCHEMA', and has the form:
'\xae\xd7\x9f\xaf\x95{bP\x9e\xa8YO Z\x86\x9d...'
A signature is a 64-byte string.
>>> public, private = generate_public_and_private()
>>> data = b'The quick brown fox jumps over the lazy dog'
>>> signature, method = \
create_signature(public, private, data)
>>> ssl_crypto__formats.ED25519SIGNATURE_SCHEMA.matches(signature)
True
>>> method == 'ed25519'
True
>>> signature, method = \
create_signature(public, private, data)
>>> ssl_crypto__formats.ED25519SIGNATURE_SCHEMA.matches(signature)
True
>>> method == 'ed25519'
True
<Arguments>
public:
The ed25519 public key, which is a 32-byte string.
private:
The ed25519 private key, which is a 32-byte string.
data:
Data object used by create_signature() to generate the signature.
<Exceptions>
ssl_commons__exceptions.FormatError, if the arguments are improperly formatted.
ssl_commons__exceptions.CryptoError, if a signature cannot be created.
<Side Effects>
nacl.signing.SigningKey.sign() called to generate the actual signature.
<Returns>
    A (signature, method) tuple. The signature is the raw 64-byte ed25519
    signature and 'method' is the string 'ed25519'.
"""
# Does 'public_key' have the correct format?
# This check will ensure 'public_key' conforms to
# 'ssl_crypto__formats.ED25519PUBLIC_SCHEMA', which must have length 32 bytes.
# Raise 'ssl_commons__exceptions.FormatError' if the check fails.
ssl_crypto__formats.ED25519PUBLIC_SCHEMA.check_match(public_key)
# Is 'private_key' properly formatted?
ssl_crypto__formats.ED25519SEED_SCHEMA.check_match(private_key)
# Signing the 'data' object requires a seed and public key.
# nacl.signing.SigningKey.sign() generates the signature.
public = public_key
private = private_key
method = None
signature = None
# The private and public keys have been validated above by 'ssl_crypto__formats' and
# should be 32-byte strings.
method = 'ed25519'
try:
nacl_key = nacl.signing.SigningKey(private)
nacl_sig = nacl_key.sign(data)
signature = nacl_sig.signature
except NameError: # pragma: no cover
message = 'The PyNaCl library and/or its dependencies unavailable.'
raise ssl_commons__exceptions.UnsupportedLibraryError(message)
except (ValueError, TypeError, nacl.exceptions.CryptoError) as e:
message = 'An "ed25519" signature could not be created with PyNaCl.'
raise ssl_commons__exceptions.CryptoError(message + str(e))
return signature, method
def verify_signature(public_key, method, signature, data, use_pynacl=False):
"""
<Purpose>
Determine whether the private key corresponding to 'public_key' produced
'signature'. verify_signature() will use the public key, the 'method' and
'sig', and 'data' arguments to complete the verification.
>>> public, private = generate_public_and_private()
>>> data = b'The quick brown fox jumps over the lazy dog'
>>> signature, method = \
create_signature(public, private, data)
>>> verify_signature(public, method, signature, data, use_pynacl=False)
True
>>> verify_signature(public, method, signature, data, use_pynacl=True)
True
>>> bad_data = b'The sly brown fox jumps over the lazy dog'
>>> bad_signature, method = \
create_signature(public, private, bad_data)
>>> verify_signature(public, method, bad_signature, data, use_pynacl=False)
False
<Arguments>
public_key:
The public key is a 32-byte string.
method:
'ed25519' signature method generated by either the pure python
implementation (i.e., ed25519.py) or PyNacl (i.e., 'nacl').
signature:
The signature is a 64-byte string.
data:
Data object used by ssl_crypto.ed25519_keys.create_signature() to generate
'signature'. 'data' is needed here to verify the signature.
use_pynacl:
True, if the ed25519 signature should be verified by PyNaCl. False,
if the signature should be verified with the pure Python implementation
of ed25519 (slower).
<Exceptions>
ssl_commons__exceptions.UnknownMethodError. Raised if the signing method used by
'signature' is not one supported by ssl_crypto.ed25519_keys.create_signature().
ssl_commons__exceptions.FormatError. Raised if the arguments are improperly formatted.
<Side Effects>
ssl_crypto._vendor.ed25519.ed25519.checkvalid() called to do the actual
verification. nacl.signing.VerifyKey.verify() called if 'use_pynacl' is
True.
<Returns>
Boolean. True if the signature is valid, False otherwise.
"""
# Does 'public_key' have the correct format?
# This check will ensure 'public_key' conforms to
# 'ssl_crypto__formats.ED25519PUBLIC_SCHEMA', which must have length 32 bytes.
# Raise 'ssl_commons__exceptions.FormatError' if the check fails.
ssl_crypto__formats.ED25519PUBLIC_SCHEMA.check_match(public_key)
# Is 'method' properly formatted?
ssl_crypto__formats.NAME_SCHEMA.check_match(method)
# Is 'signature' properly formatted?
ssl_crypto__formats.ED25519SIGNATURE_SCHEMA.check_match(signature)
# Is 'use_pynacl' properly formatted?
ssl_crypto__formats.BOOLEAN_SCHEMA.check_match(use_pynacl)
# Verify 'signature'. Before returning the Boolean result,
# ensure 'ed25519' was used as the signing method.
# Raise 'ssl_commons__exceptions.UnsupportedLibraryError' if 'use_pynacl' is True but 'nacl' is
# unavailable.
public = public_key
valid_signature = False
if method in _SUPPORTED_ED25519_SIGNING_METHODS:
if use_pynacl:
try:
nacl_verify_key = nacl.signing.VerifyKey(public)
nacl_message = nacl_verify_key.verify(data, signature)
valid_signature = True
except NameError: # pragma: no cover
message = 'The PyNaCl library and/or its dependencies unavailable.'
raise ssl_commons__exceptions.UnsupportedLibraryError(message)
except nacl.exceptions.BadSignatureError:
pass
# Verify 'ed25519' signature with the pure Python implementation.
else:
try:
_vendor__ed25519__ed25519.checkvalid(signature, data, public)
valid_signature = True
# The pure Python implementation raises 'Exception' if 'signature' is
# invalid.
except Exception as e:
pass<|fim▁hole|> message = 'Unsupported ed25519 signing method: '+repr(method)+'.\n'+ \
'Supported methods: '+repr(_SUPPORTED_ED25519_SIGNING_METHODS)+'.'
raise ssl_commons__exceptions.UnknownMethodError(message)
return valid_signature
if __name__ == '__main__':
# The interactive sessions of the documentation strings can
# be tested by running 'ed25519_keys.py' as a standalone module.
# python -B ed25519_keys.py
import doctest
doctest.testmod()<|fim▁end|>
|
else:
|
<|file_name|>test_trainer_utils.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2018 the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import unittest
import numpy as np
from transformers.file_utils import is_torch_available
from transformers.testing_utils import require_torch
if is_torch_available():
import torch
from torch import nn
from torch.utils.data import IterableDataset
from transformers.modeling_outputs import SequenceClassifierOutput
from transformers.tokenization_utils_base import BatchEncoding
from transformers.trainer_pt_utils import (
DistributedLengthGroupedSampler,
DistributedSamplerWithLoop,
DistributedTensorGatherer,
IterableDatasetShard,
LabelSmoother,
LengthGroupedSampler,
SequentialDistributedSampler,
ShardSampler,
get_parameter_names,
)
class TstLayer(nn.Module):
def __init__(self, hidden_size):
super().__init__()
self.linear1 = nn.Linear(hidden_size, hidden_size)
self.ln1 = nn.LayerNorm(hidden_size)
self.linear2 = nn.Linear(hidden_size, hidden_size)
self.ln2 = nn.LayerNorm(hidden_size)
self.bias = nn.Parameter(torch.zeros(hidden_size))
def forward(self, x):
h = self.ln1(nn.functional.relu(self.linear1(x)))
h = nn.functional.relu(self.linear2(x))
return self.ln2(x + h + self.bias)
class RandomIterableDataset(IterableDataset):
# For testing, an iterable dataset of random length
def __init__(self, p_stop=0.01, max_length=1000):
self.p_stop = p_stop
self.max_length = max_length
self.generator = torch.Generator()
def __iter__(self):
count = 0
stop = False
while not stop and count < self.max_length:
yield count
count += 1
number = torch.rand(1, generator=self.generator).item()
stop = number < self.p_stop
@require_torch
class TrainerUtilsTest(unittest.TestCase):
def test_distributed_tensor_gatherer(self):
# Simulate a result with a dataset of size 21, 4 processes and chunks of lengths 2, 3, 1
world_size = 4
num_samples = 21
input_indices = [
[0, 1, 6, 7, 12, 13, 18, 19],
[2, 3, 4, 8, 9, 10, 14, 15, 16, 20, 0, 1],
[5, 11, 17, 2],
]
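        # Each sublist is one add_arrays() call: a chunk per process (chunk
        # sizes 2, 3 and 1), so indices 0, 1 and 2 reappear at the end because
        # the 21 samples are padded up to 24, the next multiple of world_size.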
predictions = np.random.normal(size=(num_samples, 13))
gatherer = DistributedTensorGatherer(world_size=world_size, num_samples=num_samples)
for indices in input_indices:
gatherer.add_arrays(predictions[indices])
result = gatherer.finalize()
self.assertTrue(np.array_equal(result, predictions))
# With nested tensors
gatherer = DistributedTensorGatherer(world_size=world_size, num_samples=num_samples)
for indices in input_indices:
gatherer.add_arrays([predictions[indices], [predictions[indices], predictions[indices]]])
result = gatherer.finalize()
self.assertTrue(isinstance(result, list))
        self.assertEqual(len(result), 2)
self.assertTrue(isinstance(result[1], list))
        self.assertEqual(len(result[1]), 2)
self.assertTrue(np.array_equal(result[0], predictions))
self.assertTrue(np.array_equal(result[1][0], predictions))
self.assertTrue(np.array_equal(result[1][1], predictions))
def test_distributed_tensor_gatherer_different_shapes(self):
# Simulate a result with a dataset of size 21, 4 processes and chunks of lengths 2, 3, 1
world_size = 4
num_samples = 21
input_indices = [
[0, 1, 6, 7, 12, 13, 18, 19],
[2, 3, 4, 8, 9, 10, 14, 15, 16, 20, 0, 1],
[5, 11, 17, 2],
]
sequence_lengths = [8, 10, 13]
predictions = np.random.normal(size=(num_samples, 13))
gatherer = DistributedTensorGatherer(world_size=world_size, num_samples=num_samples)
for indices, seq_length in zip(input_indices, sequence_lengths):
gatherer.add_arrays(predictions[indices, :seq_length])
result = gatherer.finalize()
# Remove the extra samples added at the end for a round multiple of num processes.
actual_indices = [input_indices[0], input_indices[1][:-2], input_indices[2][:-1]]
for indices, seq_length in zip(actual_indices, sequence_lengths):
self.assertTrue(np.array_equal(result[indices, :seq_length], predictions[indices, :seq_length]))
# With nested tensors
predictions = np.random.normal(size=(num_samples, 13))
gatherer = DistributedTensorGatherer(world_size=world_size, num_samples=num_samples)
for indices, seq_length in zip(input_indices, sequence_lengths):
gatherer.add_arrays([predictions[indices, :seq_length], predictions[indices]])
result = gatherer.finalize()
for indices, seq_length in zip(actual_indices, sequence_lengths):
self.assertTrue(np.array_equal(result[0][indices, :seq_length], predictions[indices, :seq_length]))
self.assertTrue(np.array_equal(result[1], predictions))
# Check if works if varying seq_length is second
gatherer = DistributedTensorGatherer(world_size=world_size, num_samples=num_samples)
for indices, seq_length in zip(input_indices, sequence_lengths):
gatherer.add_arrays([predictions[indices], predictions[indices, :seq_length]])
result = gatherer.finalize()
self.assertTrue(np.array_equal(result[0], predictions))
for indices, seq_length in zip(actual_indices, sequence_lengths):
self.assertTrue(np.array_equal(result[1][indices, :seq_length], predictions[indices, :seq_length]))
def test_label_smoothing(self):
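        # Label smoothing mixes the hard cross-entropy loss with the mean
        # negative log-probability over all classes:
        #   loss = (1 - epsilon) * ce_loss + epsilon * mean(-log_softmax(logits))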
epsilon = 0.1<|fim▁hole|> random_logits = torch.randn(4, 5, num_labels)
random_labels = torch.randint(0, num_labels, (4, 5))
loss = nn.functional.cross_entropy(random_logits.view(-1, num_labels), random_labels.view(-1))
model_output = SequenceClassifierOutput(logits=random_logits)
label_smoothed_loss = LabelSmoother(0.1)(model_output, random_labels)
log_probs = -nn.functional.log_softmax(random_logits, dim=-1)
expected_loss = (1 - epsilon) * loss + epsilon * log_probs.mean()
self.assertTrue(torch.allclose(label_smoothed_loss, expected_loss))
# With a few -100 labels
random_labels[0, 1] = -100
random_labels[2, 1] = -100
random_labels[2, 3] = -100
loss = nn.functional.cross_entropy(random_logits.view(-1, num_labels), random_labels.view(-1))
model_output = SequenceClassifierOutput(logits=random_logits)
label_smoothed_loss = LabelSmoother(0.1)(model_output, random_labels)
log_probs = -nn.functional.log_softmax(random_logits, dim=-1)
# Mask the log probs with the -100 labels
log_probs[0, 1] = 0.0
log_probs[2, 1] = 0.0
log_probs[2, 3] = 0.0
expected_loss = (1 - epsilon) * loss + epsilon * log_probs.sum() / (num_labels * 17)
self.assertTrue(torch.allclose(label_smoothed_loss, expected_loss))
def test_group_by_length(self):
# Get some inputs of random lengths
lengths = torch.randint(0, 25, (100,)).tolist()
# Put one bigger than the others to check it ends up in first position
lengths[32] = 50
indices = list(LengthGroupedSampler(4, lengths=lengths))
# The biggest element should be first
self.assertEqual(lengths[indices[0]], 50)
# The indices should be a permutation of range(100)
self.assertEqual(list(sorted(indices)), list(range(100)))
def test_group_by_length_with_dict(self):
# Get some inputs of random lengths
data = []
for _ in range(6):
input_ids = torch.randint(0, 25, (100,)).tolist()
data.append({"input_ids": input_ids})
# Put one bigger than the others to check it ends up in first position
data[3]["input_ids"] = torch.randint(0, 25, (105,)).tolist()
indices = list(LengthGroupedSampler(4, dataset=data))
# The biggest element should be first
self.assertEqual(len(data[indices[0]]["input_ids"]), 105)
# The indices should be a permutation of range(6)
self.assertEqual(list(sorted(indices)), list(range(6)))
def test_group_by_length_with_batch_encoding(self):
# Get some inputs of random lengths
data = []
for _ in range(6):
input_ids = torch.randint(0, 25, (100,)).tolist()
data.append(BatchEncoding({"input_ids": input_ids}))
# Put one bigger than the others to check it ends up in first position
data[3]["input_ids"] = torch.randint(0, 25, (105,)).tolist()
indices = list(LengthGroupedSampler(4, dataset=data))
# The biggest element should be first
self.assertEqual(len(data[indices[0]]["input_ids"]), 105)
# The indices should be a permutation of range(6)
self.assertEqual(list(sorted(indices)), list(range(6)))
def test_distributed_length_grouped(self):
# Get some inputs of random lengths
lengths = torch.randint(0, 25, (100,)).tolist()
# Put one bigger than the others to check it ends up in first position
lengths[32] = 50
indices_process_0 = list(DistributedLengthGroupedSampler(4, num_replicas=2, rank=0, lengths=lengths))
indices_process_1 = list(DistributedLengthGroupedSampler(4, num_replicas=2, rank=1, lengths=lengths))
# The biggest element should be first
self.assertEqual(lengths[indices_process_0[0]], 50)
# The indices should be a permutation of range(100)
self.assertEqual(list(sorted(indices_process_0 + indices_process_1)), list(range(100)))
def test_get_parameter_names(self):
model = nn.Sequential(TstLayer(128), nn.ModuleList([TstLayer(128), TstLayer(128)]))
# fmt: off
self.assertEqual(
get_parameter_names(model, [nn.LayerNorm]),
['0.linear1.weight', '0.linear1.bias', '0.linear2.weight', '0.linear2.bias', '0.bias', '1.0.linear1.weight', '1.0.linear1.bias', '1.0.linear2.weight', '1.0.linear2.bias', '1.0.bias', '1.1.linear1.weight', '1.1.linear1.bias', '1.1.linear2.weight', '1.1.linear2.bias', '1.1.bias']
)
# fmt: on
def test_distributed_sampler_with_loop(self):
batch_size = 16
for length in [23, 64, 123]:
dataset = list(range(length))
shard1 = DistributedSamplerWithLoop(dataset, batch_size, num_replicas=2, rank=0)
shard2 = DistributedSamplerWithLoop(dataset, batch_size, num_replicas=2, rank=1)
# Set seeds
shard1.set_epoch(0)
shard2.set_epoch(0)
# Sample
samples1 = list(shard1)
samples2 = list(shard2)
self.assertTrue(len(samples1) % batch_size == 0)
self.assertTrue(len(samples2) % batch_size == 0)
total = []
for sample1, sample2 in zip(samples1, samples2):
total += [sample1, sample2]
self.assertEqual(set(total[:length]), set(dataset))
self.assertEqual(set(total[length:]), set(total[: (len(total) - length)]))
def test_sequential_distributed_sampler(self):
batch_size = 16
for length in [23, 64, 123]:
dataset = list(range(length))
shard1 = SequentialDistributedSampler(dataset, num_replicas=2, rank=0)
shard2 = SequentialDistributedSampler(dataset, num_replicas=2, rank=1)
# Sample
samples1 = list(shard1)
samples2 = list(shard2)
total = samples1 + samples2
self.assertListEqual(total[:length], dataset)
self.assertListEqual(total[length:], dataset[: (len(total) - length)])
# With a batch_size passed
shard1 = SequentialDistributedSampler(dataset, num_replicas=2, rank=0, batch_size=batch_size)
shard2 = SequentialDistributedSampler(dataset, num_replicas=2, rank=1, batch_size=batch_size)
# Sample
samples1 = list(shard1)
samples2 = list(shard2)
self.assertTrue(len(samples1) % batch_size == 0)
self.assertTrue(len(samples2) % batch_size == 0)
total = samples1 + samples2
self.assertListEqual(total[:length], dataset)
self.assertListEqual(total[length:], dataset[: (len(total) - length)])
def check_iterable_dataset_shard(self, dataset, batch_size, drop_last, num_processes=2, epoch=0):
# Set the seed for the base dataset to get the proper reference.
dataset.generator.manual_seed(epoch)
reference = list(dataset)
shards = [
IterableDatasetShard(
dataset, batch_size=batch_size, drop_last=drop_last, num_processes=num_processes, process_index=i
)
for i in range(num_processes)
]
for shard in shards:
shard.set_epoch(epoch)
shard_lists = [list(shard) for shard in shards]
for shard in shard_lists:
# All shards have a number of samples that is a round multiple of batch size
self.assertTrue(len(shard) % batch_size == 0)
# All shards have the same number of samples
self.assertEqual(len(shard), len(shard_lists[0]))
for shard in shards:
# All shards know the total number of samples
self.assertEqual(shard.num_examples, len(reference))
observed = []
for idx in range(0, len(shard_lists[0]), batch_size):
for shard in shard_lists:
observed += shard[idx : idx + batch_size]
# If drop_last is False we loop through samples at the beginning to have a size that is a round multiple of
# batch_size
if not drop_last:
while len(reference) < len(observed):
reference += reference
self.assertListEqual(observed, reference[: len(observed)])
# Check equivalence between IterableDataset and ShardSampler
dataset.generator.manual_seed(epoch)
reference = list(dataset)
sampler_shards = [
ShardSampler(
reference, batch_size=batch_size, drop_last=drop_last, num_processes=num_processes, process_index=i
)
for i in range(num_processes)
]
for shard, sampler_shard in zip(shard_lists, sampler_shards):
self.assertListEqual(shard, list(sampler_shard))
def test_iterable_dataset_shard(self):
dataset = RandomIterableDataset()
self.check_iterable_dataset_shard(dataset, 4, drop_last=True, num_processes=2, epoch=0)
self.check_iterable_dataset_shard(dataset, 4, drop_last=False, num_processes=2, epoch=0)
self.check_iterable_dataset_shard(dataset, 4, drop_last=True, num_processes=3, epoch=42)
self.check_iterable_dataset_shard(dataset, 4, drop_last=False, num_processes=3, epoch=42)
def test_iterable_dataset_shard_with_length(self):
sampler_shards = [
IterableDatasetShard(list(range(100)), batch_size=4, drop_last=True, num_processes=2, process_index=i)
for i in range(2)
]
# Build expected shards: each process will have batches of size 4 until there is not enough elements to
# form two full batches (so we stop at 96 = (100 // (4 * 2)) * 4)
expected_shards = [[], []]
current_shard = 0
for i in range(0, 96, 4):
expected_shards[current_shard].extend(list(range(i, i + 4)))
current_shard = 1 - current_shard
self.assertListEqual([list(shard) for shard in sampler_shards], expected_shards)
self.assertListEqual([len(shard) for shard in sampler_shards], [len(shard) for shard in expected_shards])
sampler_shards = [
IterableDatasetShard(list(range(100)), batch_size=4, drop_last=False, num_processes=2, process_index=i)
for i in range(2)
]
# When drop_last=False, we get two last full batches by looping back to the beginning.
expected_shards[0].extend(list(range(96, 100)))
expected_shards[1].extend(list(range(0, 4)))
self.assertListEqual([list(shard) for shard in sampler_shards], expected_shards)
self.assertListEqual([len(shard) for shard in sampler_shards], [len(shard) for shard in expected_shards])
def check_shard_sampler(self, dataset, batch_size, drop_last, num_processes=2):
shards = [
ShardSampler(
dataset, batch_size=batch_size, drop_last=drop_last, num_processes=num_processes, process_index=i
)
for i in range(num_processes)
]
shard_lists = [list(shard) for shard in shards]
for shard in shard_lists:
# All shards have a number of samples that is a round multiple of batch size
self.assertTrue(len(shard) % batch_size == 0)
# All shards have the same number of samples
self.assertEqual(len(shard), len(shard_lists[0]))
observed = []
for idx in range(0, len(shard_lists[0]), batch_size):
for shard in shard_lists:
observed += shard[idx : idx + batch_size]
# If drop_last is False we loop through samples at the beginning to have a size that is a round multiple of
# batch_size
reference = copy.copy(dataset)
if not drop_last:
while len(reference) < len(observed):
reference += reference
self.assertListEqual(observed, reference[: len(observed)])
def test_shard_sampler(self):
for n_elements in [64, 123]:
dataset = list(range(n_elements))
self.check_shard_sampler(dataset, 4, drop_last=True, num_processes=2)
self.check_shard_sampler(dataset, 4, drop_last=False, num_processes=2)
self.check_shard_sampler(dataset, 4, drop_last=True, num_processes=3)
self.check_shard_sampler(dataset, 4, drop_last=False, num_processes=3)<|fim▁end|>
|
num_labels = 12
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# ERPNext - web based ERP (http://erpnext.com)
# Copyright (C) 2012 Web Notes Technologies Pvt Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# default settings that can be made for a user.
from __future__ import unicode_literals
<|fim▁hole|># product_name = "ERPNext"
product_name = "letzERP"
user_defaults = {
"Company": "company",
"Territory": "territory"
}<|fim▁end|>
|
import frappe
|
<|file_name|>lasduplicate.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
***************************************************************************
lasduplicate.py
---------------------
Date : September 2013
Copyright : (C) 2013 by Martin Isenburg
Email : martin near rapidlasso point com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Martin Isenburg'
__date__ = 'September 2013'
__copyright__ = '(C) 2013, Martin Isenburg'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from LAStoolsUtils import LAStoolsUtils
from LAStoolsAlgorithm import LAStoolsAlgorithm
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterFile
class lasduplicate(LAStoolsAlgorithm):
LOWEST_Z = "LOWEST_Z"
UNIQUE_XYZ = "UNIQUE_XYZ"
SINGLE_RETURNS = "SINGLE_RETURNS"
RECORD_REMOVED = "RECORD_REMOVED"
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('lasduplicate')
self.group, self.i18n_group = self.trAlgorithm('LAStools')
self.addParametersVerboseGUI()
self.addParametersPointInputGUI()
self.addParameter(ParameterBoolean(lasduplicate.LOWEST_Z,
self.tr("keep duplicate with lowest z coordinate"), False))
self.addParameter(ParameterBoolean(lasduplicate.UNIQUE_XYZ,
self.tr("only remove duplicates in x y and z"), False))
self.addParameter(ParameterBoolean(lasduplicate.SINGLE_RETURNS,
self.tr("mark surviving duplicate as single return"), False))
self.addParameter(ParameterFile(lasduplicate.RECORD_REMOVED,
self.tr("record removed duplicates to LAS/LAZ file")))
self.addParametersPointOutputGUI()
self.addParametersAdditionalGUI()
def processAlgorithm(self, progress):
commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "lasduplicate")]
self.addParametersVerboseCommands(commands)
self.addParametersPointInputCommands(commands)
if self.getParameterValue(lasduplicate.LOWEST_Z):
commands.append("-lowest_z")
if self.getParameterValue(lasduplicate.UNIQUE_XYZ):
commands.append("-unique_xyz")
if self.getParameterValue(lasduplicate.SINGLE_RETURNS):
commands.append("-single_returns")
record_removed = self.getParameterValue(lasduplicate.RECORD_REMOVED)
if record_removed is not None and record_removed != "":
commands.append("-record_removed")<|fim▁hole|> commands.append(record_removed)
self.addParametersPointOutputCommands(commands)
self.addParametersAdditionalCommands(commands)
LAStoolsUtils.runLAStools(commands, progress)<|fim▁end|>
| |
<|file_name|>MacPaginationVerticalScrollBarUI.java<|end_file_name|><|fim▁begin|>package org.openswing.swing.table.client;
import java.awt.*;
import javax.swing.*;
import javax.swing.plaf.*;
import com.sun.java.swing.plaf.mac.*;
/**
* <p>Title: OpenSwing Framework</p>
* <p>Description: Vertical scrollbar UI, used inside the pagination vertical scrollbar of the grid control, for Mac LnF.</p>
* <p>Copyright: Copyright (C) 2006 Mauro Carniel</p>
*
* <p> This file is part of OpenSwing Framework.
* This library is free software; you can redistribute it and/or
* modify it under the terms of the (LGPL) Lesser General Public
* License as published by the Free Software Foundation;
*
* GNU LESSER GENERAL PUBLIC LICENSE
* Version 2.1, February 1999
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the Free
* Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* The author may be contacted at:
* [email protected]</p>
*
* @author Mauro Carniel
* @version 1.0
*/
public class MacPaginationVerticalScrollBarUI extends MacScrollBarUI implements PaginationVerticalScrollbarUI {
protected JButton nextPgButton;
protected JButton prevPgButton;
public MacPaginationVerticalScrollBarUI() {
super();
}
public static ComponentUI createUI(JComponent c) {
return new MacPaginationVerticalScrollBarUI();
}
protected JButton createPageButton(int orientation) {
return new PageArrowButton(orientation);
}
protected void installDefaults() {
super.installDefaults();
nextPgButton = createPageButton(SOUTH);
prevPgButton = createPageButton(NORTH);
scrollbar.add(prevPgButton);
scrollbar.add(nextPgButton);
}
protected void layoutVScrollbar(JScrollBar sb) {
Dimension sbSize = sb.getSize();
Insets sbInsets = sb.getInsets();
/*
* Width and left edge of the buttons and thumb.
*/
int itemW = sbSize.width - (sbInsets.left + sbInsets.right);
int itemX = sbInsets.left;
/* Nominal locations of the buttons, assuming their preferred
* size will fit.
*/
int prevPgButtonH = prevPgButton.getPreferredSize().height;
int prevPgButtonY = sbInsets.top;
int decrButtonH = decrButton.getPreferredSize().height;
int decrButtonY = prevPgButtonY+prevPgButtonH;
int incrButtonH = incrButton.getPreferredSize().height;
int nextPgButtonH = nextPgButton.getPreferredSize().height;
int incrButtonY = sbSize.height - (sbInsets.bottom + incrButtonH+nextPgButtonH);
int nextPgButtonY = sbSize.height - (sbInsets.bottom + nextPgButtonH);
/* The thumb must fit within the height left over after we
* subtract the preferredSize of the buttons and the insets.
*/
int sbInsetsH = sbInsets.top + sbInsets.bottom;
int sbButtonsH = decrButtonH + incrButtonH +prevPgButtonH +nextPgButtonH;
float trackH = sbSize.height - (sbInsetsH + sbButtonsH);
/* Compute the height and origin of the thumb. The case
* where the thumb is at the bottom edge is handled specially
* to avoid numerical problems in computing thumbY. Enforce
     * the thumb's min/max dimensions. If the thumb doesn't
* fit in the track (trackH) we'll hide it later.
*/
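    /* Worked example (hypothetical numbers): with min=0, range=100,
     * extent=20 and trackH=200, thumbH = 200 * (20/100) = 40 pixels,
     * then clamped to the min/max thumb sizes. */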
float min = sb.getMinimum();
float extent = sb.getVisibleAmount();
float range = sb.getMaximum() - min;
float value = sb.getValue();
int thumbH = (range <= 0)
? getMaximumThumbSize().height : (int)(trackH * (extent / range));
thumbH = Math.max(thumbH, getMinimumThumbSize().height);
thumbH = Math.min(thumbH, getMaximumThumbSize().height);
int thumbY = incrButtonY - thumbH;
if (sb.getValue() < (sb.getMaximum() - sb.getVisibleAmount())) {
float thumbRange = trackH - thumbH;
thumbY = (int)(0.5f + (thumbRange * ((value - min) / (range - extent))));
thumbY += decrButtonY + decrButtonH;
}
/* If the buttons don't fit, allocate half of the available
* space to each and move the lower one (incrButton) down.
*/
int sbAvailButtonH = (sbSize.height - sbInsetsH);
if (sbAvailButtonH < sbButtonsH) {
incrButtonH = decrButtonH = sbAvailButtonH / 2;
incrButtonY = sbSize.height - (sbInsets.bottom + incrButtonH);
}
prevPgButton.setBounds(itemX, prevPgButtonY, itemW, prevPgButtonH);
decrButton.setBounds(itemX, decrButtonY, itemW, decrButtonH);
incrButton.setBounds(itemX, incrButtonY, itemW, incrButtonH);
nextPgButton.setBounds(itemX, nextPgButtonY, itemW, nextPgButtonH);
/* Update the trackRect field.
*/
int itrackY = decrButtonY + decrButtonH;
int itrackH = incrButtonY - itrackY;
trackRect.setBounds(itemX, itrackY, itemW, itrackH);
   /* If the thumb isn't going to fit, zero its bounds.  Otherwise
* make sure it fits between the buttons. Note that setting the
* thumbs bounds will cause a repaint.
*/
if(thumbH >= (int)trackH) {
setThumbBounds(0, 0, 0, 0);
}
else {
if ((thumbY + thumbH) > incrButtonY) {
thumbY = incrButtonY - thumbH;
}
if (thumbY < (decrButtonY + decrButtonH)) {
thumbY = decrButtonY + decrButtonH + 1;
}
setThumbBounds(itemX, thumbY, itemW, thumbH);
}
}
protected void installListeners() {
super.installListeners();
}
protected void uninstallListeners() {
super.uninstallListeners();
}
public JButton getNextPgButton() {
return nextPgButton;
}
public JButton getDecrButton() {
return decrButton;
}
public JButton getIncrButton() {
return incrButton;
}
public JButton getPrevPgButton() {
return prevPgButton;
}
/**
* <p>Title: OpenSwing Framework</p>
* <p>Description: Inner class used to render the scrollbar buttons.</p>
* @author Mauro Carniel
* @version 1.0
*/
class PageArrowButton extends MacScrollButton {
private Color background = UIManager.getColor("ScrollBar.arrowBackground");
private Color highlight = UIManager.getColor("ScrollBar.arrowHighlight");
private Color shadow = UIManager.getColor("ScrollBar.arrowShadow");
private Color pressedBackground = UIManager.getColor("ScrollBar.pressedArrowBackground");
private Color pressedHighlight = UIManager.getColor("ScrollBar.pressedArrowHighlight");
private Color pressedShadow = UIManager.getColor("ScrollBar.pressedArrowShadow");
private Color arrowColor = UIManager.getColor("ScrollBar.arrowColor");
private int buttonWidth;
private final ColorUIResource gray0 = new ColorUIResource(238, 238, 238);
private final ColorUIResource gray3 = new ColorUIResource(187, 187, 187);
private final ColorUIResource gray6 = new ColorUIResource(136, 136, 136);
private final ColorUIResource gray9 = new ColorUIResource(85, 85, 85);
public PageArrowButton(int direction) {
super(direction,incrButton.getPreferredSize().width);
buttonWidth = incrButton.getPreferredSize().width;
}
public void paint(Graphics g) {
if (background==null)
background = Color.white;
if (highlight==null)
highlight = new Color(240,240,240);
if (shadow==null)
shadow = new Color(20,20,20);
if (pressedHighlight==null)
pressedHighlight = new Color(200,200,200);
if (pressedShadow==null)
pressedShadow = new Color(10,10,10);
if (arrowColor==null)
arrowColor = new Color(1,1,1);
ButtonModel buttonmodel = getModel();
boolean flag = buttonmodel.isArmed() && buttonmodel.isPressed();
if(isEnabled())
g.setColor(flag ? pressedBackground : background);
else
g.setColor(gray0);
g.fillRect(0, 0, buttonWidth, buttonWidth);
g.setColor(flag ? pressedShadow : highlight);
g.drawLine(0, 0, buttonWidth - 2, 0);
g.drawLine(0, 0, 0, buttonWidth - 2);
g.setColor(flag ? pressedHighlight : shadow);
g.drawLine(buttonWidth - 1, 1, buttonWidth - 1, buttonWidth - 1);
g.drawLine(1, buttonWidth - 1, buttonWidth - 1, buttonWidth - 1);
      g.setColor(isEnabled() ? arrowColor : gray6);
int h = -3;
switch(getDirection())
{
case 1: // '\001'
g.drawLine(6, 5+h, 7, 5+h);
g.drawLine(5, 6+h, 8, 6+h);
g.drawLine(4, 7+h, 9, 7+h);
g.drawLine(3, 8+h, 10, 8+h);
h = getHeight()-14;
g.drawLine(6, 5+h, 7, 5+h);
g.drawLine(5, 6+h, 8, 6+h);
g.drawLine(4, 7+h, 9, 7+h);
g.drawLine(3, 8+h, 10, 8+h);
<|fim▁hole|> g.drawLine(4, 6+h, 9, 6+h);
g.drawLine(5, 7+h, 8, 7+h);
g.drawLine(6, 8+h, 7, 8+h);
h = getHeight()-14;
g.drawLine(3, 5+h, 10, 5+h);
g.drawLine(4, 6+h, 9, 6+h);
g.drawLine(5, 7+h, 8, 7+h);
g.drawLine(6, 8+h, 7, 8+h);
break;
}
}
}
}<|fim▁end|>
|
break;
case 5: // '\005'
g.drawLine(3, 5+h, 10, 5+h);
|
<|file_name|>ForceReply.py<|end_file_name|><|fim▁begin|>__author__ = 'harsha'
class ForceReply(object):
def __init__(self, force_reply, selective):
self.force_reply = force_reply
self.selective = selective
def get_force_reply(self):
return self.force_reply
def get_selective(self):
return self.selective
<|fim▁hole|><|fim▁end|>
|
def __str__(self):
return str(self.__dict__)
|
<|file_name|>service.py<|end_file_name|><|fim▁begin|>"""
.. module: lemur.certificate.service
:platform: Unix
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Kevin Glisson <[email protected]>
"""
import arrow
from flask import current_app
from sqlalchemy import func, or_, not_, cast, Integer
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from lemur import database
from lemur.extensions import metrics, signals
from lemur.plugins.base import plugins
from lemur.common.utils import generate_private_key, truthiness
from lemur.roles.models import Role
from lemur.domains.models import Domain
from lemur.authorities.models import Authority
from lemur.destinations.models import Destination
from lemur.certificates.models import Certificate
from lemur.notifications.models import Notification
from lemur.pending_certificates.models import PendingCertificate
from lemur.certificates.schemas import CertificateOutputSchema, CertificateInputSchema
from lemur.roles import service as role_service
csr_created = signals.signal('csr_created', "CSR generated")
csr_imported = signals.signal('csr_imported', "CSR imported from external source")
certificate_issued = signals.signal('certificate_issued', "Authority issued a certificate")
certificate_imported = signals.signal('certificate_imported', "Certificate imported from external source")
def get(cert_id):
"""
Retrieves certificate by its ID.
:param cert_id:
:return:
"""
return database.get(Certificate, cert_id)
def get_by_name(name):
"""
Retrieves certificate by its Name.
:param name:
:return:
"""
return database.get(Certificate, name, field='name')
def get_by_serial(serial):
"""
    Retrieves certificate by its serial.
:param serial:
:return:
"""
if isinstance(serial, int):
# although serial is a number, the DB column is String(128)<|fim▁hole|> serial = str(serial)
return Certificate.query.filter(Certificate.serial == serial).all()
def delete(cert_id):
"""
    Deletes a certificate.
:param cert_id:
"""
database.delete(get(cert_id))
def get_all_certs():
"""
Retrieves all certificates within Lemur.
:return:
"""
return Certificate.query.all()
def get_all_pending_cleaning(source):
"""
Retrieves all certificates that are available for cleaning.
:param source:
:return:
"""
return Certificate.query.filter(Certificate.sources.any(id=source.id))\
.filter(not_(Certificate.endpoints.any())).all()
def get_all_pending_reissue():
"""
Retrieves all certificates that need to be rotated.
    A certificate must be X days from expiration; its rotation policy
    determines how many days from expiration the certificate must be
    for rotation to be pending.
:return:
"""
return Certificate.query.filter(Certificate.rotation == True)\
.filter(not_(Certificate.replaced.any()))\
.filter(Certificate.in_rotation_window == True).all() # noqa
def find_duplicates(cert):
"""
Finds certificates that already exist within Lemur. We do this by looking for
certificate bodies that are the same. This is the most reliable way to determine
if a certificate is already being tracked by Lemur.
:param cert:
:return:
"""
if cert['chain']:
return Certificate.query.filter_by(body=cert['body'].strip(), chain=cert['chain'].strip()).all()
else:
return Certificate.query.filter_by(body=cert['body'].strip(), chain=None).all()
def export(cert, export_plugin):
"""
Exports a certificate to the requested format. This format
may be a binary format.
:param export_plugin:
:param cert:
:return:
"""
plugin = plugins.get(export_plugin['slug'])
return plugin.export(cert.body, cert.chain, cert.private_key, export_plugin['pluginOptions'])
def update(cert_id, **kwargs):
"""
Updates a certificate
:param cert_id:
:return:
"""
cert = get(cert_id)
for key, value in kwargs.items():
setattr(cert, key, value)
return database.update(cert)
def create_certificate_roles(**kwargs):
    # create a role for the owner and assign it
owner_role = role_service.get_by_name(kwargs['owner'])
if not owner_role:
owner_role = role_service.create(
kwargs['owner'],
description="Auto generated role based on owner: {0}".format(kwargs['owner'])
)
# ensure that the authority's owner is also associated with the certificate
if kwargs.get('authority'):
authority_owner_role = role_service.get_by_name(kwargs['authority'].owner)
return [owner_role, authority_owner_role]
return [owner_role]
def mint(**kwargs):
"""
Minting is slightly different for each authority.
Support for multiple authorities is handled by individual plugins.
"""
authority = kwargs['authority']
issuer = plugins.get(authority.plugin_name)
# allow the CSR to be specified by the user
if not kwargs.get('csr'):
csr, private_key = create_csr(**kwargs)
csr_created.send(authority=authority, csr=csr)
else:
csr = str(kwargs.get('csr'))
private_key = None
csr_imported.send(authority=authority, csr=csr)
cert_body, cert_chain, external_id = issuer.create_certificate(csr, kwargs)
return cert_body, private_key, cert_chain, external_id, csr
def import_certificate(**kwargs):
"""
Uploads already minted certificates and pulls the required information into Lemur.
This is to be used for certificates that are created outside of Lemur but
should still be tracked.
Internally this is used to bootstrap Lemur with external
    certificates, and used when certificates are 'discovered' through various discovery
    techniques.
:param kwargs:
"""
if not kwargs.get('owner'):
kwargs['owner'] = current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL')[0]
return upload(**kwargs)
def upload(**kwargs):
"""
Allows for pre-made certificates to be imported into Lemur.
"""
roles = create_certificate_roles(**kwargs)
if kwargs.get('roles'):
kwargs['roles'] += roles
else:
kwargs['roles'] = roles
if kwargs.get('private_key'):
private_key = kwargs['private_key']
if not isinstance(private_key, bytes):
kwargs['private_key'] = private_key.encode('utf-8')
cert = Certificate(**kwargs)
cert = database.create(cert)
kwargs['creator'].certificates.append(cert)
cert = database.update(cert)
certificate_imported.send(certificate=cert, authority=cert.authority)
return cert
def create(**kwargs):
"""
Creates a new certificate.
"""
cert_body, private_key, cert_chain, external_id, csr = mint(**kwargs)
kwargs['body'] = cert_body
kwargs['private_key'] = private_key
kwargs['chain'] = cert_chain
kwargs['external_id'] = external_id
kwargs['csr'] = csr
roles = create_certificate_roles(**kwargs)
if kwargs.get('roles'):
kwargs['roles'] += roles
else:
kwargs['roles'] = roles
if cert_body:
cert = Certificate(**kwargs)
kwargs['creator'].certificates.append(cert)
else:
cert = PendingCertificate(**kwargs)
kwargs['creator'].pending_certificates.append(cert)
cert.authority = kwargs['authority']
database.commit()
if isinstance(cert, Certificate):
certificate_issued.send(certificate=cert, authority=cert.authority)
metrics.send('certificate_issued', 'counter', 1, metric_tags=dict(owner=cert.owner, issuer=cert.issuer))
return cert
def render(args):
"""
    Helper function that allows us to render our REST API.
:param args:
:return:
"""
query = database.session_query(Certificate)
time_range = args.pop('time_range')
destination_id = args.pop('destination_id')
notification_id = args.pop('notification_id', None)
show = args.pop('show')
# owner = args.pop('owner')
    # creator = args.pop('creator')  # TODO we should enable filtering by owner
filt = args.pop('filter')
if filt:
terms = filt.split(';')
term = '%{0}%'.format(terms[1])
# Exact matches for quotes. Only applies to name, issuer, and cn
if terms[1].startswith('"') and terms[1].endswith('"'):
term = terms[1][1:-1]
if 'issuer' in terms:
# we can't rely on issuer being correct in the cert directly so we combine queries
sub_query = database.session_query(Authority.id)\
.filter(Authority.name.ilike(term))\
.subquery()
query = query.filter(
or_(
Certificate.issuer.ilike(term),
Certificate.authority_id.in_(sub_query)
)
)
elif 'destination' in terms:
query = query.filter(Certificate.destinations.any(Destination.id == terms[1]))
elif 'notify' in filt:
query = query.filter(Certificate.notify == truthiness(terms[1]))
elif 'active' in filt:
query = query.filter(Certificate.active == truthiness(terms[1]))
elif 'cn' in terms:
query = query.filter(
or_(
Certificate.cn.ilike(term),
Certificate.domains.any(Domain.name.ilike(term))
)
)
elif 'id' in terms:
query = query.filter(Certificate.id == cast(terms[1], Integer))
elif 'name' in terms:
query = query.filter(
or_(
Certificate.name.ilike(term),
Certificate.domains.any(Domain.name.ilike(term)),
Certificate.cn.ilike(term),
)
)
else:
query = database.filter(query, Certificate, terms)
if show:
sub_query = database.session_query(Role.name).filter(Role.user_id == args['user'].id).subquery()
query = query.filter(
or_(
Certificate.user_id == args['user'].id,
Certificate.owner.in_(sub_query)
)
)
if destination_id:
query = query.filter(Certificate.destinations.any(Destination.id == destination_id))
if notification_id:
query = query.filter(Certificate.notifications.any(Notification.id == notification_id))
if time_range:
to = arrow.now().replace(weeks=+time_range).format('YYYY-MM-DD')
now = arrow.now().format('YYYY-MM-DD')
query = query.filter(Certificate.not_after <= to).filter(Certificate.not_after >= now)
return database.sort_and_page(query, Certificate, args)
def create_csr(**csr_config):
"""
    Given a list of domains, create the appropriate CSR
    for those domains.
:param csr_config:
"""
private_key = generate_private_key(csr_config.get('key_type'))
builder = x509.CertificateSigningRequestBuilder()
name_list = [x509.NameAttribute(x509.OID_COMMON_NAME, csr_config['common_name'])]
if current_app.config.get('LEMUR_OWNER_EMAIL_IN_SUBJECT', True):
name_list.append(x509.NameAttribute(x509.OID_EMAIL_ADDRESS, csr_config['owner']))
if 'organization' in csr_config and csr_config['organization'].strip():
name_list.append(x509.NameAttribute(x509.OID_ORGANIZATION_NAME, csr_config['organization']))
if 'organizational_unit' in csr_config and csr_config['organizational_unit'].strip():
name_list.append(x509.NameAttribute(x509.OID_ORGANIZATIONAL_UNIT_NAME, csr_config['organizational_unit']))
if 'country' in csr_config and csr_config['country'].strip():
name_list.append(x509.NameAttribute(x509.OID_COUNTRY_NAME, csr_config['country']))
if 'state' in csr_config and csr_config['state'].strip():
name_list.append(x509.NameAttribute(x509.OID_STATE_OR_PROVINCE_NAME, csr_config['state']))
if 'location' in csr_config and csr_config['location'].strip():
name_list.append(x509.NameAttribute(x509.OID_LOCALITY_NAME, csr_config['location']))
builder = builder.subject_name(x509.Name(name_list))
extensions = csr_config.get('extensions', {})
critical_extensions = ['basic_constraints', 'sub_alt_names', 'key_usage']
noncritical_extensions = ['extended_key_usage']
for k, v in extensions.items():
if v:
if k in critical_extensions:
current_app.logger.debug('Adding Critical Extension: {0} {1}'.format(k, v))
if k == 'sub_alt_names':
if v['names']:
builder = builder.add_extension(v['names'], critical=True)
else:
builder = builder.add_extension(v, critical=True)
if k in noncritical_extensions:
current_app.logger.debug('Adding Extension: {0} {1}'.format(k, v))
builder = builder.add_extension(v, critical=False)
ski = extensions.get('subject_key_identifier', {})
if ski.get('include_ski', False):
builder = builder.add_extension(
x509.SubjectKeyIdentifier.from_public_key(private_key.public_key()),
critical=False
)
request = builder.sign(
private_key, hashes.SHA256(), default_backend()
)
# serialize our private key and CSR
private_key = private_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.TraditionalOpenSSL, # would like to use PKCS8 but AWS ELBs don't like it
encryption_algorithm=serialization.NoEncryption()
)
if isinstance(private_key, bytes):
private_key = private_key.decode('utf-8')
csr = request.public_bytes(
encoding=serialization.Encoding.PEM
).decode('utf-8')
return csr, private_key
def stats(**kwargs):
"""
Helper that defines some useful statistics about certifications.
:param kwargs:
:return:
"""
if kwargs.get('metric') == 'not_after':
start = arrow.utcnow()
end = start.replace(weeks=+32)
items = database.db.session.query(Certificate.issuer, func.count(Certificate.id))\
.group_by(Certificate.issuer)\
.filter(Certificate.not_after <= end.format('YYYY-MM-DD')) \
.filter(Certificate.not_after >= start.format('YYYY-MM-DD')).all()
else:
attr = getattr(Certificate, kwargs.get('metric'))
query = database.db.session.query(attr, func.count(attr))
items = query.group_by(attr).all()
keys = []
values = []
for key, count in items:
keys.append(key)
values.append(count)
return {'labels': keys, 'values': values}
def get_account_number(arn):
"""
Extract the account number from an arn.
:param arn: IAM SSL arn
:return: account number associated with ARN
"""
return arn.split(":")[4]
def get_name_from_arn(arn):
"""
Extract the certificate name from an arn.
:param arn: IAM SSL arn
:return: name of the certificate as uploaded to AWS
"""
return arn.split("/", 1)[1]
def calculate_reissue_range(start, end):
"""
Determine what the new validity_start and validity_end dates should be.
:param start:
:param end:
:return:
"""
span = end - start
new_start = arrow.utcnow()
new_end = new_start + span
return new_start, arrow.get(new_end)
def get_certificate_primitives(certificate):
"""
    Retrieve key primitives from a certificate such that the certificate
could be recreated with new expiration or be used to build upon.
:param certificate:
:return: dict of certificate primitives, should be enough to effectively re-issue
certificate via `create`.
"""
start, end = calculate_reissue_range(certificate.not_before, certificate.not_after)
data = CertificateInputSchema().load(CertificateOutputSchema().dump(certificate).data).data
    # we can't quite tell if we are using a custom name, as this is (typically) an automated
    # process, so we will rely on the Lemur-generated name
data.pop('name', None)
# TODO this can be removed once we migrate away from cn
data['cn'] = data['common_name']
# needed until we move off not_*
data['not_before'] = start
data['not_after'] = end
data['validity_start'] = start
data['validity_end'] = end
return data
def reissue_certificate(certificate, replace=None, user=None):
"""
Reissue certificate with the same properties of the given certificate.
:param certificate:
:param replace:
:param user:
:return:
"""
primitives = get_certificate_primitives(certificate)
if not user:
primitives['creator'] = certificate.user
else:
primitives['creator'] = user
if replace:
primitives['replaces'] = [certificate]
new_cert = create(**primitives)
return new_cert<|fim▁end|>
| |
<|file_name|>matrix.hpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2009-2012, Jack Poulson
All rights reserved.
This file is part of Elemental.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the owner nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef ELEMENTAL_MATRIX_HPP
#define ELEMENTAL_MATRIX_HPP 1
#include "elemental/core/environment.hpp"
namespace elem {
// Matrix base for arbitrary rings
template<typename T,typename Int=int>
class Matrix
{
public:
//
// Constructors
//
Matrix();
Matrix( Int height, Int width );
Matrix( Int height, Int width, Int ldim );
Matrix( Int height, Int width, const T* buffer, Int ldim );
Matrix( Int height, Int width, T* buffer, Int ldim );
Matrix( const Matrix<T,Int>& A );
//
// Destructor
//
~Matrix();
//
// Basic information
//
Int Height() const;
Int Width() const;
Int DiagonalLength( Int offset=0 ) const;
Int LDim() const;
Int MemorySize() const;
T* Buffer();
T* Buffer( Int i, Int j );
const T* LockedBuffer() const;
const T* LockedBuffer( Int i, Int j ) const;
//
// I/O
//
void Print( const std::string msg="" ) const;
void Print( std::ostream& os, const std::string msg="" ) const;
//
// Entry manipulation
//
T Get( Int i, Int j ) const;
void Set( Int i, Int j, T alpha );
void Update( Int i, Int j, T alpha );
void GetDiagonal( Matrix<T,Int>& d, Int offset=0 ) const;
void SetDiagonal( const Matrix<T,Int>& d, Int offset=0 );
void UpdateDiagonal( const Matrix<T,Int>& d, Int offset=0 );
//
// Though the following routines are meant for complex data, all but four
// logically apply to real data.
//
typename Base<T>::type GetReal( Int i, Int j ) const;
typename Base<T>::type GetImag( Int i, Int j ) const;
void SetReal( Int i, Int j, typename Base<T>::type alpha );
// Only valid for complex data
void SetImag( Int i, Int j, typename Base<T>::type alpha );
void UpdateReal( Int i, Int j, typename Base<T>::type alpha );
// Only valid for complex data
void UpdateImag( Int i, Int j, typename Base<T>::type alpha );
void GetRealDiagonal
( Matrix<typename Base<T>::type>& d, Int offset=0 ) const;
void GetImagDiagonal
( Matrix<typename Base<T>::type>& d, Int offset=0 ) const;
void SetRealDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset=0 );
// Only valid for complex data
void SetImagDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset=0 );
void UpdateRealDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset=0 );
// Only valid for complex data
void UpdateImagDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset=0 );
//
// Viewing other matrix instances (or buffers)
//
bool Viewing() const;
bool LockedView() const;
void View( Int height, Int width, T* buffer, Int ldim );
void View( Matrix<T,Int>& A);
void View( Matrix<T,Int>& A, Int i, Int j, Int height, Int width );
void View1x2( Matrix<T,Int>& AL, Matrix<T,Int>& AR );
void View2x1( Matrix<T,Int>& AT,
Matrix<T,Int>& AB );
void View2x2( Matrix<T,Int>& ATL, Matrix<T,Int>& ATR,
Matrix<T,Int>& ABL, Matrix<T,Int>& ABR );
void LockedView( Int height, Int width, const T* buffer, Int ldim );
void LockedView( const Matrix<T,Int>& A );
void LockedView
( const Matrix<T,Int>& A, Int i, Int j, Int height, Int width );
void LockedView1x2
( const Matrix<T,Int>& AL, const Matrix<T,Int>& AR );
void LockedView2x1
( const Matrix<T,Int>& AT,
const Matrix<T,Int>& AB );
void LockedView2x2
( const Matrix<T,Int>& ATL, const Matrix<T,Int>& ATR,
const Matrix<T,Int>& ABL, const Matrix<T,Int>& ABR );
//
// Utilities
//
const Matrix<T,Int>& operator=( const Matrix<T,Int>& A );
void Empty();
void ResizeTo( Int height, Int width );
void ResizeTo( Int height, Int width, Int ldim );
private:
bool viewing_, lockedView_;
Int height_, width_, ldim_;
T* data_;
const T* lockedData_;
Memory<T> memory_;
void AssertValidEntry( Int i, Int j ) const;
template<typename Z>
struct SetRealHelper
{
static void Func( Matrix<Z>& parent, Int i, Int j, Z alpha );
};
template<typename Z>
struct SetRealHelper<Complex<Z> >
{
static void Func( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha );
};
template<typename Z> friend struct SetRealHelper;
template<typename Z>
struct SetImagHelper
{
static void Func( Matrix<Z>& parent, Int i, Int j, Z alpha );
};
template<typename Z>
struct SetImagHelper<Complex<Z> >
{
static void Func( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha );
};
template<typename Z> friend struct SetImagHelper;
template<typename Z>
struct UpdateRealHelper
{
static void Func( Matrix<Z>& parent, Int i, Int j, Z alpha );
};
template<typename Z>
struct UpdateRealHelper<Complex<Z> >
{
static void Func( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha );
};
template<typename Z> friend struct UpdateRealHelper;
template<typename Z>
struct UpdateImagHelper
{
static void Func( Matrix<Z>& parent, Int i, Int j, Z alpha );
};
template<typename Z>
struct UpdateImagHelper<Complex<Z> >
{
static void Func( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha );
};
template<typename Z> friend struct UpdateImagHelper;
};
//----------------------------------------------------------------------------//
// Implementation begins here //
//----------------------------------------------------------------------------//
//
// Constructors
//
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix()
: viewing_(false), lockedView_(false),
  height_(0), width_(0), ldim_(1), data_(0), lockedData_(0),
memory_()
{ }
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix( Int height, Int width )
: viewing_(false), lockedView_(false),
  height_(height), width_(width), ldim_(std::max(height,1)), lockedData_(0)
{
#ifndef RELEASE
PushCallStack("Matrix::Matrix");
if( height < 0 || width < 0 )
throw std::logic_error("Height and width must be non-negative");
#endif
memory_.Require( ldim_*width );
data_ = memory_.Buffer();
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix
( Int height, Int width, Int ldim )
: viewing_(false), lockedView_(false),
  height_(height), width_(width), ldim_(ldim), lockedData_(0)
{
#ifndef RELEASE
PushCallStack("Matrix::Matrix");
if( height < 0 || width < 0 )
throw std::logic_error("Height and width must be non-negative");
if( ldim < height )
{
std::ostringstream msg;
msg << "Initialized with ldim(" << ldim << ") < "
<< "height(" << height << ").";
throw std::logic_error( msg.str() );
}
if( ldim == 0 )
throw std::logic_error
("Leading dimensions cannot be zero (for BLAS compatibility)");
#endif
memory_.Require( ldim*width );
data_ = memory_.Buffer();
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix
( Int height, Int width, const T* buffer, Int ldim )
: viewing_(true), lockedView_(true),
  height_(height), width_(width), ldim_(ldim), data_(0), lockedData_(buffer)
{
#ifndef RELEASE
PushCallStack("Matrix::Matrix");
if( height < 0 || width < 0 )
throw std::logic_error("Height and width must be non-negative");
if( ldim < height )
{
std::ostringstream msg;
msg << "Initialized with ldim(" << ldim << ") < "
<< "height(" << height << ").";
throw std::logic_error( msg.str() );
}
if( ldim == 0 )
throw std::logic_error
("Leading dimensions cannot be zero (for BLAS compatibility)");
PopCallStack();
#endif
}
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix
( Int height, Int width, T* buffer, Int ldim )
: viewing_(true), lockedView_(false),
  height_(height), width_(width), ldim_(ldim), data_(buffer), lockedData_(0)
{
#ifndef RELEASE
PushCallStack("Matrix::Matrix");
if( height < 0 || width < 0 )
throw std::logic_error("Height and width must be non-negative");
if( ldim < height )
{
std::ostringstream msg;
msg << "Initialized with ldim(" << ldim << ") < "
<< "height(" << height << ").";
throw std::logic_error( msg.str() );
}
if( ldim == 0 )
throw std::logic_error
("Leading dimensions cannot be zero (for BLAS compatibility)");
PopCallStack();
#endif
}
template<typename T,typename Int>
inline
Matrix<T,Int>::Matrix
( const Matrix<T,Int>& A )
: viewing_(false), lockedView_(false),
  height_(0), width_(0), ldim_(1), data_(0), lockedData_(0)
{
#ifndef RELEASE
PushCallStack("Matrix::Matrix( const Matrix& )");
#endif
if( &A != this )
*this = A;
else
throw std::logic_error
("You just tried to construct a Matrix with itself!");
#ifndef RELEASE
PopCallStack();
#endif
}
//
// Destructor
//
template<typename T,typename Int>
inline
Matrix<T,Int>::~Matrix()
{ }
//
// Basic information
//
template<typename T,typename Int>
inline Int
Matrix<T,Int>::Height() const
{ return height_; }
template<typename T,typename Int>
inline Int
Matrix<T,Int>::Width() const
{ return width_; }
template<typename T,typename Int>
inline Int
Matrix<T,Int>::DiagonalLength( Int offset ) const
{ return elem::DiagonalLength(height_,width_,offset); }
template<typename T,typename Int>
inline Int
Matrix<T,Int>::LDim() const
{ return ldim_; }
template<typename T,typename Int>
inline Int
Matrix<T,Int>::MemorySize() const
{ return memory_.Size(); }
template<typename T,typename Int>
inline T*
Matrix<T,Int>::Buffer()
{
#ifndef RELEASE
PushCallStack("Matrix::Buffer");
if( lockedView_ )
throw std::logic_error
("Cannot return non-const buffer of locked Matrix");
PopCallStack();
#endif
return data_;
}
template<typename T,typename Int>
inline const T*
Matrix<T,Int>::LockedBuffer() const
{
if( lockedView_ )
return lockedData_;
else<|fim▁hole|> return data_;
}
template<typename T,typename Int>
inline T*
Matrix<T,Int>::Buffer( Int i, Int j )
{
#ifndef RELEASE
PushCallStack("Matrix::Buffer");
if( i < 0 || j < 0 )
throw std::logic_error("Indices must be non-negative");
if( lockedView_ )
throw std::logic_error
("Cannot return non-const buffer of locked Matrix");
PopCallStack();
#endif
return &data_[i+j*ldim_];
}
template<typename T,typename Int>
inline const T*
Matrix<T,Int>::LockedBuffer( Int i, Int j ) const
{
#ifndef RELEASE
PushCallStack("Matrix::LockedBuffer");
if( i < 0 || j < 0 )
throw std::logic_error("Indices must be non-negative");
PopCallStack();
#endif
if( lockedView_ )
return &lockedData_[i+j*ldim_];
else
return &data_[i+j*ldim_];
}
//
// I/O
//
template<typename T,typename Int>
inline void
Matrix<T,Int>::Print( std::ostream& os, const std::string msg ) const
{
#ifndef RELEASE
PushCallStack("Matrix::Print");
#endif
if( msg != "" )
os << msg << std::endl;
const Int height = Height();
const Int width = Width();
for( Int i=0; i<height; ++i )
{
for( Int j=0; j<width; ++j )
os << Get(i,j) << " ";
os << std::endl;
}
os << std::endl;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::Print( const std::string msg ) const
{ Print( std::cout, msg ); }
//
// Entry manipulation
//
template<typename T,typename Int>
inline T
Matrix<T,Int>::Get( Int i, Int j ) const
{
#ifndef RELEASE
PushCallStack("Matrix::Get");
AssertValidEntry( i, j );
PopCallStack();
#endif
if( lockedData_ )
return lockedData_[i+j*ldim_];
else
return data_[i+j*ldim_];
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::Set( Int i, Int j, T alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::Set");
AssertValidEntry( i, j );
if( lockedData_ )
throw std::logic_error("Cannot modify data of locked matrices");
#endif
data_[i+j*ldim_] = alpha;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::Update( Int i, Int j, T alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::Update");
AssertValidEntry( i, j );
if( lockedData_ )
throw std::logic_error("Cannot modify data of locked matrices");
#endif
data_[i+j*ldim_] += alpha;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::GetDiagonal( Matrix<T,Int>& d, Int offset ) const
{
#ifndef RELEASE
PushCallStack("Matrix::GetDiagonal");
if( d.LockedView() )
throw std::logic_error("d must not be a locked view");
if( d.Viewing() &&
(d.Height() != DiagonalLength(offset) || d.Width() != 1 ))
throw std::logic_error("d is not a column-vector of the right length");
#endif
const Int diagLength = DiagonalLength(offset);
if( !d.Viewing() )
d.ResizeTo( diagLength, 1 );
if( offset >= 0 )
for( Int j=0; j<diagLength; ++j )
d.Set( j, 0, Get(j,j+offset) );
else
for( Int j=0; j<diagLength; ++j )
d.Set( j, 0, Get(j-offset,j) );
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::SetDiagonal( const Matrix<T,Int>& d, Int offset )
{
#ifndef RELEASE
PushCallStack("Matrix::SetDiagonal");
if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
throw std::logic_error("d is not a column-vector of the right length");
#endif
const Int diagLength = DiagonalLength(offset);
if( offset >= 0 )
for( Int j=0; j<diagLength; ++j )
Set( j, j+offset, d.Get(j,0) );
else
for( Int j=0; j<diagLength; ++j )
Set( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateDiagonal( const Matrix<T,Int>& d, Int offset )
{
#ifndef RELEASE
PushCallStack("Matrix::UpdateDiagonal");
if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
throw std::logic_error("d is not a column-vector of the right length");
#endif
const Int diagLength = DiagonalLength(offset);
if( offset >= 0 )
for( Int j=0; j<diagLength; ++j )
Update( j, j+offset, d.Get(j,0) );
else
for( Int j=0; j<diagLength; ++j )
Update( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline typename Base<T>::type
Matrix<T,Int>::GetReal( Int i, Int j ) const
{
#ifndef RELEASE
PushCallStack("Matrix::GetReal");
AssertValidEntry( i, j );
PopCallStack();
#endif
if( lockedData_ )
return Real(lockedData_[i+j*ldim_]);
else
return Real(data_[i+j*ldim_]);
}
template<typename T,typename Int>
inline typename Base<T>::type
Matrix<T,Int>::GetImag( Int i, Int j ) const
{
#ifndef RELEASE
PushCallStack("Matrix::GetImag");
AssertValidEntry( i, j );
PopCallStack();
#endif
if( lockedData_ )
return Imag(lockedData_[i+j*ldim_]);
else
return Imag(data_[i+j*ldim_]);
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::SetReal
( Int i, Int j, typename Base<T>::type alpha )
{ SetRealHelper<T>::Func( *this, i, j, alpha ); }
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::SetRealHelper<Z>::Func
( Matrix<Z>& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::SetRealHelper::Func");
parent.AssertValidEntry( i, j );
if( parent.lockedData_ )
throw std::logic_error("Cannot modify data of locked matrices");
PopCallStack();
#endif
parent.data_[i+j*parent.ldim_] = alpha;
}
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::SetRealHelper<Complex<Z> >::Func
( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::SetRealHelper::Func");
parent.AssertValidEntry( i, j );
if( parent.lockedData_ )
throw std::logic_error("Cannot modify data of locked matrices");
PopCallStack();
#endif
const Z beta = parent.data_[i+j*parent.ldim_].imag;
parent.data_[i+j*parent.ldim_] = Complex<Z>( alpha, beta );
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::SetImag
( Int i, Int j, typename Base<T>::type alpha )
{ SetImagHelper<T>::Func( *this, i, j, alpha ); }
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::SetImagHelper<Z>::Func
( Matrix<Z>& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::SetImagHelper::Func");
#endif
throw std::logic_error("Called complex-only routine with real datatype");
}
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::SetImagHelper<Complex<Z> >::Func
( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::SetImagHelper::Func");
parent.AssertValidEntry( i, j );
if( parent.lockedData_ )
throw std::logic_error("Cannot modify data of locked matrices");
PopCallStack();
#endif
const Z beta = parent.data_[i+j*parent.ldim_].real;
parent.data_[i+j*parent.ldim_] = Complex<Z>( beta, alpha );
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateReal
( Int i, Int j, typename Base<T>::type alpha )
{ UpdateRealHelper<T>::Func( *this, i, j, alpha ); }
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::UpdateRealHelper<Z>::Func
( Matrix<Z>& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::UpdateRealHelper::Func");
parent.AssertValidEntry( i, j );
if( parent.lockedData_ )
throw std::logic_error("Cannot modify data of locked matrices");
PopCallStack();
#endif
parent.data_[i+j*parent.ldim_] += alpha;
}
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::UpdateRealHelper<Complex<Z> >::Func
( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::UpdateRealHelper::Func");
parent.AssertValidEntry( i, j );
if( parent.lockedData_ )
throw std::logic_error("Cannot modify data of locked matrices");
PopCallStack();
#endif
const Complex<Z> beta = parent.data_[i+j*parent.ldim_];
parent.data_[i+j*parent.ldim_] = Complex<Z>( beta.real+alpha, beta.imag );
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateImag
( Int i, Int j, typename Base<T>::type alpha )
{ UpdateImagHelper<T>::Func( *this, i, j, alpha ); }
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::UpdateImagHelper<Z>::Func
( Matrix<Z>& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::UpdateImagHelper::Func");
#endif
throw std::logic_error("Called complex-only routine with real datatype");
}
template<typename T,typename Int>
template<typename Z>
inline void
Matrix<T,Int>::UpdateImagHelper<Complex<Z> >::Func
( Matrix<Complex<Z> >& parent, Int i, Int j, Z alpha )
{
#ifndef RELEASE
PushCallStack("Matrix::UpdateImagHelper::Func");
parent.AssertValidEntry( i, j );
if( parent.lockedData_ )
throw std::logic_error("Cannot modify data of locked matrices");
PopCallStack();
#endif
const Complex<Z> beta = parent.data_[i+j*parent.ldim_];
parent.data_[i+j*parent.ldim_] = Complex<Z>( beta.real, beta.imag+alpha );
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::GetRealDiagonal
( Matrix<typename Base<T>::type>& d, Int offset ) const
{
#ifndef RELEASE
PushCallStack("Matrix::GetRealDiagonal");
if( d.LockedView() )
throw std::logic_error("d must not be a locked view");
if( d.Viewing() &&
(d.Height() != DiagonalLength(offset) || d.Width() != 1))
throw std::logic_error("d is not a column-vector of the right length");
#endif
const Int diagLength = DiagonalLength(offset);
if( !d.Viewing() )
d.ResizeTo( diagLength, 1 );
if( offset >= 0 )
for( Int j=0; j<diagLength; ++j )
d.Set( j, 0, GetReal(j,j+offset) );
else
for( Int j=0; j<diagLength; ++j )
d.Set( j, 0, GetReal(j-offset,j) );
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::GetImagDiagonal
( Matrix<typename Base<T>::type>& d, Int offset ) const
{
#ifndef RELEASE
PushCallStack("Matrix::GetImagDiagonal");
if( d.LockedView() )
throw std::logic_error("d must not be a locked view");
if( d.Viewing() &&
(d.Height() != DiagonalLength(offset) || d.Width() != 1))
throw std::logic_error("d is not a column-vector of the right length");
#endif
const Int diagLength = DiagonalLength(offset);
if( !d.Viewing() )
d.ResizeTo( diagLength, 1 );
if( offset >= 0 )
for( Int j=0; j<diagLength; ++j )
d.Set( j, 0, GetImag(j,j+offset) );
else
for( Int j=0; j<diagLength; ++j )
d.Set( j, 0, GetImag(j-offset,j) );
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::SetRealDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset )
{
#ifndef RELEASE
PushCallStack("Matrix::SetRealDiagonal");
if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
throw std::logic_error("d is not a column-vector of the right length");
#endif
const Int diagLength = DiagonalLength(offset);
if( offset >= 0 )
for( Int j=0; j<diagLength; ++j )
SetReal( j, j+offset, d.Get(j,0) );
else
for( Int j=0; j<diagLength; ++j )
SetReal( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::SetImagDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset )
{
#ifndef RELEASE
PushCallStack("Matrix::SetImagDiagonal");
if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
throw std::logic_error("d is not a column-vector of the right length");
#endif
if( !IsComplex<T>::val )
throw std::logic_error("Cannot set imaginary part of real matrix");
const Int diagLength = DiagonalLength(offset);
if( offset >= 0 )
for( Int j=0; j<diagLength; ++j )
SetImag( j, j+offset, d.Get(j,0) );
else
for( Int j=0; j<diagLength; ++j )
SetImag( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateRealDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset )
{
#ifndef RELEASE
PushCallStack("Matrix::UpdateRealDiagonal");
if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
throw std::logic_error("d is not a column-vector of the right length");
#endif
const Int diagLength = DiagonalLength(offset);
if( offset >= 0 )
for( Int j=0; j<diagLength; ++j )
UpdateReal( j, j+offset, d.Get(j,0) );
else
for( Int j=0; j<diagLength; ++j )
UpdateReal( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::UpdateImagDiagonal
( const Matrix<typename Base<T>::type>& d, Int offset )
{
#ifndef RELEASE
PushCallStack("Matrix::UpdateImagDiagonal");
if( d.Height() != DiagonalLength(offset) || d.Width() != 1 )
throw std::logic_error("d is not a column-vector of the right length");
#endif
if( !IsComplex<T>::val )
throw std::logic_error("Cannot update imaginary part of real matrix");
const Int diagLength = DiagonalLength(offset);
if( offset >= 0 )
for( Int j=0; j<diagLength; ++j )
UpdateImag( j, j+offset, d.Get(j,0) );
else
for( Int j=0; j<diagLength; ++j )
UpdateImag( j-offset, j, d.Get(j,0) );
#ifndef RELEASE
PopCallStack();
#endif
}
//
// Viewing other Matrix instances
//
template<typename T,typename Int>
inline bool
Matrix<T,Int>::Viewing() const
{ return viewing_; }
template<typename T,typename Int>
inline bool
Matrix<T,Int>::LockedView() const
{ return lockedView_; }
template<typename T,typename Int>
inline void
Matrix<T,Int>::View
( Int height, Int width, T* buffer, Int ldim )
{
#ifndef RELEASE
PushCallStack("Matrix::View(buffer)");
#endif
Empty();
height_ = height;
width_ = width;
ldim_ = ldim;
data_ = buffer;
viewing_ = true;
lockedView_ = false;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::View( Matrix<T,Int>& A )
{
#ifndef RELEASE
PushCallStack("Matrix::View(A)");
#endif
Empty();
height_ = A.Height();
width_ = A.Width();
ldim_ = A.LDim();
data_ = A.Buffer();
viewing_ = true;
lockedView_ = false;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView
( Int height, Int width, const T* buffer, Int ldim )
{
#ifndef RELEASE
PushCallStack("Matrix::LockedView(buffer)");
#endif
Empty();
height_ = height;
width_ = width;
ldim_ = ldim;
lockedData_ = buffer;
viewing_ = true;
lockedView_ = true;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView( const Matrix<T,Int>& A )
{
#ifndef RELEASE
PushCallStack("Matrix::LockedView(A)");
#endif
Empty();
height_ = A.Height();
width_ = A.Width();
ldim_ = A.LDim();
lockedData_ = A.LockedBuffer();
viewing_ = true;
lockedView_ = true;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::View
( Matrix<T,Int>& A, Int i, Int j, Int height, Int width )
{
#ifndef RELEASE
PushCallStack("Matrix::View(A,i,j,height,width)");
if( i < 0 || j < 0 )
throw std::logic_error("Indices must be non-negative");
if( height < 0 || width < 0 )
throw std::logic_error("Height and width must be non-negative");
if( (i+height) > A.Height() || (j+width) > A.Width() )
{
std::ostringstream msg;
msg << "Trying to view outside of a Matrix: "
<< "up to (" << i+height-1 << "," << j+width-1 << ") "
<< "of " << A.Height() << " x " << A.Width() << " Matrix.";
throw std::logic_error( msg.str().c_str() );
}
#endif
Empty();
height_ = height;
width_ = width;
ldim_ = A.LDim();
data_ = A.Buffer(i,j);
viewing_ = true;
lockedView_ = false;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView
( const Matrix<T,Int>& A, Int i, Int j, Int height, Int width )
{
#ifndef RELEASE
PushCallStack("Matrix::LockedView(A,i,j,height,width)");
if( i < 0 || j < 0 )
throw std::logic_error("Indices must be non-negative");
if( height < 0 || width < 0 )
throw std::logic_error("Height and width must be non-negative");
if( (i+height) > A.Height() || (j+width) > A.Width() )
{
std::ostringstream msg;
msg << "Trying to view outside of a Matrix: "
<< "up to (" << i+height-1 << "," << j+width-1 << ") "
<< "of " << A.Height() << " x " << A.Width() << " Matrix.";
throw std::logic_error( msg.str().c_str() );
}
#endif
Empty();
height_ = height;
width_ = width;
ldim_ = A.LDim();
lockedData_ = A.LockedBuffer(i,j);
viewing_ = true;
lockedView_ = true;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::View1x2( Matrix<T,Int>& AL, Matrix<T,Int>& AR )
{
#ifndef RELEASE
PushCallStack("Matrix::View1x2");
if( AL.Height() != AR.Height() )
throw std::logic_error("1x2 must have consistent height to combine");
if( AL.LDim() != AR.LDim() )
throw std::logic_error("1x2 must have consistent ldims to combine");
if( AR.Buffer() != (AL.Buffer()+AL.LDim()*AL.Width()) )
throw std::logic_error("1x2 must have contiguous memory");
#endif
Empty();
height_ = AL.Height();
width_ = AL.Width() + AR.Width();
ldim_ = AL.LDim();
data_ = AL.Buffer();
viewing_ = true;
lockedView_ = false;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView1x2( const Matrix<T,Int>& AL, const Matrix<T,Int>& AR )
{
#ifndef RELEASE
PushCallStack("Matrix::LockedView1x2");
if( AL.Height() != AR.Height() )
throw std::logic_error("1x2 must have consistent height to combine");
if( AL.LDim() != AR.LDim() )
throw std::logic_error("1x2 must have consistent ldims to combine");
if( AR.LockedBuffer() != (AL.LockedBuffer()+AL.LDim()*AL.Width()) )
throw std::logic_error("1x2 must have contiguous memory");
#endif
Empty();
height_ = AL.Height();
width_ = AL.Width() + AR.Width();
ldim_ = AL.LDim();
lockedData_ = AL.LockedBuffer();
viewing_ = true;
lockedView_ = true;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::View2x1
( Matrix<T,Int>& AT,
Matrix<T,Int>& AB )
{
#ifndef RELEASE
PushCallStack("Matrix::View2x1");
if( AT.Width() != AB.Width() )
throw std::logic_error("2x1 must have consistent width to combine");
if( AT.LDim() != AB.LDim() )
throw std::logic_error("2x1 must have consistent ldim to combine");
if( AB.Buffer() != (AT.Buffer() + AT.Height()) )
throw std::logic_error("2x1 must have contiguous memory");
#endif
Empty();
height_ = AT.Height() + AB.Height();
width_ = AT.Width();
ldim_ = AT.LDim();
data_ = AT.Buffer();
viewing_ = true;
lockedView_ = false;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView2x1
( const Matrix<T,Int>& AT,
const Matrix<T,Int>& AB )
{
#ifndef RELEASE
PushCallStack("Matrix::LockedView2x1");
if( AT.Width() != AB.Width() )
throw std::logic_error("2x1 must have consistent width to combine");
if( AT.LDim() != AB.LDim() )
throw std::logic_error("2x1 must have consistent ldim to combine");
if( AB.LockedBuffer() != (AT.LockedBuffer()+AT.Height()) )
throw std::logic_error("2x1 must have contiguous memory");
#endif
Empty();
height_ = AT.Height() + AB.Height();
width_ = AT.Width();
ldim_ = AT.LDim();
lockedData_ = AT.LockedBuffer();
viewing_ = true;
lockedView_ = true;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::View2x2
( Matrix<T,Int>& ATL, Matrix<T,Int>& ATR,
Matrix<T,Int>& ABL, Matrix<T,Int>& ABR )
{
#ifndef RELEASE
PushCallStack("Matrix::View2x2");
if( ATL.Width() != ABL.Width() ||
ATR.Width() != ABR.Width() ||
ATL.Height() != ATR.Height() ||
ABL.Height() != ABR.Height() )
throw std::logic_error("2x2 must conform to combine");
if( ATL.LDim() != ATR.LDim() ||
ATR.LDim() != ABL.LDim() ||
ABL.LDim() != ABR.LDim() )
throw std::logic_error("2x2 must have consistent ldims to combine");
if( ABL.Buffer() != (ATL.Buffer() + ATL.Height()) ||
ABR.Buffer() != (ATR.Buffer() + ATR.Height()) ||
ATR.Buffer() != (ATL.Buffer() + ATL.LDim()*ATL.Width()) )
throw std::logic_error("2x2 must have contiguous memory");
#endif
Empty();
height_ = ATL.Height() + ABL.Height();
width_ = ATL.Width() + ATR.Width();
ldim_ = ATL.LDim();
data_ = ATL.Buffer();
viewing_ = true;
lockedView_ = false;
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::LockedView2x2
( const Matrix<T,Int>& ATL, const Matrix<T,Int>& ATR,
const Matrix<T,Int>& ABL, const Matrix<T,Int>& ABR )
{
#ifndef RELEASE
PushCallStack("Matrix::LockedView2x2");
if( ATL.Width() != ABL.Width() ||
ATR.Width() != ABR.Width() ||
ATL.Height() != ATR.Height() ||
ABL.Height() != ABR.Height() )
throw std::logic_error("2x2 must conform to combine");
if( ATL.LDim() != ATR.LDim() ||
ATR.LDim() != ABL.LDim() ||
ABL.LDim() != ABR.LDim() )
throw std::logic_error("2x2 must have consistent ldims to combine");
if( ABL.LockedBuffer() != (ATL.LockedBuffer() + ATL.Height()) ||
ABR.LockedBuffer() != (ATR.LockedBuffer() + ATR.Height()) ||
ATR.LockedBuffer() != (ATL.LockedBuffer() + ATL.LDim()*ATL.Width()) )
throw std::logic_error("2x2 must have contiguous memory");
#endif
Empty();
height_ = ATL.Height() + ABL.Height();
width_ = ATL.Width() + ATR.Width();
ldim_ = ATL.LDim();
lockedData_ = ATL.LockedBuffer();
viewing_ = true;
lockedView_ = true;
#ifndef RELEASE
PopCallStack();
#endif
}
//
// Utilities
//
template<typename T,typename Int>
inline const Matrix<T,Int>&
Matrix<T,Int>::operator=( const Matrix<T,Int>& A )
{
#ifndef RELEASE
PushCallStack("Matrix::operator=");
if( lockedView_ )
throw std::logic_error("Cannot assign to a locked view");
if( viewing_ && ( A.Height() != Height() || A.Width() != Width() ) )
throw std::logic_error
("Cannot assign to a view of different dimensions");
#endif
if( !viewing_ )
ResizeTo( A.Height(), A.Width() );
const Int height = Height();
const Int width = Width();
const Int ldim = LDim();
const Int ldimOfA = A.LDim();
const T* data = A.LockedBuffer();
#ifdef _OPENMP
#pragma omp parallel for
#endif
for( Int j=0; j<width; ++j )
MemCopy( &data_[j*ldim], &data[j*ldimOfA], height );
#ifndef RELEASE
PopCallStack();
#endif
return *this;
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::Empty()
{
memory_.Empty();
height_ = 0;
width_ = 0;
ldim_ = 1;
data_ = 0;
lockedData_ = 0;
viewing_ = false;
lockedView_ = false;
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::ResizeTo( Int height, Int width )
{
#ifndef RELEASE
PushCallStack("Matrix::ResizeTo(height,width)");
if( height < 0 || width < 0 )
throw std::logic_error("Height and width must be non-negative");
if( viewing_ && (height>height_ || width>width_) )
throw std::logic_error("Cannot increase the size of a view");
#endif
// Only change the ldim when necessary. Simply 'shrink' our view if
// possible.
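    // Illustrative (assumed sizes): shrinking a 10x10 matrix (ldim_=10) to 5x5
    // keeps ldim_=10; growing it to 12x8 sets ldim_ = std::max(12,1) = 12.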
const Int minLDim = 1;
if( height > height_ || width > width_ )
ldim_ = std::max( height, minLDim );
height_ = height;
width_ = width;
memory_.Require(ldim_*width);
data_ = memory_.Buffer();
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::ResizeTo( Int height, Int width, Int ldim )
{
#ifndef RELEASE
PushCallStack("Matrix::ResizeTo(height,width,ldim)");
if( height < 0 || width < 0 )
throw std::logic_error("Height and width must be non-negative");
if( viewing_ && (height > height_ || width > width_ || ldim != ldim_) )
throw std::logic_error("Illogical ResizeTo on viewed data");
if( ldim < height )
{
std::ostringstream msg;
msg << "Tried to set ldim(" << ldim << ") < height (" << height << ")";
throw std::logic_error( msg.str().c_str() );
}
#endif
height_ = height;
width_ = width;
ldim_ = ldim;
memory_.Require(ldim*width);
data_ = memory_.Buffer();
#ifndef RELEASE
PopCallStack();
#endif
}
template<typename T,typename Int>
inline void
Matrix<T,Int>::AssertValidEntry( Int i, Int j ) const
{
#ifndef RELEASE
PushCallStack("Matrix::AssertValidEntry");
#endif
if( i < 0 || j < 0 )
throw std::logic_error("Indices must be non-negative");
    if( i >= this->Height() || j >= this->Width() )
{
std::ostringstream msg;
msg << "Out of bounds: "
<< "(" << i << "," << j << ") of " << this->Height()
<< " x " << this->Width() << " Matrix.";
throw std::logic_error( msg.str() );
}
#ifndef RELEASE
PopCallStack();
#endif
}
} // namespace elem
#endif /* ELEMENTAL_MATRIX_HPP */<|fim▁end|>
| |
<|file_name|>XProjection.java<|end_file_name|><|fim▁begin|>/***************************************************************************
* Copyright (C) 2006 by Arnaud Desaedeleer *
* [email protected] *
* *
* This file is part of OpenOMR *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
***************************************************************************/
package openomr.omr_engine;
import java.awt.image.BufferedImage;
/**
* The <code> XProjection </code> class will calculate the X-Projection of an image. The constructor
 * is given a <code> BufferedImage </code> and then the <code> calcXProjection </code> method is invoked
* to calculate the X-Projection of the <code> BufferedImage </code>.
* <p>
* The <code> XProjection </code> class is used as follows:
* <p>
* <code>
* XProjection xProj = new XProjection(BufferedImage); <br>
* xProj.calcXProjection(startH, endH, startW, endW); <br>
* </code>
* <p>
* Calling the <code> calcXProjection </code> method will place the X-Projection of the <code> BufferedImage </code>
 * in an int[] array which can be obtained by calling the <code> getXProjection </code> method.
* <p>
*
* @author Arnaud Desaedeleer
* @version 1.0
*/
public class XProjection
{
private int xProjection[];
private int size;
private BufferedImage buffImage;
public XProjection(BufferedImage buffImage)
{
this.buffImage = buffImage;
}
/**
 * Calculate the X-Projection of the BufferedImage
* @param startH Desired start Y-Coordinate of the BufferedImage
* @param endH Desired end Y-Coordinate of the BufferedImage
* @param startW Desired start X-Coordinate of the BufferedImage
* @param endW Desired end X-Coordinate of the BufferedImage
*/
public void calcXProjection(int startH, int endH, int startW, int endW)
{
int size = Math.abs(endW - startW) + 1;
//System.out.println("Size: " + size);
this.size = size;
xProjection = new int[size];
for (int i = startW; i < endW; i += 1)
{
for (int j = startH; j < endH; j += 1)
{
int color = 0;
try
{
color = buffImage.getRGB(i, j);
}
catch (ArrayIndexOutOfBoundsException e)
{
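                    // Pixel lies outside the image bounds: ignore it and
                    // leave the projection count for this column unchanged.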
}
if (color != -1) //if black pixel
<|fim▁hole|> {
xProjection[i-startW] += 1;
}
}
}
}
/**
* Returns the resulting X-Projection of the BufferedImage
* @return xProjection
*/
public int[] getXProjection()
{
return xProjection;
}
/**
* Prints the X-Projection of the BufferedImage
*
*/
public void printXProjection()
{
System.out.println("X Projection");
for (int i=0; i<size; i+=1)
{
System.out.println(xProjection[i]);
}
System.out.println("END X Projection");
}
}<|fim▁end|>
| |
<|file_name|>glue.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this<|fim▁hole|>use env_logger;
use euclid::Size2D;
use parking_lot::RwLock;
use std::mem::transmute;
use std::sync::{Arc, Mutex};
use style::arc_ptr_eq;
use style::context::{LocalStyleContextCreationInfo, ReflowGoal, SharedStyleContext};
use style::dom::{NodeInfo, StylingMode, TElement, TNode};
use style::error_reporting::StdoutErrorReporter;
use style::gecko::data::{NUM_THREADS, PerDocumentStyleData};
use style::gecko::selector_impl::{GeckoSelectorImpl, PseudoElement};
use style::gecko::snapshot::GeckoElementSnapshot;
use style::gecko::traversal::RecalcStyleOnly;
use style::gecko::wrapper::{GeckoElement, GeckoNode};
use style::gecko::wrapper::DUMMY_BASE_URL;
use style::gecko_bindings::bindings::{RawGeckoElementBorrowed, RawGeckoNodeBorrowed};
use style::gecko_bindings::bindings::{RawServoDeclarationBlockBorrowed, RawServoDeclarationBlockStrong};
use style::gecko_bindings::bindings::{RawServoStyleSetBorrowed, RawServoStyleSetOwned};
use style::gecko_bindings::bindings::{RawServoStyleSheetBorrowed, ServoComputedValuesBorrowed};
use style::gecko_bindings::bindings::{RawServoStyleSheetStrong, ServoComputedValuesStrong};
use style::gecko_bindings::bindings::{ThreadSafePrincipalHolder, ThreadSafeURIHolder};
use style::gecko_bindings::bindings::Gecko_Utf8SliceToString;
use style::gecko_bindings::bindings::ServoComputedValuesBorrowedOrNull;
use style::gecko_bindings::bindings::nsACString;
use style::gecko_bindings::structs::{SheetParsingMode, nsIAtom};
use style::gecko_bindings::structs::ServoElementSnapshot;
use style::gecko_bindings::structs::nsRestyleHint;
use style::gecko_bindings::structs::nsString;
use style::gecko_bindings::sugar::ownership::{FFIArcHelpers, HasArcFFI, HasBoxFFI};
use style::gecko_bindings::sugar::ownership::{HasSimpleFFI, Strong};
use style::gecko_bindings::sugar::refptr::{GeckoArcPrincipal, GeckoArcURI};
use style::parallel;
use style::parser::{ParserContext, ParserContextExtraData};
use style::properties::{CascadeFlags, ComputedValues, Importance, PropertyDeclaration};
use style::properties::{PropertyDeclarationParseResult, PropertyDeclarationBlock};
use style::properties::{cascade, parse_one_declaration};
use style::selector_impl::PseudoElementCascadeType;
use style::selector_matching::ApplicableDeclarationBlock;
use style::sequential;
use style::string_cache::Atom;
use style::stylesheets::{Origin, Stylesheet};
use style::timer::Timer;
use url::Url;
/*
* For Gecko->Servo function calls, we need to redeclare the same signature that was declared in
* the C header in Gecko. In order to catch accidental mismatches, we run rust-bindgen against
* those signatures as well, giving us a second declaration of all the Servo_* functions in this
* crate. If there's a mismatch, LLVM will assert and abort, which is a rather awful thing to
* depend on but good enough for our purposes.
*/
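// As a hedged illustration of the scheme above: a C header declaration such
// as
//
//     RawServoStyleSheetStrong Servo_StyleSheet_FromUTF8Bytes(...);
//
// gets a second, bindgen-generated Rust declaration that must line up with
// the `#[no_mangle] pub extern "C" fn` definition further down in this file.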
#[no_mangle]
pub extern "C" fn Servo_Initialize() -> () {
// Enable standard Rust logging.
//
// See https://doc.rust-lang.org/log/env_logger/index.html for instructions.
env_logger::init().unwrap();
// Allocate our default computed values.
unsafe { ComputedValues::initialize(); }
}
#[no_mangle]
pub extern "C" fn Servo_Shutdown() -> () {
// Destroy our default computed values.
unsafe { ComputedValues::shutdown(); }
}
fn restyle_subtree(element: GeckoElement, raw_data: RawServoStyleSetBorrowed) {
// Force the creation of our lazily-constructed initial computed values on
// the main thread, since it's not safe to call elsewhere.
//
// FIXME(bholley): this should move into Servo_Initialize as soon as we get
// rid of the HackilyFindSomeDeviceContext stuff that happens during
// initial_values computation, since that stuff needs to be called further
// along in startup than the sensible place to call Servo_Initialize.
ComputedValues::initial_values();
// The stylist consumes stylesheets lazily.
let mut per_doc_data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
per_doc_data.flush_stylesheets();
let local_context_data =
LocalStyleContextCreationInfo::new(per_doc_data.new_animations_sender.clone());
let shared_style_context = SharedStyleContext {
// FIXME (bug 1303229): Use the actual viewport size here
viewport_size: Size2D::new(Au(0), Au(0)),
screen_size_changed: false,
generation: 0,
goal: ReflowGoal::ForScriptQuery,
stylist: per_doc_data.stylist.clone(),
running_animations: per_doc_data.running_animations.clone(),
expired_animations: per_doc_data.expired_animations.clone(),
error_reporter: Box::new(StdoutErrorReporter),
local_context_creation_data: Mutex::new(local_context_data),
timer: Timer::new(),
};
if element.styling_mode() == StylingMode::Stop {
error!("Unnecessary call to restyle_subtree");
return;
}
if per_doc_data.num_threads == 1 || per_doc_data.work_queue.is_none() {
sequential::traverse_dom::<_, RecalcStyleOnly>(element.as_node(), &shared_style_context);
} else {
parallel::traverse_dom::<_, RecalcStyleOnly>(element.as_node(), &shared_style_context,
per_doc_data.work_queue.as_mut().unwrap());
}
}
#[no_mangle]
pub extern "C" fn Servo_RestyleSubtree(node: RawGeckoNodeBorrowed,
raw_data: RawServoStyleSetBorrowed) -> () {
let node = GeckoNode(node);
if let Some(element) = node.as_element() {
restyle_subtree(element, raw_data);
}
}
#[no_mangle]
pub extern "C" fn Servo_RestyleWithAddedDeclaration(declarations: RawServoDeclarationBlockBorrowed,
previous_style: ServoComputedValuesBorrowed)
-> ServoComputedValuesStrong
{
let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
let declaration_block = ApplicableDeclarationBlock {
mixed_declarations: declarations.clone(),
importance: Importance::Normal,
source_order: 0,
specificity: ::std::u32::MAX,
};
let previous_style = ComputedValues::as_arc(&previous_style);
// FIXME (bug 1303229): Use the actual viewport size here
let (computed, _) = cascade(Size2D::new(Au(0), Au(0)),
&[declaration_block],
Some(previous_style),
None,
None,
Box::new(StdoutErrorReporter),
CascadeFlags::empty());
Arc::new(computed).into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_StyleWorkerThreadCount() -> u32 {
*NUM_THREADS as u32
}
#[no_mangle]
pub extern "C" fn Servo_Node_ClearNodeData(node: RawGeckoNodeBorrowed) -> () {
if let Some(element) = GeckoNode(node).as_element() {
element.clear_data();
}
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_FromUTF8Bytes(data: *const nsACString,
mode: SheetParsingMode,
base_url: *const nsACString,
base: *mut ThreadSafeURIHolder,
referrer: *mut ThreadSafeURIHolder,
principal: *mut ThreadSafePrincipalHolder)
-> RawServoStyleSheetStrong {
let input = unsafe { data.as_ref().unwrap().as_str_unchecked() };
let origin = match mode {
SheetParsingMode::eAuthorSheetFeatures => Origin::Author,
SheetParsingMode::eUserSheetFeatures => Origin::User,
SheetParsingMode::eAgentSheetFeatures => Origin::UserAgent,
};
let base_str = unsafe { base_url.as_ref().unwrap().as_str_unchecked() };
let url = Url::parse(base_str).unwrap();
let extra_data = unsafe { ParserContextExtraData {
base: Some(GeckoArcURI::new(base)),
referrer: Some(GeckoArcURI::new(referrer)),
principal: Some(GeckoArcPrincipal::new(principal)),
}};
let sheet = Arc::new(Stylesheet::from_str(input, url, origin, Box::new(StdoutErrorReporter),
extra_data));
unsafe {
transmute(sheet)
}
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_AppendStyleSheet(raw_data: RawServoStyleSetBorrowed,
raw_sheet: RawServoStyleSheetBorrowed) {
let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets.push(sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_PrependStyleSheet(raw_data: RawServoStyleSetBorrowed,
raw_sheet: RawServoStyleSheetBorrowed) {
let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets.insert(0, sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_InsertStyleSheetBefore(raw_data: RawServoStyleSetBorrowed,
raw_sheet: RawServoStyleSheetBorrowed,
raw_reference: RawServoStyleSheetBorrowed) {
let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
let sheet = HasArcFFI::as_arc(&raw_sheet);
let reference = HasArcFFI::as_arc(&raw_reference);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
let index = data.stylesheets.iter().position(|x| arc_ptr_eq(x, reference)).unwrap();
data.stylesheets.insert(index, sheet.clone());
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_RemoveStyleSheet(raw_data: RawServoStyleSetBorrowed,
raw_sheet: RawServoStyleSheetBorrowed) {
let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
let sheet = HasArcFFI::as_arc(&raw_sheet);
data.stylesheets.retain(|x| !arc_ptr_eq(x, sheet));
data.stylesheets_changed = true;
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_HasRules(raw_sheet: RawServoStyleSheetBorrowed) -> bool {
!Stylesheet::as_arc(&raw_sheet).rules.is_empty()
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_AddRef(sheet: RawServoStyleSheetBorrowed) -> () {
unsafe { Stylesheet::addref(sheet) };
}
#[no_mangle]
pub extern "C" fn Servo_StyleSheet_Release(sheet: RawServoStyleSheetBorrowed) -> () {
unsafe { Stylesheet::release(sheet) };
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Get(node: RawGeckoNodeBorrowed)
-> ServoComputedValuesStrong {
let node = GeckoNode(node);
// Gecko erroneously calls this function from ServoRestyleManager::RecreateStyleContexts.
// We plan to fix that, but just support it for now until that code gets rewritten.
if node.is_text_node() {
error!("Don't call Servo_ComputedValue_Get() for text nodes");
let parent = node.parent_node().unwrap().as_element().unwrap();
let parent_cv = parent.borrow_data().map_or_else(|| Arc::new(ComputedValues::initial_values().clone()),
|x| x.get_current_styles().unwrap()
.primary.clone());
return ComputedValues::inherit_from(&parent_cv).into_strong();
}
let element = node.as_element().unwrap();
let data = element.borrow_data();
let arc_cv = match data.as_ref().and_then(|x| x.get_current_styles()) {
Some(styles) => styles.primary.clone(),
None => {
// FIXME(bholley): This case subverts the intended semantics of this
            // function, and exists only to make stylo builds more robust in corner-
// cases where Gecko wants the style for a node that Servo never
// traversed. We should remove this as soon as possible.
error!("stylo: encountered unstyled node, substituting default values.");
Arc::new(ComputedValues::initial_values().clone())
},
};
arc_cv.into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_GetForAnonymousBox(parent_style_or_null: ServoComputedValuesBorrowedOrNull,
pseudo_tag: *mut nsIAtom,
raw_data: RawServoStyleSetBorrowed)
-> ServoComputedValuesStrong {
// The stylist consumes stylesheets lazily.
let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
data.flush_stylesheets();
let atom = Atom::from(pseudo_tag);
let pseudo = PseudoElement::from_atom_unchecked(atom, /* anon_box = */ true);
let maybe_parent = ComputedValues::arc_from_borrowed(&parent_style_or_null);
let new_computed = data.stylist.precomputed_values_for_pseudo(&pseudo, maybe_parent, false);
new_computed.map_or(Strong::null(), |c| c.into_strong())
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_GetForPseudoElement(parent_style: ServoComputedValuesBorrowed,
match_element: RawGeckoElementBorrowed,
pseudo_tag: *mut nsIAtom,
raw_data: RawServoStyleSetBorrowed,
is_probe: bool)
-> ServoComputedValuesStrong {
debug_assert!(!(match_element as *const _).is_null());
let parent_or_null = || {
if is_probe {
Strong::null()
} else {
ComputedValues::as_arc(&parent_style).clone().into_strong()
}
};
let atom = Atom::from(pseudo_tag);
let pseudo = PseudoElement::from_atom_unchecked(atom, /* anon_box = */ false);
// The stylist consumes stylesheets lazily.
let mut data = PerDocumentStyleData::from_ffi(raw_data).borrow_mut();
data.flush_stylesheets();
let element = GeckoElement(match_element);
match GeckoSelectorImpl::pseudo_element_cascade_type(&pseudo) {
PseudoElementCascadeType::Eager => {
let maybe_computed = element.get_pseudo_style(&pseudo);
maybe_computed.map_or_else(parent_or_null, FFIArcHelpers::into_strong)
}
PseudoElementCascadeType::Lazy => {
let parent = ComputedValues::as_arc(&parent_style);
data.stylist
.lazily_compute_pseudo_element_style(&element, &pseudo, parent)
.map_or_else(parent_or_null, FFIArcHelpers::into_strong)
}
PseudoElementCascadeType::Precomputed => {
unreachable!("Anonymous pseudo found in \
Servo_GetComputedValuesForPseudoElement");
}
}
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Inherit(parent_style: ServoComputedValuesBorrowedOrNull)
-> ServoComputedValuesStrong {
let maybe_arc = ComputedValues::arc_from_borrowed(&parent_style);
let style = if let Some(reference) = maybe_arc.as_ref() {
ComputedValues::inherit_from(reference)
} else {
Arc::new(ComputedValues::initial_values().clone())
};
style.into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_AddRef(ptr: ServoComputedValuesBorrowed) -> () {
unsafe { ComputedValues::addref(ptr) };
}
#[no_mangle]
pub extern "C" fn Servo_ComputedValues_Release(ptr: ServoComputedValuesBorrowed) -> () {
unsafe { ComputedValues::release(ptr) };
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Init() -> RawServoStyleSetOwned {
let data = Box::new(PerDocumentStyleData::new());
data.into_ffi()
}
#[no_mangle]
pub extern "C" fn Servo_StyleSet_Drop(data: RawServoStyleSetOwned) -> () {
let _ = data.into_box::<PerDocumentStyleData>();
}
#[no_mangle]
pub extern "C" fn Servo_ParseProperty(property: *const nsACString, value: *const nsACString,
base_url: *const nsACString, base: *mut ThreadSafeURIHolder,
referrer: *mut ThreadSafeURIHolder,
principal: *mut ThreadSafePrincipalHolder)
-> RawServoDeclarationBlockStrong {
let name = unsafe { property.as_ref().unwrap().as_str_unchecked() };
let value = unsafe { value.as_ref().unwrap().as_str_unchecked() };
let base_str = unsafe { base_url.as_ref().unwrap().as_str_unchecked() };
let base_url = Url::parse(base_str).unwrap();
let extra_data = unsafe { ParserContextExtraData {
base: Some(GeckoArcURI::new(base)),
referrer: Some(GeckoArcURI::new(referrer)),
principal: Some(GeckoArcPrincipal::new(principal)),
}};
let context = ParserContext::new_with_extra_data(Origin::Author, &base_url,
Box::new(StdoutErrorReporter),
extra_data);
let mut results = vec![];
match PropertyDeclaration::parse(name, &context, &mut Parser::new(value),
&mut results, false) {
PropertyDeclarationParseResult::ValidOrIgnoredDeclaration => {},
_ => return RawServoDeclarationBlockStrong::null(),
}
let results = results.into_iter().map(|r| (r, Importance::Normal)).collect();
Arc::new(RwLock::new(PropertyDeclarationBlock {
declarations: results,
important_count: 0,
})).into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_ParseStyleAttribute(data: *const nsACString) -> RawServoDeclarationBlockStrong {
let value = unsafe { data.as_ref().unwrap().as_str_unchecked() };
Arc::new(RwLock::new(GeckoElement::parse_style_attribute(value))).into_strong()
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_AddRef(declarations: RawServoDeclarationBlockBorrowed) {
unsafe { RwLock::<PropertyDeclarationBlock>::addref(declarations) };
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_Release(declarations: RawServoDeclarationBlockBorrowed) {
unsafe { RwLock::<PropertyDeclarationBlock>::release(declarations) };
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_Equals(a: RawServoDeclarationBlockBorrowed,
b: RawServoDeclarationBlockBorrowed)
-> bool {
*RwLock::<PropertyDeclarationBlock>::as_arc(&a).read() == *RwLock::<PropertyDeclarationBlock>::as_arc(&b).read()
}
#[no_mangle]
pub extern "C" fn Servo_DeclarationBlock_SerializeOneValue(
declarations: RawServoDeclarationBlockBorrowed,
buffer: *mut nsString)
{
let mut string = String::new();
let declarations = RwLock::<PropertyDeclarationBlock>::as_arc(&declarations);
declarations.read().to_css(&mut string).unwrap();
// FIXME: We are expecting |declarations| to be a declaration block with either a single
// longhand property-declaration or a series of longhand property-declarations that make
// up a single shorthand property. As a result, it should be possible to serialize
// |declarations| as a single declaration. However, we only want to return the *value* from
// that single declaration. For now, we just manually strip the property name, colon,
// leading spacing, and trailing space. In future we should find a more robust way to do
// this.
//
// See https://github.com/servo/servo/issues/13423
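    //
    // E.g. a block serialized as "color: red;" leaves just "red" in |buffer|.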
debug_assert!(string.find(':').is_some());
let position = string.find(':').unwrap();
// Get the value after the first colon and any following whitespace.
let value = &string[(position + 1)..].trim_left();
debug_assert!(value.ends_with(';'));
let length = value.len() - 1; // Strip last semicolon.
// FIXME: Once we have nsString bindings for Servo (bug 1294742), we should be able to drop
// this and fill in |buffer| directly.
unsafe {
Gecko_Utf8SliceToString(buffer, value.as_ptr(), length);
}
}
#[no_mangle]
pub extern "C" fn Servo_CSSSupports(property: *const nsACString, value: *const nsACString) -> bool {
let property = unsafe { property.as_ref().unwrap().as_str_unchecked() };
let value = unsafe { value.as_ref().unwrap().as_str_unchecked() };
let base_url = &*DUMMY_BASE_URL;
let extra_data = ParserContextExtraData::default();
match parse_one_declaration(&property, &value, &base_url, Box::new(StdoutErrorReporter), extra_data) {
Ok(decls) => !decls.is_empty(),
Err(()) => false,
}
}
#[no_mangle]
pub extern "C" fn Servo_ComputeRestyleHint(element: RawGeckoElementBorrowed,
snapshot: *mut ServoElementSnapshot,
raw_data: RawServoStyleSetBorrowed) -> nsRestyleHint {
let per_doc_data = PerDocumentStyleData::from_ffi(raw_data).borrow();
let snapshot = unsafe { GeckoElementSnapshot::from_raw(snapshot) };
let element = GeckoElement(element);
// NB: This involves an FFI call, we can get rid of it easily if needed.
let current_state = element.get_state();
let hint = per_doc_data.stylist
.compute_restyle_hint(&element, &snapshot,
current_state);
// NB: Binary representations match.
unsafe { transmute(hint.bits() as u32) }
}<|fim▁end|>
|
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use app_units::Au;
use cssparser::{Parser, ToCss};
|
<|file_name|>Hook.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
############################ Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <[email protected]> #
# Copyright 2012 Zearin <[email protected]> #
# Copyright 2013 AKFish <[email protected]> #
# Copyright 2013 Vincent Jacques <[email protected]> #
# Copyright 2014 Vincent Jacques <[email protected]> #
# Copyright 2016 Jannis Gebauer <[email protected]> #
# Copyright 2016 Peter Buckley <[email protected]> #
# Copyright 2017 Wan Liuyang <[email protected]> #
# Copyright 2018 Wan Liuyang <[email protected]> #
# Copyright 2018 sfdye <[email protected]> #
# #
# This file is part of PyGithub. #
# http://pygithub.readthedocs.io/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
################################################################################
from __future__ import absolute_import
import six
import github.GithubObject
import github.HookResponse
class Hook(github.GithubObject.CompletableGithubObject):
"""
This class represents Hooks. The reference can be found here http://developer.github.com/v3/repos/hooks
"""
def __repr__(self):
return self.get__repr__({"id": self._id.value, "url": self._url.value})
@property
def active(self):
"""
:type: bool
"""
self._completeIfNotSet(self._active)
return self._active.value
@property
def config(self):
"""
:type: dict
"""
self._completeIfNotSet(self._config)
return self._config.value
@property
def created_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._created_at)
return self._created_at.value
@property
def events(self):
"""
:type: list of string
"""
self._completeIfNotSet(self._events)
return self._events.value
@property
def id(self):
"""
:type: integer
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def last_response(self):
"""
:type: :class:`github.HookResponse.HookResponse`
"""
self._completeIfNotSet(self._last_response)
return self._last_response.value
@property
def name(self):
"""
:type: string
"""
self._completeIfNotSet(self._name)
return self._name.value
@property
def test_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._test_url)
return self._test_url.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._updated_at)
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
@property
def ping_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._ping_url)
return self._ping_url.value
def delete(self):
"""
:calls: `DELETE /repos/:owner/:repo/hooks/:id <http://developer.github.com/v3/repos/hooks>`_
:rtype: None
"""
headers, data = self._requester.requestJsonAndCheck("DELETE", self.url)
def edit(
self,
name,
config,
events=github.GithubObject.NotSet,
add_events=github.GithubObject.NotSet,
remove_events=github.GithubObject.NotSet,
active=github.GithubObject.NotSet,
):
"""
:calls: `PATCH /repos/:owner/:repo/hooks/:id <http://developer.github.com/v3/repos/hooks>`_
:param name: string
:param config: dict
:param events: list of string
:param add_events: list of string
:param remove_events: list of string
:param active: bool
:rtype: None
"""
assert isinstance(name, (str, six.text_type)), name
assert isinstance(config, dict), config
assert events is github.GithubObject.NotSet or all(
isinstance(element, (str, six.text_type)) for element in events
), events
assert add_events is github.GithubObject.NotSet or all(<|fim▁hole|> ), add_events
assert remove_events is github.GithubObject.NotSet or all(
isinstance(element, (str, six.text_type)) for element in remove_events
), remove_events
assert active is github.GithubObject.NotSet or isinstance(active, bool), active
post_parameters = {
"name": name,
"config": config,
}
if events is not github.GithubObject.NotSet:
post_parameters["events"] = events
if add_events is not github.GithubObject.NotSet:
post_parameters["add_events"] = add_events
if remove_events is not github.GithubObject.NotSet:
post_parameters["remove_events"] = remove_events
if active is not github.GithubObject.NotSet:
post_parameters["active"] = active
headers, data = self._requester.requestJsonAndCheck(
"PATCH", self.url, input=post_parameters
)
self._useAttributes(data)
def test(self):
"""
:calls: `POST /repos/:owner/:repo/hooks/:id/tests <http://developer.github.com/v3/repos/hooks>`_
:rtype: None
"""
headers, data = self._requester.requestJsonAndCheck("POST", self.url + "/tests")
def ping(self):
"""
:calls: `POST /repos/:owner/:repo/hooks/:id/pings <http://developer.github.com/v3/repos/hooks>`_
:rtype: None
"""
headers, data = self._requester.requestJsonAndCheck("POST", self.url + "/pings")
def _initAttributes(self):
self._active = github.GithubObject.NotSet
self._config = github.GithubObject.NotSet
self._created_at = github.GithubObject.NotSet
self._events = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._last_response = github.GithubObject.NotSet
self._name = github.GithubObject.NotSet
self._test_url = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
self._ping_url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "active" in attributes: # pragma no branch
self._active = self._makeBoolAttribute(attributes["active"])
if "config" in attributes: # pragma no branch
self._config = self._makeDictAttribute(attributes["config"])
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "events" in attributes: # pragma no branch
self._events = self._makeListOfStringsAttribute(attributes["events"])
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "last_response" in attributes: # pragma no branch
self._last_response = self._makeClassAttribute(
github.HookResponse.HookResponse, attributes["last_response"]
)
if "name" in attributes: # pragma no branch
self._name = self._makeStringAttribute(attributes["name"])
if "test_url" in attributes: # pragma no branch
self._test_url = self._makeStringAttribute(attributes["test_url"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
if "ping_url" in attributes: # pragma no branch
self._ping_url = self._makeStringAttribute(attributes["ping_url"])<|fim▁end|>
|
isinstance(element, (str, six.text_type)) for element in add_events
|
<|file_name|>phone_alert_status.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
NAME="Phone Alert Status"
|
<|file_name|>projections.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from math import pi, sin, log, exp, atan
DEG_TO_RAD = pi / 180
RAD_TO_DEG = 180 / pi
def minmax (a,b,c):
a = max(a,b)
a = min(a,c)
return a
class GoogleProjection:
"""<|fim▁hole|> self.Bc = []
self.Cc = []
self.zc = []
self.Ac = []
c = 256
for d in range(levels + 1):
e = c/2;
self.Bc.append(c/360.0)
self.Cc.append(c/(2 * pi))
self.zc.append((e,e))
self.Ac.append(c)
c *= 2
def fromLLtoPixel(self, ll, zoom):
d = self.zc[zoom]
e = round(d[0] + ll[0] * self.Bc[zoom])
f = minmax(sin(DEG_TO_RAD * ll[1]),-0.9999,0.9999)
g = round(d[1] + 0.5*log((1+f)/(1-f))*-self.Cc[zoom])
return (e,g)
def fromPixelToLL(self, px, zoom):
e = self.zc[zoom]
f = (px[0] - e[0])/self.Bc[zoom]
g = (px[1] - e[1])/-self.Cc[zoom]
h = RAD_TO_DEG * ( 2 * atan(exp(g)) - 0.5 * pi)
return (f,h)<|fim▁end|>
|
    Spherical (Web) Mercator tile transformations, as used by Google and OSM
    maps. fromLLtoPixel maps (lon, lat) to global pixel coordinates at a
    given zoom level; fromPixelToLL is its inverse.
"""
def __init__(self, levels=18):
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Coherence phase
//
// The job of the coherence phase of typechecking is to ensure that
// each trait has at most one implementation for each type. This is
// done by the orphan and overlap modules. Then we build up various
// mappings. That mapping code resides here.
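//
// As a hedged illustration (not code from this crate), coherence is what
// rejects two implementations of one trait for one type:
//
//     trait Greet { fn hi(&self); }
//     struct Foo;
//     impl Greet for Foo { fn hi(&self) {} }
//     impl Greet for Foo { fn hi(&self) {} } // error: conflicting implementations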
use metadata::csearch::{each_impl, get_impl_trait};
use metadata::csearch;
use middle::subst;
use middle::subst::{Substs};
use middle::ty::get;
use middle::ty::{ImplContainer, ImplOrTraitItemId, MethodTraitItemId};
use middle::ty::{TypeTraitItemId, lookup_item_type};
use middle::ty::{t, ty_bool, ty_char, ty_bot, ty_enum, ty_err};
use middle::ty::{ty_str, ty_vec, ty_float, ty_infer, ty_int, ty_nil, ty_open};
use middle::ty::{ty_param, Polytype, ty_ptr};
use middle::ty::{ty_rptr, ty_struct, ty_trait, ty_tup};
use middle::ty::{ty_uint, ty_unboxed_closure, ty_uniq, ty_bare_fn};
use middle::ty::{ty_closure};
use middle::ty::type_is_ty_var;
use middle::subst::Subst;
use middle::ty;
use middle::typeck::CrateCtxt;
use middle::typeck::infer::combine::Combine;
use middle::typeck::infer::InferCtxt;
use middle::typeck::infer::{new_infer_ctxt, resolve_ivar, resolve_type};
use std::collections::{HashSet};
use std::cell::RefCell;
use std::rc::Rc;
use syntax::ast::{Crate, DefId};
use syntax::ast::{Item, ItemImpl};
use syntax::ast::{LOCAL_CRATE, TraitRef};
use syntax::ast;
use syntax::ast_map::NodeItem;
use syntax::ast_map;
use syntax::ast_util::{local_def};
use syntax::codemap::{Span};
use syntax::parse::token;
use syntax::visit;
use util::nodemap::{DefIdMap, FnvHashMap};
use util::ppaux::Repr;
mod orphan;
mod overlap;
fn get_base_type(inference_context: &InferCtxt,
span: Span,
original_type: t)
-> Option<t> {
let resolved_type = match resolve_type(inference_context,
Some(span),
original_type,
resolve_ivar) {
Ok(resulting_type) if !type_is_ty_var(resulting_type) => resulting_type,
_ => {
inference_context.tcx.sess.span_fatal(span,
"the type of this value must be known in order \
to determine the base type");
}
};
match get(resolved_type).sty {
ty_enum(..) | ty_struct(..) | ty_unboxed_closure(..) => {
debug!("(getting base type) found base type");
Some(resolved_type)<|fim▁hole|> _ if ty::type_is_trait(resolved_type) => {
debug!("(getting base type) found base type (trait)");
Some(resolved_type)
}
ty_nil | ty_bot | ty_bool | ty_char | ty_int(..) | ty_uint(..) | ty_float(..) |
ty_str(..) | ty_vec(..) | ty_bare_fn(..) | ty_closure(..) | ty_tup(..) |
ty_infer(..) | ty_param(..) | ty_err | ty_open(..) | ty_uniq(_) |
ty_ptr(_) | ty_rptr(_, _) => {
debug!("(getting base type) no base type; found {}",
get(original_type).sty);
None
}
ty_trait(..) => fail!("should have been caught")
}
}
// Returns the def ID of the base type, if there is one.
fn get_base_type_def_id(inference_context: &InferCtxt,
span: Span,
original_type: t)
-> Option<DefId> {
match get_base_type(inference_context, span, original_type) {
None => None,
Some(base_type) => {
match get(base_type).sty {
ty_enum(def_id, _) |
ty_struct(def_id, _) |
ty_unboxed_closure(def_id, _) => {
Some(def_id)
}
ty_ptr(ty::mt {ty, ..}) |
ty_rptr(_, ty::mt {ty, ..}) |
ty_uniq(ty) => {
match ty::get(ty).sty {
ty_trait(box ty::TyTrait { def_id, .. }) => {
Some(def_id)
}
_ => {
fail!("get_base_type() returned a type that wasn't an \
enum, struct, or trait");
}
}
}
ty_trait(box ty::TyTrait { def_id, .. }) => {
Some(def_id)
}
_ => {
fail!("get_base_type() returned a type that wasn't an \
enum, struct, or trait");
}
}
}
}
}
struct CoherenceChecker<'a, 'tcx: 'a> {
crate_context: &'a CrateCtxt<'a, 'tcx>,
inference_context: InferCtxt<'a, 'tcx>,
inherent_impls: RefCell<DefIdMap<Rc<RefCell<Vec<ast::DefId>>>>>,
}
struct CoherenceCheckVisitor<'a, 'tcx: 'a> {
cc: &'a CoherenceChecker<'a, 'tcx>
}
impl<'a, 'tcx, 'v> visit::Visitor<'v> for CoherenceCheckVisitor<'a, 'tcx> {
fn visit_item(&mut self, item: &Item) {
//debug!("(checking coherence) item '{}'", token::get_ident(item.ident));
match item.node {
ItemImpl(_, ref opt_trait, _, _) => {
match opt_trait.clone() {
Some(opt_trait) => {
self.cc.check_implementation(item, [opt_trait]);
}
None => self.cc.check_implementation(item, [])
}
}
_ => {
// Nothing to do.
}
};
visit::walk_item(self, item);
}
}
impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> {
fn check(&self, krate: &Crate) {
// Check implementations and traits. This populates the tables
// containing the inherent methods and extension methods. It also
// builds up the trait inheritance table.
let mut visitor = CoherenceCheckVisitor { cc: self };
visit::walk_crate(&mut visitor, krate);
// Copy over the inherent impls we gathered up during the walk into
// the tcx.
let mut tcx_inherent_impls =
self.crate_context.tcx.inherent_impls.borrow_mut();
for (k, v) in self.inherent_impls.borrow().iter() {
tcx_inherent_impls.insert((*k).clone(),
Rc::new((*v.borrow()).clone()));
}
// Bring in external crates. It's fine for this to happen after the
// coherence checks, because we ensure by construction that no errors
// can happen at link time.
self.add_external_crates();
// Populate the table of destructors. It might seem a bit strange to
// do this here, but it's actually the most convenient place, since
// the coherence tables contain the trait -> type mappings.
self.populate_destructor_table();
}
fn check_implementation(&self,
item: &Item,
associated_traits: &[TraitRef]) {
let tcx = self.crate_context.tcx;
let impl_did = local_def(item.id);
let self_type = ty::lookup_item_type(tcx, impl_did);
// If there are no traits, then this implementation must have a
// base type.
let impl_items = self.create_impl_from_item(item);
for associated_trait in associated_traits.iter() {
let trait_ref = ty::node_id_to_trait_ref(
self.crate_context.tcx, associated_trait.ref_id);
debug!("(checking implementation) adding impl for trait '{}', item '{}'",
trait_ref.repr(self.crate_context.tcx),
token::get_ident(item.ident));
self.add_trait_impl(trait_ref.def_id, impl_did);
}
// Add the implementation to the mapping from implementation to base
// type def ID, if there is a base type for this implementation and
// the implementation does not have any associated traits.
match get_base_type_def_id(&self.inference_context,
item.span,
self_type.ty) {
None => {
// Nothing to do.
}
Some(base_type_def_id) => {
// FIXME: Gather up default methods?
if associated_traits.len() == 0 {
self.add_inherent_impl(base_type_def_id, impl_did);
}
}
}
tcx.impl_items.borrow_mut().insert(impl_did, impl_items);
}
// Creates default method IDs and performs type substitutions for an impl
// and trait pair. Then, for each provided method in the trait, inserts a
// `ProvidedMethodInfo` instance into the `provided_method_sources` map.
fn instantiate_default_methods(
&self,
impl_id: DefId,
trait_ref: &ty::TraitRef,
all_impl_items: &mut Vec<ImplOrTraitItemId>) {
let tcx = self.crate_context.tcx;
debug!("instantiate_default_methods(impl_id={}, trait_ref={})",
impl_id, trait_ref.repr(tcx));
let impl_poly_type = ty::lookup_item_type(tcx, impl_id);
let prov = ty::provided_trait_methods(tcx, trait_ref.def_id);
for trait_method in prov.iter() {
// Synthesize an ID.
let new_id = tcx.sess.next_node_id();
let new_did = local_def(new_id);
debug!("new_did={} trait_method={}", new_did, trait_method.repr(tcx));
// Create substitutions for the various trait parameters.
let new_method_ty =
Rc::new(subst_receiver_types_in_method_ty(
tcx,
impl_id,
&impl_poly_type,
trait_ref,
new_did,
&**trait_method,
Some(trait_method.def_id)));
debug!("new_method_ty={}", new_method_ty.repr(tcx));
all_impl_items.push(MethodTraitItemId(new_did));
// construct the polytype for the method based on the
// method_ty. it will have all the generics from the
// impl, plus its own.
let new_polytype = ty::Polytype {
generics: new_method_ty.generics.clone(),
ty: ty::mk_bare_fn(tcx, new_method_ty.fty.clone())
};
debug!("new_polytype={}", new_polytype.repr(tcx));
tcx.tcache.borrow_mut().insert(new_did, new_polytype);
tcx.impl_or_trait_items
.borrow_mut()
.insert(new_did, ty::MethodTraitItem(new_method_ty));
// Pair the new synthesized ID up with the
// ID of the method.
self.crate_context.tcx.provided_method_sources.borrow_mut()
.insert(new_did, trait_method.def_id);
}
}
fn add_inherent_impl(&self, base_def_id: DefId, impl_def_id: DefId) {
match self.inherent_impls.borrow().find(&base_def_id) {
Some(implementation_list) => {
implementation_list.borrow_mut().push(impl_def_id);
return;
}
None => {}
}
self.inherent_impls.borrow_mut().insert(
base_def_id,
Rc::new(RefCell::new(vec!(impl_def_id))));
}
fn add_trait_impl(&self, base_def_id: DefId, impl_def_id: DefId) {
debug!("add_trait_impl: base_def_id={} impl_def_id={}",
base_def_id, impl_def_id);
ty::record_trait_implementation(self.crate_context.tcx,
base_def_id,
impl_def_id);
}
fn get_self_type_for_implementation(&self, impl_did: DefId)
-> Polytype {
self.crate_context.tcx.tcache.borrow().get_copy(&impl_did)
}
// Converts an implementation in the AST to a vector of items.
fn create_impl_from_item(&self, item: &Item) -> Vec<ImplOrTraitItemId> {
match item.node {
ItemImpl(_, ref trait_refs, _, ref ast_items) => {
let mut items: Vec<ImplOrTraitItemId> =
ast_items.iter()
.map(|ast_item| {
match *ast_item {
ast::MethodImplItem(ref ast_method) => {
MethodTraitItemId(
local_def(ast_method.id))
}
ast::TypeImplItem(ref typedef) => {
TypeTraitItemId(local_def(typedef.id))
}
}
}).collect();
for trait_ref in trait_refs.iter() {
let ty_trait_ref = ty::node_id_to_trait_ref(
self.crate_context.tcx,
trait_ref.ref_id);
self.instantiate_default_methods(local_def(item.id),
&*ty_trait_ref,
&mut items);
}
items
}
_ => {
self.crate_context.tcx.sess.span_bug(item.span,
"can't convert a non-impl to an impl");
}
}
}
// External crate handling
fn add_external_impl(&self,
impls_seen: &mut HashSet<DefId>,
impl_def_id: DefId) {
let tcx = self.crate_context.tcx;
let impl_items = csearch::get_impl_items(&tcx.sess.cstore,
impl_def_id);
// Make sure we don't visit the same implementation multiple times.
if !impls_seen.insert(impl_def_id) {
// Skip this one.
return
}
// Good. Continue.
let _ = lookup_item_type(tcx, impl_def_id);
let associated_traits = get_impl_trait(tcx, impl_def_id);
// Do a sanity check.
assert!(associated_traits.is_some());
// Record all the trait items.
for trait_ref in associated_traits.iter() {
self.add_trait_impl(trait_ref.def_id, impl_def_id);
}
// For any methods that use a default implementation, add them to
// the map. This is a bit unfortunate.
for item_def_id in impl_items.iter() {
let impl_item = ty::impl_or_trait_item(tcx, item_def_id.def_id());
match impl_item {
ty::MethodTraitItem(ref method) => {
for &source in method.provided_source.iter() {
tcx.provided_method_sources
.borrow_mut()
.insert(item_def_id.def_id(), source);
}
}
ty::TypeTraitItem(_) => {}
}
}
tcx.impl_items.borrow_mut().insert(impl_def_id, impl_items);
}
// Adds implementations and traits from external crates to the coherence
// info.
fn add_external_crates(&self) {
let mut impls_seen = HashSet::new();
let crate_store = &self.crate_context.tcx.sess.cstore;
crate_store.iter_crate_data(|crate_number, _crate_metadata| {
each_impl(crate_store, crate_number, |def_id| {
assert_eq!(crate_number, def_id.krate);
self.add_external_impl(&mut impls_seen, def_id)
})
})
}
//
// Destructors
//
fn populate_destructor_table(&self) {
let tcx = self.crate_context.tcx;
let drop_trait = match tcx.lang_items.drop_trait() {
Some(id) => id, None => { return }
};
let impl_items = tcx.impl_items.borrow();
let trait_impls = match tcx.trait_impls.borrow().find_copy(&drop_trait) {
None => return, // No types with (new-style) dtors present.
Some(found_impls) => found_impls
};
for &impl_did in trait_impls.borrow().iter() {
let items = impl_items.get(&impl_did);
if items.len() < 1 {
// We'll error out later. For now, just don't ICE.
continue;
}
let method_def_id = *items.get(0);
let self_type = self.get_self_type_for_implementation(impl_did);
match ty::get(self_type.ty).sty {
ty::ty_enum(type_def_id, _) |
ty::ty_struct(type_def_id, _) |
ty::ty_unboxed_closure(type_def_id, _) => {
tcx.destructor_for_type
.borrow_mut()
.insert(type_def_id, method_def_id.def_id());
tcx.destructors
.borrow_mut()
.insert(method_def_id.def_id());
}
_ => {
// Destructors only work on nominal types.
if impl_did.krate == ast::LOCAL_CRATE {
{
match tcx.map.find(impl_did.node) {
Some(ast_map::NodeItem(item)) => {
span_err!(tcx.sess, item.span, E0120,
"the Drop trait may only be implemented on structures");
}
_ => {
tcx.sess.bug("didn't find impl in ast \
map");
}
}
}
} else {
tcx.sess.bug("found external impl of Drop trait on \
something other than a struct");
}
}
}
}
}
}
pub fn make_substs_for_receiver_types(tcx: &ty::ctxt,
trait_ref: &ty::TraitRef,
method: &ty::Method)
-> subst::Substs
{
/*!
* Substitutes the values for the receiver's type parameters
* that are found in method, leaving the method's type parameters
* intact.
*/
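    // Hedged sketch of the effect (hypothetical trait, not from this crate):
    // given `impl Trait<int> for Foo` and a trait method `fn m<T>(..)`, the
    // receiver's parameters (Self and the trait's type params) come from
    // `trait_ref.substs`, while `T` is re-created below in `FnSpace`.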
let meth_tps: Vec<ty::t> =
method.generics.types.get_slice(subst::FnSpace)
.iter()
.map(|def| ty::mk_param_from_def(tcx, def))
.collect();
let meth_regions: Vec<ty::Region> =
method.generics.regions.get_slice(subst::FnSpace)
.iter()
.map(|def| ty::ReEarlyBound(def.def_id.node, def.space,
def.index, def.name))
.collect();
trait_ref.substs.clone().with_method(meth_tps, meth_regions)
}
fn subst_receiver_types_in_method_ty(tcx: &ty::ctxt,
impl_id: ast::DefId,
impl_poly_type: &ty::Polytype,
trait_ref: &ty::TraitRef,
new_def_id: ast::DefId,
method: &ty::Method,
provided_source: Option<ast::DefId>)
-> ty::Method
{
let combined_substs = make_substs_for_receiver_types(tcx, trait_ref, method);
debug!("subst_receiver_types_in_method_ty: combined_substs={}",
combined_substs.repr(tcx));
let mut method_generics = method.generics.subst(tcx, &combined_substs);
// replace the type parameters declared on the trait with those
// from the impl
for &space in [subst::TypeSpace, subst::SelfSpace].iter() {
method_generics.types.replace(
space,
Vec::from_slice(impl_poly_type.generics.types.get_slice(space)));
method_generics.regions.replace(
space,
Vec::from_slice(impl_poly_type.generics.regions.get_slice(space)));
}
debug!("subst_receiver_types_in_method_ty: method_generics={}",
method_generics.repr(tcx));
let method_fty = method.fty.subst(tcx, &combined_substs);
debug!("subst_receiver_types_in_method_ty: method_ty={}",
method.fty.repr(tcx));
ty::Method::new(
method.ident,
method_generics,
method_fty,
method.explicit_self,
method.vis,
new_def_id,
ImplContainer(impl_id),
provided_source
)
}
pub fn check_coherence(crate_context: &CrateCtxt) {
CoherenceChecker {
crate_context: crate_context,
inference_context: new_infer_ctxt(crate_context.tcx),
inherent_impls: RefCell::new(FnvHashMap::new()),
}.check(crate_context.tcx.map.krate());
orphan::check(crate_context.tcx);
overlap::check(crate_context.tcx);
}<|fim▁end|>
|
}
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup
<|fim▁hole|> license="MIT",
url="https://github.com/jddeal/python-cmr",
description="Python wrapper to the NASA Common Metadata Repository (CMR) API.",
long_description=open("README.rst").read(),
author="Justin Deal, Matt Isnor",
author_email="[email protected], [email protected]",
packages=["cmr"],
install_requires=[
"requests",
]
)<|fim▁end|>
|
setup(
name="python-cmr",
version="0.4.1",
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from pathlib import Path
from jobman.jobman import JobMan
from mc.clients.job_record_client import JobRecordClient
from mc.clients.flow_record_client import FlowRecordClient
from mc.flows.flow_engine import FlowEngine
from mc.db.db import Db
from mc.runners.flow_runner import FlowRunner
from mc.runners.jobman_job_runner.job_runner import JobRunner
class HoustonUtils(object):
JOBS_SUBDIRS = ['pending', 'queued', 'executed', 'archive']
def __init__(self, houston=None):
self.houston = houston
@property
def cfg(self): return self.houston.cfg
@property
def db(self):
if not hasattr(self, '_db'):
self._db = self.generate_db(db_uri=self.cfg['MC_DB_URI'])
return self._db
def generate_db(self, db_uri=None, schema=None):
return Db(db_uri=db_uri, schema=schema)
@db.setter
    def db(self, value): self._db = value
def ensure_queues(self):
self.ensure_queue(queue_cfg=self.cfg['FLOW_QUEUE'])
self.ensure_queue(queue_cfg=self.cfg['JOB_QUEUE'])
def ensure_queue(self, queue_cfg=None):
try:
self.db.get_item_by_key(item_type='queue', key=queue_cfg['key'])
except self.db.ItemNotFoundError:
self.db.create_item(
item_type='queue',
item_kwargs={
'key': queue_cfg['key'],
**queue_cfg.get('queue_kwargs', {})
}
)
@property
def flow_runner(self):
if not hasattr(self, '_flow_runner'):
self._flow_runner = FlowRunner(
flow_engine=self.flow_engine,
flow_record_client=self.flow_record_client,
task_ctx={
'mc.flow_record_client': self.flow_record_client,
'mc.job_record_client': self.job_record_client,
}
)
return self._flow_runner
@flow_runner.setter
def flow_runner(self, new_value): self._flow_runner = new_value
@property
def flow_engine(self):
if not hasattr(self, '_flow_engine'):
self._flow_engine = FlowEngine()
return self._flow_engine
@flow_engine.setter
def flow_engine(self, new_value): self._flow_engine = new_value
@property
def flow_record_client(self):
if not hasattr(self, '_flow_record_client'):
self._flow_record_client = self._get_mc_client(record_type='flow')
return self._flow_record_client
@flow_record_client.setter
def flow_record_client(self, new_value):
self._flow_record_client = new_value
@property
def job_record_client(self):
if not hasattr(self, '_job_record_client'):
self._job_record_client = self._get_mc_client(record_type='job')
return self._job_record_client
def _get_mc_client(self, record_type=None):
client_cls = None
if record_type == 'flow':
client_cls = FlowRecordClient
elif record_type == 'job':
client_cls = JobRecordClient
assert client_cls is not None
queue_cfg = self.cfg[record_type.upper() + '_QUEUE']
return client_cls(mc_db=self.db,
use_locks=self.cfg.get('USE_LOCKS', True),
queue_key=queue_cfg['key'])
@job_record_client.setter
def job_record_client(self, new_value): self._job_record_client = new_value
@property
def job_runner(self, mc_clients=None):
if not hasattr(self, '_job_runner'):
self._job_runner = JobRunner(
artifact_handler=self.cfg['ARTIFACT_HANDLER'],
job_record_client=self.job_record_client,
jobman=self.jobman,
jobdirs_dir=self.cfg.get('JOBDIRS_DIR', None),
build_jobdir_fn=self.build_jobdir,
)
return self._job_runner
@job_runner.setter
def job_runner(self, new_value): self._job_runner = new_value
@property
def jobman(self):
if not hasattr(self, '_jobman'):
self._jobman = JobMan.from_cfg(cfg=self.cfg['JOBMAN_CFG'])
return self._jobman
@jobman.setter
def jobman(self, new_value): self._jobman = new_value
def build_jobdir(self, *args, **kwargs):
try:
build_jobdir_fn = self.cfg['BUILD_JOBDIR_FN']
except:
def build_jobdir_fn(*args, **kwargs):
return self.houston.run_command('build_job_dir')
return build_jobdir_fn(*args, **kwargs)
def has_unfinished_mc_records(self):
unfinished_records = self.get_unfinished_mc_records()
for record_type, records in unfinished_records.items():
if len(records) > 0:
return True
return False
def get_unfinished_mc_records(self):
return {
record_type: self._get_unfinished_mc_items(item_type=record_type)
for record_type in ['flow', 'job']
}
def _get_unfinished_mc_items(self, item_type=None):
return self.db.query_items(item_type=item_type, query={
'filters': [
{'field': 'status', 'op': '! IN',
'arg': ['FAILED', 'COMPLETED']}
]
})
def ensure_job_dirs(self):
for dir in self.job_dirs.values():
Path(dir).mkdir(parents=True, exist_ok=True)
@property
def job_dirs(self):
if not hasattr(self, '_job_dirs'):
self._job_dirs = {'root': self.cfg.get('JOB_DIRS_ROOT', None)}
for jobs_subdir in self.JOBS_SUBDIRS:
self._job_dirs[jobs_subdir] = str(Path(self._job_dirs['root'],
jobs_subdir))
return self._job_dirs
@job_dirs.setter<|fim▁hole|> @property
def archiver(self):
if not hasattr(self, '_archiver'):
self._archiver = self._generate_archiver()
return self._archiver
def _generate_archiver(self):
from mc.utils.archivers.dir_archiver import DirArchiver
return DirArchiver(root_dir=self.job_dirs['archive'])
@property
def entity_selector(self):
if not hasattr(self, '_entity_selector'):
from mc.utils.selectors.basic_entity_selector import (
BasicEntitySelector)
self._entity_selector = BasicEntitySelector(db=self.db)
return self._entity_selector
@property
def request_selector(self):
if not hasattr(self, '_request_selector'):
from mc.utils.selectors.basic_request_selector import (
BasicRequestSelector)
self._request_selector = BasicRequestSelector(db=self.db)
return self._request_selector<|fim▁end|>
|
def job_dirs(self, value): self._job_dirs = value
|
<|file_name|>deslizador.py<|end_file_name|><|fim▁begin|>import pilasengine
# Permite que este ejemplo funcion incluso si no has instalado pilas.
import sys
sys.path.insert(0, "..")
pilas = pilasengine.iniciar()
mono = pilas.actores.Mono(y=-100)
def cuando_cambia_escala(valor):
mono.escala = valor * 2
deslizador_escala = pilas.interfaz.Deslizador(y=50)
deslizador_escala.conectar(cuando_cambia_escala)
<|fim▁hole|>
deslizador_rotacion = pilas.interfaz.Deslizador(y=100)
deslizador_rotacion.conectar(cuando_cambia_rotacion)
def cuando_cambia_posicion(valor):
# Obtiene valores entre -200 y 400
mono.x = -200 + 400 * valor
print valor
deslizador_posicion = pilas.interfaz.Deslizador(y=150)
deslizador_posicion.conectar(cuando_cambia_posicion)
pilas.avisar("Usa el deslizador para modificar al mono.")
pilas.ejecutar()<|fim▁end|>
|
def cuando_cambia_rotacion(valor):
mono.rotacion = valor * 360
|
<|file_name|>to_javascript.rs<|end_file_name|><|fim▁begin|>//! # To JavaScript
//!
//! This module defines the trait for translating Robin code to JavaScript
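//!
//! A hedged sketch of an implementor (the `Expression` type here is
//! hypothetical, not taken from this crate):
//!
//! ```ignore
//! impl ToJavaScript for Expression {
//!     fn eval(&mut self, stdlib: &mut Stdlib) -> Result<String, Error> {
//!         Ok("null".to_string())
//!     }
//! }
//! ```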
use error::Error;
use stdlib::Stdlib;
pub trait ToJavaScript {
fn eval(&mut self, stdlib: &mut Stdlib) -> Result<String, Error>;<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>test_tmpdir.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import json
import os
import shutil
import tempfile
import pytest
from units.compat.mock import patch, MagicMock
from ansible.module_utils._text import to_bytes
from ansible.module_utils import basic
class TestAnsibleModuleTmpDir:
DATA = (
(
{
"_ansible_tmpdir": "/path/to/dir",
"_ansible_remote_tmp": "/path/tmpdir",
"_ansible_keep_remote_files": False,
},
True,
"/path/to/dir"
),
(
{
"_ansible_tmpdir": None,
"_ansible_remote_tmp": "/path/tmpdir",
"_ansible_keep_remote_files": False
},
False,
"/path/tmpdir/ansible-moduletmp-42-"
),
(
{
"_ansible_tmpdir": None,
"_ansible_remote_tmp": "/path/tmpdir",
"_ansible_keep_remote_files": False
},
True,
"/path/tmpdir/ansible-moduletmp-42-"
),
(
{
"_ansible_tmpdir": None,
"_ansible_remote_tmp": "$HOME/.test",
"_ansible_keep_remote_files": False
},
False,
os.path.join(os.environ['HOME'], ".test/ansible-moduletmp-42-")
),
)
# pylint bug: https://github.com/PyCQA/pylint/issues/511
# pylint: disable=undefined-variable
@pytest.mark.parametrize('args, expected, stat_exists', ((s, e, t) for s, t, e in DATA))
def test_tmpdir_property(self, monkeypatch, args, expected, stat_exists):
makedirs = {'called': False}
def mock_mkdtemp(prefix, dir):
return os.path.join(dir, prefix)
def mock_makedirs(path, mode):
makedirs['called'] = True
makedirs['path'] = path
makedirs['mode'] = mode
return
monkeypatch.setattr(tempfile, 'mkdtemp', mock_mkdtemp)
monkeypatch.setattr(os.path, 'exists', lambda x: stat_exists)
monkeypatch.setattr(os, 'makedirs', mock_makedirs)
monkeypatch.setattr(shutil, 'rmtree', lambda x: None)
monkeypatch.setattr(basic, '_ANSIBLE_ARGS', to_bytes(json.dumps({'ANSIBLE_MODULE_ARGS': args})))
with patch('time.time', return_value=42):
am = basic.AnsibleModule(argument_spec={})
actual_tmpdir = am.tmpdir
assert actual_tmpdir == expected
# verify subsequent calls always produces the same tmpdir
assert am.tmpdir == actual_tmpdir
if not stat_exists:
assert makedirs['called']
expected = os.path.expanduser(os.path.expandvars(am._remote_tmp))
assert makedirs['path'] == expected
assert makedirs['mode'] == 0o700
@pytest.mark.parametrize('stdin', ({"_ansible_tmpdir": None,
"_ansible_remote_tmp": "$HOME/.test",
"_ansible_keep_remote_files": True},),
indirect=['stdin'])
def test_tmpdir_makedirs_failure(self, am, monkeypatch):
<|fim▁hole|> monkeypatch.setattr(os.path, 'exists', lambda x: False)
monkeypatch.setattr(os, 'makedirs', mock_makedirs)
actual = am.tmpdir
assert actual == "/tmp/path"
assert mock_makedirs.call_args[0] == (os.path.expanduser(os.path.expandvars("$HOME/.test")),)
assert mock_makedirs.call_args[1] == {"mode": 0o700}
# because makedirs failed the dir should be None so it uses the System tmp
assert mock_mkdtemp.call_args[1]['dir'] is None
assert mock_mkdtemp.call_args[1]['prefix'].startswith("ansible-moduletmp-")<|fim▁end|>
|
mock_mkdtemp = MagicMock(return_value="/tmp/path")
mock_makedirs = MagicMock(side_effect=OSError("Some OS Error here"))
monkeypatch.setattr(tempfile, 'mkdtemp', mock_mkdtemp)
|
<|file_name|>tag_pattern.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
tag_pattern.py
This module is a wrapper to the pattern tagger.
'''
import os
import sys
import string
import six
import sklearn
from itertools import chain
from bakfu.core.routes import register
from bakfu.process.base import BaseProcessor
__errors__ = []
try:
import pattern.fr
except Exception:
e = sys.exc_info()
__errors__.append(e)
try:
import pattern.en
except Exception:
e = sys.exc_info()
if e[1].msg == "No module named 'pywordnet'":
# py3 incompatibility ; must be fixed in pattern
pass
else:
__errors__.append(e)
def tag(tagger, sentence):
    '''
    Parse ``sentence`` with ``tagger`` and return the flattened list of
    lemmas across all parsed sentences.
    '''
    lemmas = []
    for parsed in tagger(sentence, tokenize=True, tags=True, chunks=True,
                         relations=True, lemmata=True, light=False):
        lemmas.extend(parsed.lemma)
    return lemmas
@register('tagging.pattern', __errors__)
class PatternTagger(BaseProcessor):
'''
Pre-processes data with pattern.
:Example:
.. doctest::
>>>from bakfu.examples.dataset1 import DATA
>>>import nltk
>>>baf = bakfu.Chain(lang="en")
>>>baf.load("data.simple",DATA)
>>>baf.process('tagging.pattern')
>>>baf.process('vectorize.sklearn',
... min_df = 2,<|fim▁hole|> ... max_features=100,
... tokenizer=lambda x:x,
    ... preprocessor=lambda x:x,
    ... )
>>>print(baf.get_chain("vectorizer").get_feature_names())
>>>print(baf.get_chain("vectorizer_result").toarray()[0])
'''
init_args = ()
init_kwargs = ()
run_args = ()
run_kwargs = ()
def __init__(self, *args, **kwargs):
super(PatternTagger, self).__init__(*args, **kwargs)
self.tagger = None
def run(self, caller, *args, **kwargs):
'''
'''
super(PatternTagger, self).run(caller, *args, **kwargs)
data_source = caller.get_chain('data_source')
self.caller=caller
lang = caller.get('lang')
if lang == 'fr':
self.tagger = pattern.fr.parsetree
elif lang == 'en':
self.tagger = pattern.en.parsetree
cur_data = data_source.get_data()
result = [tag(self.tagger, s) for s in cur_data]
caller.data['result'] = result
#reformat data to ((id,data),...)
#note: data now contains lists of tokens instead of sentences
uids = data_source.get_uids()
new_data = zip(uids, result)
#Assign processed data to a new data source
new_data_source = self.caller.load_unchained("data.simple", new_data)
new_data_source.meta_data = {"tokenized":True}
self._data.update(
{'result':result,
'tagger_result':result,
'data_source':new_data_source,
})
return self<|fim▁end|>
|
... ngram_range=(1, 3),
... #stop_words=nltk.corpus.stopwords.words(baf.get('language')),
|
<|file_name|>avengers.js<|end_file_name|><|fim▁begin|>var Avenger = require('mongoose').model('Avenger');
exports.getAvengers = function (req, res) {
Avenger.find({}).exec(function (err, collection) {
res.send(collection);
});
};
exports.getAvengerById = function (req, res) {<|fim▁hole|> res.send(avenger);
});
};<|fim▁end|>
|
Avenger.findOne({_id:req.params.id}).exec(function (err, avenger) {
|
<|file_name|>reissue_certificate_order_request.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .proxy_only_resource import ProxyOnlyResource
class ReissueCertificateOrderRequest(ProxyOnlyResource):
"""Class representing certificate reissue request.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param key_size: Certificate Key Size.
:type key_size: int
:param delay_existing_revoke_in_hours: Delay in hours to revoke existing
certificate after the new certificate is issued.
:type delay_existing_revoke_in_hours: int
:param csr: Csr to be used for re-key operation.
:type csr: str
:param is_private_key_external: Should we change the ASC type (from
managed private key to external private key and vice versa).
:type is_private_key_external: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},<|fim▁hole|>
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'key_size': {'key': 'properties.keySize', 'type': 'int'},
'delay_existing_revoke_in_hours': {'key': 'properties.delayExistingRevokeInHours', 'type': 'int'},
'csr': {'key': 'properties.csr', 'type': 'str'},
'is_private_key_external': {'key': 'properties.isPrivateKeyExternal', 'type': 'bool'},
}
def __init__(self, kind=None, key_size=None, delay_existing_revoke_in_hours=None, csr=None, is_private_key_external=None):
super(ReissueCertificateOrderRequest, self).__init__(kind=kind)
self.key_size = key_size
self.delay_existing_revoke_in_hours = delay_existing_revoke_in_hours
self.csr = csr
self.is_private_key_external = is_private_key_external<|fim▁end|>
|
}
|
<|file_name|>test_common.py<|end_file_name|><|fim▁begin|>import json
import time
import pytest
from anchore_engine.auth.common import (
get_creds_by_registry,
get_docker_registry_userpw,
registry_record_matches,
)
_test_username = "tonystark"
_test_password = "potts"
_test_registry_meta = {
"authorizationToken": "{}:{}".format(_test_username, _test_password)
}
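# For "awsecr" records the credentials are packed into the registry_meta JSON
# as authorizationToken == "<user>:<password>"; get_docker_registry_userpw is
# expected to split that pair back out (see the tests below).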
_record_ecr = {
"registry_type": "awsecr",
"registry_meta": json.dumps(_test_registry_meta),
}
_record_not_ecr = {
"registry_type": "other-registry",
"registry_user": _test_username,
"registry_pass": _test_password,
}<|fim▁hole|> "record_state_key": "inactive",
"registry_type": "awsecr",
"registry_meta": json.dumps(_test_registry_meta),
"registry_verify": True,
}
_record_ecr_unavailable = {
"registry": "docker.io",
"record_state_key": "inactive",
"record_state_val": time.time(), # note: technically this could yield nondeterministic results
"registry_type": "awsecr",
"registry_meta": json.dumps(_test_registry_meta),
"registry_verify": True,
}
@pytest.mark.parametrize("registry_record", [_record_ecr, _record_not_ecr])
def test_get_docker_registry_userpw(registry_record):
result = get_docker_registry_userpw(registry_record)
assert result == (_test_username, _test_password)
def test_get_docker_registry_userpw_bad_json():
record_ecr_bad_json = {
"registry_type": "awsecr",
"registry_meta": "this-is-not-valid-json!}",
}
with pytest.raises(Exception):
get_docker_registry_userpw(record_ecr_bad_json)
@pytest.mark.parametrize(
"registry,repository,registry_creds,expected",
[
("docker.io", "library/node", None, (None, None, None)),
(
"docker.io",
"library/node",
[_record_ecr_inactive],
(_test_username, _test_password, True),
),
],
)
def test_get_creds_by_registry(registry, repository, registry_creds, expected):
result = get_creds_by_registry(registry, repository, registry_creds)
assert result == expected
def test_get_creds_by_registry_unavailable():
with pytest.raises(Exception):
get_creds_by_registry("docker.io", "library/node", [_record_ecr_unavailable])
@pytest.mark.parametrize(
"registry_record_str,registry,repository",
[
("docker.io/library/centos", "docker.io", "library/centos"),
("docker.io", "docker.io", "centos"),
("docker.io", "docker.io", "myuser/myrepo"),
],
)
def test_registry_record_matches_exact(registry_record_str, registry, repository):
assert registry_record_matches(registry_record_str, registry, repository)
@pytest.mark.parametrize(
"registry_record_str,registry,repository",
[
("docker.io/library/*", "docker.io", "library/centos"),
("docker.io/*", "docker.io", "library/centos"),
("gcr.io/myproject/*", "gcr.io", "myproject/myuser/myrepo"),
],
)
def test_registry_record_matches_wildcard(registry_record_str, registry, repository):
assert registry_record_matches(registry_record_str, registry, repository)
@pytest.mark.parametrize(
"registry_record_str,registry,repository",
[
("docker.io", "gcr.io", "myproject/myuser"),
("docker.io/*", "gcr.io", "myproject/myuser"),
("docker.io/library/*", "docker.io", "myuser/myrepo"),
("docker.io/myuser/myrepo", "docker.io", "myuser/myrepo2"),
],
)
def test_registry_record_matches_non(registry_record_str, registry, repository):
assert not registry_record_matches(registry_record_str, registry, repository)<|fim▁end|>
|
_record_ecr_inactive = {
"registry": "docker.io",
|
<|file_name|>limitedSurfaceInterpolationScheme.hh<|end_file_name|><|fim▁begin|>// pythonFlu - Python wrapping for OpenFOAM C++ API<|fim▁hole|>// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
// See http://sourceforge.net/projects/pythonflu
//
// Author : Alexey PETROV
//---------------------------------------------------------------------------
#ifndef limitedSurfaceInterpolationScheme_hh
#define limitedSurfaceInterpolationScheme_hh
//---------------------------------------------------------------------------
#include "Foam/src/finiteVolume/interpolation/surfaceInterpolation/surfaceInterpolationScheme/surfaceInterpolationScheme.hh"
#include <limitedSurfaceInterpolationScheme.H>
//---------------------------------------------------------------------------
#endif<|fim▁end|>
|
// Copyright (C) 2010- Alexey Petrov
// Copyright (C) 2009-2010 Pebble Bed Modular Reactor (Pty) Limited (PBMR)
//
// This program is free software: you can redistribute it and/or modify
|
<|file_name|>sprite_sheet.py<|end_file_name|><|fim▁begin|>""" A SpriteSheet is the overall collection of individual frames (which may be spread across files) that define one layer of the final sprite.
"""
from models.sprite_action import SpriteAction
class SpriteSheet():
def __init__( self, data, group_name ):
if "file_path" not in data:
raise "file_path element missing in layer. Unable to load .spec file if we don't know which sheet you mean"
<|fim▁hole|> self.layer = data.get( "layer", "Unspecified Layer" )
self.credit_name = data.get( "credit_name", "Unknown Artist" )
self.credit_url = data.get( "credit_url", "" )
self.license = data.get( "license", "Not specified (do not use this artwork without explicit written permission from the artist!)" )
self.actions = {}
avail_actions = data.get( "actions", [] )
for action_data in avail_actions:
new_action = SpriteAction( action_data )
self.actions[new_action.name] = new_action<|fim▁end|>
|
self.file_path = data.get( "file_path" )
self.group_name = group_name
self.name = data.get( "name", "Unnamed Layer" )
|
<|file_name|>24.d.ts<|end_file_name|><|fim▁begin|>import * as React from "react";
import { CarbonIconProps } from "../../";
declare const Opacity24: React.ForwardRefExoticComponent<<|fim▁hole|><|fim▁end|>
|
CarbonIconProps & React.RefAttributes<SVGSVGElement>
>;
export default Opacity24;
|
<|file_name|>version.go<|end_file_name|><|fim▁begin|>// santiago - webhook dispatching service
// https://github.com/topfreegames/santiago
// Licensed under the MIT license:<|fim▁hole|>
package metadata
//VERSION identifies current version of the application
var VERSION = "1.2.0"<|fim▁end|>
|
// http://www.opensource.org/licenses/mit-license
// Copyright © 2016 Top Free Games <[email protected]>
|
<|file_name|>functions_7.js<|end_file_name|><|fim▁begin|>var searchData=
[
['save_5fcomments',['save_comments',['../useful__functions_8php.html#af56aec073a82606e9a7dac498de28d96',1,'useful_functions.php']]],<|fim▁hole|> ['signup_5fexists',['signup_exists',['../useful__functions_8php.html#a7cf6d3ac90a6fca8c3f595e68b6e62cc',1,'useful_functions.php']]]
];<|fim▁end|>
|
['save_5fexperiment_5flist',['save_experiment_list',['../choose__experiments_8php.html#a5d24f39ae6c336d7828523216bce6fae',1,'choose_experiments.php']]],
['save_5fsessions',['save_sessions',['../useful__functions_8php.html#a38a4632f417ceaa2f1c09c3ed0494d5e',1,'useful_functions.php']]],
['save_5fsignup_5fto_5fdb',['save_signup_to_db',['../useful__functions_8php.html#a0dca9d754b1a0d7b5401c446878844c5',1,'useful_functions.php']]],
['save_5fuser_5fexpt_5fchoices',['save_user_expt_choices',['../useful__functions_8php.html#a461a04526110df604708381a9eac7da8',1,'useful_functions.php']]],
|
<|file_name|>agent.py<|end_file_name|><|fim▁begin|>"""
Due is a learning, modular, action-oriented dialogue agent. `Agents` are the
entities that can take part in Episodes (:mod:`due.episode`), receiving and
issuing Events (:mod:`due.event`).
"""
import uuid
from abc import ABCMeta, abstractmethod
from datetime import datetime
from due.event import Event
from due import episode
from due.util.python import dynamic_import
class Agent(metaclass=ABCMeta):
"""
	Participants in Episodes are called Agents. An Agent models a unique
identity through its ID, and can be served on a number of channels using
packages in :mod:`due.serve`.
Most importantly, Agent classes implement Natural Language Understanding
(NLU) and Generation (NLG) models, which are the core of the whole
conversational experience; they are meant to learn from Episodes coming from
a corpus, as well as from live conversations with humans or other agents.
	:param agent_id: a unique ID for the Agent
	:type agent_id: `str`
"""
def __init__(self, agent_id=None):
self.id = agent_id if agent_id is not None else str(uuid.uuid1())
@abstractmethod
def save(self):
"""<|fim▁hole|> :mod:`due.persistence` module.
A saved Agent must be a dictionary containing exactly the following items:
* `version`: version of the class who saved the agent (often `due.__version__`)
* `class`: absolute import name of the Agent class (eg. `due.models.dummy.DummyAgent`)
* `data`: saved agent data. Will be passed to the Agent constructor's `_data` parameter
:return: an object representing the Agent
:rtype: object
"""
pass
@staticmethod
def load(saved_agent):
"""
Loads an Agent from an object that was produced with the :meth:`Agent.save`
method.
:param saved_agent: an Agent, as it was saved by :meth:`Agent.save`
:type saved_agent: object
:return: an Agent
:rtype: `due.agent.Agent`
"""
class_ = dynamic_import(saved_agent['class'])
return class_(_data=saved_agent['data'])
@abstractmethod
def learn_episodes(self, episodes):
"""
Submit a list of Episodes for the :class:`Agent` to learn.
:param episodes: a list of episodes
:type episodes: `list` of :class:`due.episode.Episode`
"""
pass
def learn_episode(self, episode):
"""
Submit an Episode for the Agent to learn. By default, this just wraps a
		call to :meth:`Agent.learn_episodes`
:param episode: an Episode
:type episode: :class:`due.episode.Episode`
"""
self.learn_episodes([episode])
@abstractmethod
def new_episode_callback(self, new_episode):
"""
This is a callback method that is invoked whenever the Agent is invited
to join a new conversation (Episode) with another one.
Note that this is an **abstract method**: subclasses of :class:`Agent`
must implement their own.
:param new_episode: the new Episode that the other Agent has created
:type new_episode: :class:`due.episode.Episode`
"""
pass
def start_episode(self, other):
"""
Create a new :class:`due.episode.Episode` to engage another Agent in a
new conversation.
	:param other: The Agent you are inviting to the conversation.
	:type other: :class:`due.agent.Agent`
:return: a new Episode object
:rtype: :class:`due.episode.LiveEpisode`
"""
result = episode.LiveEpisode(self, other)
other.new_episode_callback(result)
return result
def event_callback(self, event, episode):
"""
This is a callback method that is invoked whenever a new Event is acted
in an Episode. This method acts as a proxy to specific Event type
handlers:
* :meth:`Agent.utterance_callback` (:class:`due.event.Event.Type.Utterance`)
* :meth:`Agent.action_callback` (:class:`due.event.Event.Type.Action`)
* :meth:`Agent.leave_callback` (:class:`due.event.Event.Type.Leave`)
:param event: The new Event
:type event: :class:`due.event.Event`
:param episode: The Episode where the Event was acted
:type episode: :class:`due.episode.Episode`
:return: A list of response Events
:rtype: `list` of :class:`due.event.Event`
"""
		if event.type == Event.Type.Utterance:
			result = self.utterance_callback(episode)
		elif event.type == Event.Type.Action:
			result = self.action_callback(episode)
		elif event.type == Event.Type.Leave:
			result = self.leave_callback(episode)
		else:
			result = None
if not result:
result = []
return result
@abstractmethod
def utterance_callback(self, episode):
"""
This is a callback method that is invoked whenever a new Utterance
Event is acted in an Episode.
:param episode: the Episode where the Utterance was acted
:type episode: `due.episode.Episode`
:return: A list of response Events
:rtype: `list` of :class:`due.event.Event`
"""
pass
@abstractmethod
def action_callback(self, episode):
"""
This is a callback method that is invoked whenever a new Action Event
is acted in an Episode.
:param episode: the Episode where the Action was acted
:type episode: `due.episode.Episode`
:return: A list of response Events
:rtype: `list` of :class:`due.event.Event`
"""
pass
@abstractmethod
def leave_callback(self, episode):
"""
This is a callback method that is invoked whenever a new Leave Event is
acted in an Episode.
:param episode: the Episode where the Leave Event was acted
:type episode: `due.episode.Episode`
:return: A list of response Events
:rtype: `list` of :class:`due.event.Event`
"""
pass
def act_events(self, events, episode):
"""
Act a sequence of Events in the given Episode.
:param events: a list of Events
:type events: `list` of :class:`due.event.Event`
:param episode: an Episode
:type episode: :class:`due.episode.Episode`
"""
for e in events:
if e.type == Event.Type.Action:
e.payload.run()
episode.add_event(e)
def say(self, sentence, episode):
"""
Create an Event out of the given sentence and act the new Event in
		the given Episode. :class:`Agent` subclasses may need to extend this
implementation with some output operation (eg. print on screen,
broadcast to a jabber chat...).
:param sentence: A sentence
:type sentence: :class:`str`
:param episode: An Episode
:type episode: :class:`due.episode.Episode`
"""
utterance_event = Event(Event.Type.Utterance, datetime.now(), self.id, sentence)
episode.add_event(utterance_event)
def do(self, action, episode):
"""
Create an Event out of the given Action and acts the new Event in the
given Episode.
:param action: An Action
:type action: :class:`due.action.Action`
"""
action.run()
action_event = Event(Event.Type.Action, datetime.now(), self.id, action)
episode.add_event(action_event)
def leave(self, episode):
"""
Acts a new Leave Event in the given Episode.
:param episode: One of the Agent's active episodes
:type episode: :class:`due.episode.Episode`
"""
leave_event = Event(Event.Type.Leave, datetime.now(), self.id, None)
episode.add_event(leave_event)
def __str__(self):
return f"<Agent: {self.id}>"<|fim▁end|>
|
Returns the Agent as an object. This object can be loaded with
:func:`Agent.load` and can be (de)serialized using the
|
<|file_name|>generic.rs<|end_file_name|><|fim▁begin|>use rustc_serialize;
use quick_csv::Csv;
use csv;
use nn::{NN, HaltCondition};
use std::clone::Clone;
use std::cmp::Eq;
use std::collections::HashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::fmt::Debug;
use rustc_serialize::{Decodable, Encodable};
use std::marker::PhantomData;
use std::hash::Hash;
use rand::{thread_rng, Rng};
use distance::*;
use utilities::abs_standard_deviation;
use rm::learning::svm::SVM;
use rm::learning::SupModel;
use rm::learning::toolkit::kernel::HyperTan;
use rm::linalg::Matrix;
use rm::linalg::Vector;
pub trait Record<U>
where U: Clone + Eq + Debug + Hash
{
fn record_len() -> usize;
fn data_at(&self, index: usize) -> f32;
fn standarize_field(&mut self, index: usize, asd_median: &(f32, f32));
fn values(&self) -> Vec<f32>;
fn values_f64(&self) -> Vec<f64>;
fn set_values(&mut self, Vec<f32>);
fn get_class(&self) -> U;
}
#[derive(Debug, RustcDecodable, RustcEncodable)]
pub struct MpgRecord<U> {
class: U,
values: [f32; 5],
}
impl<U> Clone for MpgRecord<U>
where U: Clone
{
fn clone(&self) -> MpgRecord<U> {
MpgRecord::<U> {
class: self.class.clone(),
values: self.values.clone(),
}
}
}
impl<U> Record<U> for MpgRecord<U>
where U: Clone + Eq + Debug + Hash
{
fn record_len() -> usize {
5
}
fn data_at(&self, index: usize) -> f32 {
self.values[index]
}
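    // Modified standard score: (value - median) / absolute standard deviation,
    // where the (asd, median) tuple comes from utilities::abs_standard_deviation.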
fn standarize_field(&mut self, index: usize, asd_median: &(f32, f32)) {
self.values[index] = (self.values[index] - asd_median.1) / asd_median.0;
}
fn values(&self) -> Vec<f32> {
self.values.to_vec()
}
fn values_f64(&self) -> Vec<f64> {
self.values
.to_vec()
.iter()
.map(|x| *x as f64)
.collect()
}
fn get_class(&self) -> U {
self.class.clone()
}
fn set_values(&mut self, values: Vec<f32>) {
for i in 0..self.values.len() {
self.values[i] = values[i];
}
}
}
#[derive(Debug, RustcDecodable, RustcEncodable)]
pub struct IrisRecord<U> {
class: U,
values: [f32; 4],
}
impl<U> Clone for IrisRecord<U>
where U: Clone
{
fn clone(&self) -> IrisRecord<U> {
IrisRecord::<U> {
class: self.class.clone(),
values: self.values.clone(),
}
}
}
impl<U> Record<U> for IrisRecord<U>
where U: Clone + Eq + Debug + Hash
{
fn record_len() -> usize {
4
}
fn data_at(&self, index: usize) -> f32 {
self.values[index]
}
fn standarize_field(&mut self, index: usize, asd_median: &(f32, f32)) {
self.values[index] = (self.values[index] - asd_median.1) / asd_median.0;
}
fn values(&self) -> Vec<f32> {
self.values.to_vec()
}
fn values_f64(&self) -> Vec<f64> {
self.values
.to_vec()
.iter()
.map(|x| *x as f64)
.collect()
}
fn get_class(&self) -> U {
self.class.clone()
}
fn set_values(&mut self, values: Vec<f32>) {
for i in 0..self.values.len() {
self.values[i] = values[i];
}
}
}
#[derive(Debug, RustcDecodable, RustcEncodable)]
pub struct PirsonRecord<U> {
class: U,
values: Vec<f32>,
}
impl<U> Clone for PirsonRecord<U>
where U: Clone
{
fn clone(&self) -> PirsonRecord<U> {
PirsonRecord::<U> {
class: self.class.clone(),
values: self.values.clone(),
}
}
}
impl<U> Record<U> for PirsonRecord<U>
where U: Clone + Eq + Debug + Hash
{
fn record_len() -> usize {
1423
}
fn data_at(&self, index: usize) -> f32 {
self.values[index]
}
fn standarize_field(&mut self, index: usize, asd_median: &(f32, f32)) {
self.values[index] = (self.values[index] - asd_median.1) / asd_median.0;
}
fn values(&self) -> Vec<f32> {
self.values.to_vec()
}
fn values_f64(&self) -> Vec<f64> {
self.values
.to_vec()
.iter()
.map(|x| *x as f64)
.collect()
}
fn get_class(&self) -> U {
self.class.clone()
}
fn set_values(&mut self, values: Vec<f32>) {
for i in 0..self.values.len() {
self.values[i] = values[i];
}
}
}
#[derive(Debug)]
pub enum TrainingMethod {
NearestNeighbors,
NeuralNetwork,
SVM,
}
use self::TrainingMethod::*;
#[derive(Debug)]
pub struct Database<T, U>
where T: Record<U> + Clone + Encodable,
U: Clone + Eq + Debug + Hash
{
data: Vec<T>,
abs_sd: Vec<(f32, f32)>,
classifier: HashMap<U, usize>,
net: NN,
// svm_mod: SVM,
phantom: PhantomData<U>,
}
impl<T, U> Database<T, U>
where T: Decodable + Debug + Record<U> + Clone + Encodable,
U: Clone + Eq + Debug + Hash
{
pub fn new() -> Database<T, U> {
Database {
data: Vec::new(),
abs_sd: Vec::new(),
classifier: HashMap::new(),
net: NN::new(&[T::record_len() as u32, 10, 9]),
// svm_mod: SVM::default(),
phantom: PhantomData,
}
}
pub fn from_file(path: &str) -> Database<T, U> {
let rdr = Csv::from_file(path).unwrap().has_header(true);
let mut data: Vec<T> = Vec::new();
for row in rdr.into_iter() {
match row.unwrap().decode::<T>() {
Ok(cols) => data.push(cols),
Err(error) => println!("{}", error),
}
}
Database {
data: data,
abs_sd: Vec::new(),
classifier: HashMap::new(),
net: NN::new(&[T::record_len() as u32, 10, 9]),
// svm_mod: SVM::default(),
phantom: PhantomData,
}
}
pub fn add_file(&mut self, path: &str) {
let rdr = Csv::from_file(path).unwrap();
for row in rdr.into_iter() {
match row.unwrap().decode::<T>() {
Ok(cols) => self.data.push(cols),
Err(error) => println!("{}", error),
}
}
}
pub fn standarize(&mut self) {
println!("Standarizing DB...");
let record_len = T::record_len();
let mut mult_feat_vec = vec![Vec::<f32>::new(); record_len];
for rcrd in self.data.iter() {
for i in 0..record_len {
mult_feat_vec[i].push(rcrd.data_at(i));
}
}
let mut i = 0;
for feat_vec in mult_feat_vec.iter() {
let asd_median_tup = abs_standard_deviation(&feat_vec);
println!("\t{}> asd: {}\tmedian: {}",
i,
asd_median_tup.0,
asd_median_tup.1);
for rcrd in self.data.iter_mut() {
rcrd.standarize_field(i, &asd_median_tup);
}
self.abs_sd.push(asd_median_tup);
i += 1;
}
}
fn nearest_neighbors(&self, rcrd: &T, func: fn(&Vec<f32>, &Vec<f32>) -> f32) -> Vec<usize> {
let mut distances: Vec<(f32, usize)> = Vec::new();
let mut i = 0;
for record in self.data.iter() {
distances.push((func(&rcrd.values(), &record.values()), i));
i += 1;
}
distances.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());
let indexes = distances.into_iter().map(|x| x.1).collect();
indexes
}
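    // nearest_neighbors (above) returns record indices sorted by ascending
    // score under `func`; predict_knn below feeds it pearson_coef as a
    // distance-like metric.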
pub fn standarize_record(&self, record: &mut T) {
for i in 0..T::record_len() {
record.standarize_field(i, &self.abs_sd[i]);
}
}
pub fn predict_knn(&self, record: &T, k: usize) -> U {
let mut counts: HashMap<U, usize> = HashMap::new();
for i in 0..k {
let counter = counts
.entry(self.data[self.nearest_neighbors(&record, pearson_coef)[i]].get_class())
.or_insert(0);
*counter += 1;
}
let mut p_class = self.data[self.nearest_neighbors(&record, pearson_coef)[0]].get_class();
let mut gtr_count = 1;
        for (class, count) in &counts {
            if *count > gtr_count {
                gtr_count = *count;
                p_class = class.clone();
            }
        }
p_class
}
pub fn classify(&mut self) {
let mut class = 0;
for record in self.data.iter() {
match self.classifier.entry(record.get_class()) {
Vacant(entry) => {
entry.insert(class);
class += 1;
}
_ => {}
}
}
}
pub fn get_class_index(&self, record: &T) -> usize {
*self.classifier.get(&record.get_class()).unwrap()
}
pub fn get_class_from_index(&self, class_val: usize) -> U {
for (class, val) in &self.classifier {
if class_val == *val {
return class.clone();
}
}
self.data[0].get_class().clone()
}
pub fn predict_nn(&mut self, record: &T) -> U {
let pred = self.net.run(&record.values_f64());
println!("{:?}", pred);
let mut indx = 0;
for i in 1..pred.len() {
if pred[indx] < pred[i] {
indx = i;
}<|fim▁hole|> self.get_class_from_index(indx)
}
pub fn predict_svm(&self, record: &T) -> U {
unimplemented!()
}
pub fn count_classes(&self) -> HashMap<U, usize> {
let mut counts: HashMap<U, usize> = HashMap::new();
for record in self.data.iter() {
let counter = counts.entry(record.get_class()).or_insert(0);
*counter += 1;
}
counts
}
pub fn segment(&self, n: usize, prefix: &str) {
let mut record_transfers: Vec<Vec<usize>> = vec![Vec::new(); n];
let mut rng = thread_rng();
let mut i = 0;
for record in self.data.iter() {
record_transfers[rng.gen_range(0, n)].push(i);
i += 1;
}
i = 1;
for item in record_transfers.iter() {
let path = format!("../../data/cross-validation/{}-{number:>0width$}",
prefix,
number = i,
width = 2);
let str_path: &str = path.as_ref();
let mut wtr = csv::Writer::from_file(str_path).unwrap();
for record in item {
wtr.encode(self.data[*record].clone());
}
i += 1;
}
}
pub fn cross_validation(training_path: &str,
n: usize,
prefix: &str,
segment: bool,
training: TrainingMethod) {
/*******SEGMENTATION******/
if segment {
let mut db = Database::<T, U>::from_file(training_path);
db.segment(n, prefix);
}
/*************************/
let mut precision = 0.0;
let mut prec_vec = Vec::new();
for j in 1..n + 1 {
let mut db = Database::<T, U>::new();
for i in 1..n + 1 {
if i != j {
db.add_file(format!("../../data/cross-validation/{}-{number:>0width$}",
prefix,
number = i,
width = 2)
.as_ref());
}
}
db.standarize();
let path = format!("../../data/cross-validation/{}-{number:>0width$}",
prefix,
number = j,
width = 2);
let mut test_db = Database::<T, U>::from_file(path.as_ref());
let mut confusion_counts: HashMap<U, HashMap<U, usize>> = HashMap::new();
let mut n_correct = 0;
let mut n_incorrect = 0;
let mut count = 0;
if let NeuralNetwork = training {
db.classify();
let mut values: Vec<(Vec<f64>, Vec<f64>)> = Vec::new();
for rcrd in db.data.iter() {
let mut class = vec![0.0; 9];
// println!("Here!: {}", db.get_class_index(rcrd));
class[db.get_class_index(rcrd)] = 1.0;
// println!("{} vs {}", rcrd.values_f64().len(), T::record_len());
values.push((rcrd.values_f64(), class));
}
// println!("Exited");
db.net
.train(&values)
.halt_condition(HaltCondition::Epochs(100000))
.log_interval(Some(1000))
.momentum(0.1)
.rate(0.3)
.go();
}
if let SVM = training {
// let inputs = Matrix::new(T::record_len(), db.data.len(), vec![1.0,3.0,5.0,7.0]);
// let targets = Vector::new(vec![-1.,-1.,1.,1.]);
// // Train the model
// self.svm_mod.train(&inputs, &targets).unwrap();
}
for mut record in test_db.data.iter_mut() {
db.standarize_record(&mut record);
let class = record.get_class();
let pred = match training {
NearestNeighbors => db.predict_knn(&record, 3),
NeuralNetwork => db.predict_nn(&record),
SVM => db.predict_knn(&record, 3),
};
if class == pred {
n_correct += 1;
} else {
n_incorrect += 1;
}
match confusion_counts.entry(class) {
Vacant(entry) => {
let mut class_count = HashMap::new();
class_count.insert(pred, 1);
entry.insert(class_count);
}
Occupied(mut entry) => {
let counter = entry.get_mut().entry(pred).or_insert(1);
*counter += 1;
}
}
count += 1;
}
println!("\nTestings for: {}\n\
Accuracy: {}%\n\
Confusion Matrix===========",
path,
n_correct as f32 * 100.0 / count as f32);
for (act_class, counts) in &confusion_counts {
print!(" {:?} >", act_class);
for (pred_class, count) in counts {
print!("\t{:?}: {}", pred_class, count);
}
println!("");
}
println!("===========================\n");
precision += n_correct as f32 * 100.0 / count as f32;
prec_vec.push(n_correct as f32 * 100.0 / count as f32);
}
precision /= n as f32;
println!("{:?}", prec_vec);
println!("Avg accuracy: {}%", precision);
}
}<|fim▁end|>
|
}
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//
// SOS: the Stupid Operating System
// by Eliza Weisman ([email protected])
//
// Copyright (c) 2015-2017 Eliza Weisman
// Released under the terms of the MIT license. See `LICENSE` in the root
// directory of this repository for more information.
//
//! Architecture-specific memory management.<|fim▁hole|>
use core::{fmt, ops, mem};
pub const PAGE_SHIFT: u8 = 12;
/// The size of a page (4KiB), in bytes
pub const PAGE_SIZE: u64 = 1 << PAGE_SHIFT; // 4k
/// The size of a large page (2MiB) in bytes
pub const LARGE_PAGE_SIZE: u64 = 1024 * 1024 * 2;
/// The size of a huge page (1GiB) in bytes
pub const HUGE_PAGE_SIZE: u64 = 1024 * 1024 * 1024;
macro_attr! {
/// A physical (linear) memory address is a 64-bit unsigned integer
#[derive(Copy, Clone, Eq, Ord, PartialEq, PartialOrd, Addr!(u64, 'P'))]
#[repr(C)]
pub struct PAddr(u64);
}
macro_attr! {
/// A frame (physical page)
// TODO: consider renaming this to `Frame` (less typing)?
// - eliza, 2/28/2017
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Page!(PAddr) )]
pub struct PhysicalPage { pub number: u64 }
}
impl fmt::Debug for PhysicalPage {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "frame #{} at {:#p}", self.number, self.base_addr())
}
}
impl ops::Add<usize> for PhysicalPage {
type Output = Self;
#[inline] fn add(self, rhs: usize) -> Self {
PhysicalPage { number: self.number + rhs as u64 }
}
}
impl ops::Sub<usize> for PhysicalPage {
type Output = Self;
#[inline] fn sub(self, rhs: usize) -> Self {
PhysicalPage { number: self.number - rhs as u64 }
}
}
impl ops::AddAssign<usize> for PhysicalPage {
#[inline] fn add_assign(&mut self, rhs: usize) {
self.number += rhs as u64;
}
}
impl ops::SubAssign<usize> for PhysicalPage {
#[inline] fn sub_assign(&mut self, rhs: usize) {
self.number -= rhs as u64;
}
}
impl PhysicalPage {
/// Returns the physical address where this frame starts.
#[inline]
pub const fn base_addr(&self) -> PAddr {
PAddr(self.number << PAGE_SHIFT)
}
/// Returns a new frame containing `addr`
#[inline]
pub const fn containing_addr(addr: PAddr) -> PhysicalPage {
PhysicalPage { number: addr.0 >> PAGE_SHIFT }
}
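    // Illustrative round-trip: PAddr(0x1234) falls in frame number 1
    // (0x1234 >> 12), whose base_addr() is PAddr(0x1000).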
/// Convert the frame into a raw pointer to the frame's base address
#[inline]
pub unsafe fn as_ptr<T>(&self) -> *const T {
mem::transmute(self.base_addr())
}
/// Convert the frame into a raw mutable pointer to the frame's base address
#[inline]
pub unsafe fn as_mut_ptr<T>(&self) -> *mut T {
*self.base_addr() as *mut u8 as *mut T
}
}<|fim▁end|>
|
use ::{Addr, Page};
|
<|file_name|>mask.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<%helpers:shorthand name="mask" products="gecko"
sub_properties="mask-mode mask-repeat mask-clip mask-origin mask-composite mask-position
mask-size mask-image">
use properties::longhands::{mask_mode, mask_repeat, mask_clip, mask_origin, mask_composite, mask_position};
use properties::longhands::{mask_size, mask_image};
impl From<mask_origin::single_value::SpecifiedValue> for mask_clip::single_value::SpecifiedValue {
fn from(origin: mask_origin::single_value::SpecifiedValue) -> mask_clip::single_value::SpecifiedValue {
match origin {
mask_origin::single_value::SpecifiedValue::content_box =>
mask_clip::single_value::SpecifiedValue::content_box,
mask_origin::single_value::SpecifiedValue::padding_box =>
mask_clip::single_value::SpecifiedValue::padding_box,
mask_origin::single_value::SpecifiedValue::border_box =>
mask_clip::single_value::SpecifiedValue::border_box,
}
}
}
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
% for name in "image mode position size repeat origin clip composite".split():
let mut mask_${name} = mask_${name}::SpecifiedValue(Vec::new());
% endfor
try!(input.parse_comma_separated(|input| {
% for name in "image mode position size repeat origin clip composite".split():
let mut ${name} = None;
% endfor
loop {
if image.is_none() {
if let Ok(value) = input.try(|input| mask_image::single_value
::parse(context, input)) {
image = Some(value);
// Parse mask mode, if applicable.
mode = input.try(|input| mask_mode::single_value::parse(context, input)).ok();
continue
}
}
if position.is_none() {
if let Ok(value) = input.try(|input| mask_position::single_value
::parse(context, input)) {
position = Some(value);
// Parse mask size, if applicable.
size = input.try(|input| {
try!(input.expect_delim('/'));
mask_size::single_value::parse(context, input)
}).ok();
continue
}
}
% for name in "repeat origin clip composite".split():
if ${name}.is_none() {
if let Ok(value) = input.try(|input| mask_${name}::single_value
::parse(context, input)) {
${name} = Some(value);
continue
}
}
% endfor
break
}
if clip.is_none() {
if let Some(origin) = origin {
clip = Some(mask_clip::single_value::SpecifiedValue::from(origin));
}
}
let mut any = false;
% for name in "image mode position size repeat origin clip composite".split():
any = any || ${name}.is_some();
% endfor
if any {
% for name in "image mode position size repeat origin clip composite".split():
if let Some(m_${name}) = ${name} {
mask_${name}.0.push(m_${name});
} else {
mask_${name}.0.push(mask_${name}::single_value
::get_initial_specified_value());
}
% endfor
Ok(())
} else {
Err(())
}
}));
Ok(Longhands {
% for name in "image mode position size repeat origin clip composite".split():
mask_${name}: Some(mask_${name}),
% endfor
})
}
impl<'a> LonghandsToSerialize<'a> {
fn to_css_declared<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
// mako doesn't like ampersands following `<`
fn extract_value<T>(x: &DeclaredValue<T>) -> Option< &T> {
match *x {
DeclaredValue::Value(ref val) => Some(val),
_ => None,
}
}
use std::cmp;
let mut len = 0;
% for name in "image mode position size repeat origin clip composite".split():
len = cmp::max(len, extract_value(self.mask_${name}).map(|i| i.0.len())
.unwrap_or(0));
% endfor
// There should be at least one declared value
if len == 0 {
return dest.write_str("")
}
for i in 0..len {
% for name in "image mode position size repeat origin clip composite".split():
let ${name} = if let DeclaredValue::Value(ref arr) = *self.mask_${name} {
arr.0.get(i % arr.0.len())
} else {
None
};
% endfor
if let Some(image) = image {
try!(image.to_css(dest));
} else {
try!(write!(dest, "none"));
}
try!(write!(dest, " "));
if let Some(mode) = mode {
try!(mode.to_css(dest));
} else {
try!(write!(dest, "match-source"));
}
try!(write!(dest, " "));
try!(position.unwrap_or(&mask_position::single_value
::get_initial_specified_value())
.to_css(dest));
if let Some(size) = size {
try!(write!(dest, " / "));
try!(size.to_css(dest));
}
try!(write!(dest, " "));
if let Some(repeat) = repeat {
try!(repeat.to_css(dest));
} else {
try!(write!(dest, "repeat"));
}
match (origin, clip) {
(Some(origin), Some(clip)) => {
use properties::longhands::mask_origin::single_value::computed_value::T as Origin;
use properties::longhands::mask_clip::single_value::computed_value::T as Clip;
try!(write!(dest, " "));
match (origin, clip) {
(&Origin::padding_box, &Clip::padding_box) => {
try!(origin.to_css(dest));
},
(&Origin::border_box, &Clip::border_box) => {
try!(origin.to_css(dest));
},
(&Origin::content_box, &Clip::content_box) => {
try!(origin.to_css(dest));
},
_ => {
try!(origin.to_css(dest));
try!(write!(dest, " "));
try!(clip.to_css(dest));
}
}<|fim▁hole|> };
try!(write!(dest, " "));
if let Some(composite) = composite {
try!(composite.to_css(dest));
} else {
try!(write!(dest, "add"));
}
}
Ok(())
}
}
</%helpers:shorthand><|fim▁end|>
|
},
_ => {}
|
<|file_name|>Face.py<|end_file_name|><|fim▁begin|>import pygame
class Face(pygame.sprite.Sprite):
def __init__(self, imagePaths, rect, player):
pygame.sprite.Sprite.__init__(self)
self.imagePath = imagePaths
self.images = {}
self.rect = pygame.Rect(rect)
self.player = player
self.stateCallback = player.stateOfMind
def update(self):
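        # Lazily load and cache one scaled image per player state; width is
        # fixed by the sprite rect and height preserves the source aspect ratio.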
state = self.stateCallback()
if state not in self.images:
self.images[state] = pygame.image.load(self.imagePath[state]).convert_alpha()
iwidth, iheight = self.images[state].get_size()
width = self.rect.width
height = int(1.0 * width / iwidth * iheight)
self.images[state] = pygame.transform.smoothscale(self.images[state], (width, height))
self.image = self.images[state]
def setCallback(self, cb):<|fim▁hole|><|fim▁end|>
|
self.stateCallback = cb
|
<|file_name|>move-4.rs<|end_file_name|><|fim▁begin|>// run-pass
#![allow(dead_code)]
struct Triple { a: isize, b: isize, c: isize }
fn test(foo: Box<Triple>) -> Box<Triple> {
let foo = foo;
let bar = foo;
let baz = bar;
let quux = baz;
return quux;
}
pub fn main() {
let x = Box::new(Triple{ a: 1, b: 2, c: 3 });
let y = test(x);<|fim▁hole|> assert_eq!(y.c, 3);
}<|fim▁end|>
| |
<|file_name|>secret.go<|end_file_name|><|fim▁begin|>package authorizer
import (
"context"
"github.com/influxdata/influxdb/v2"
"github.com/influxdata/influxdb/v2/kit/platform"
)
var _ influxdb.SecretService = (*SecretService)(nil)
// SecretService wraps a influxdb.SecretService and authorizes actions
// against it appropriately.
type SecretService struct {
s influxdb.SecretService
}
// NewSecretService constructs an instance of an authorizing secret service.
func NewSecretService(s influxdb.SecretService) *SecretService {
return &SecretService{
s: s,
}
}
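// Illustrative wiring (names are assumptions):
//   svc := authorizer.NewSecretService(kvSecretService)
//   secret, err := svc.LoadSecret(ctx, orgID, "slack_token")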
// LoadSecret checks to see if the authorizer on context has read access to the secret key provided.
func (s *SecretService) LoadSecret(ctx context.Context, orgID platform.ID, key string) (string, error) {
if _, _, err := AuthorizeOrgReadResource(ctx, influxdb.SecretsResourceType, orgID); err != nil {
return "", err
}
secret, err := s.s.LoadSecret(ctx, orgID, key)
if err != nil {
return "", err
}
return secret, nil
}
// GetSecretKeys checks to see if the authorizer on context has read access to all the secrets belonging to orgID.
func (s *SecretService) GetSecretKeys(ctx context.Context, orgID platform.ID) ([]string, error) {
if _, _, err := AuthorizeOrgReadResource(ctx, influxdb.SecretsResourceType, orgID); err != nil {
return []string{}, err
}
secrets, err := s.s.GetSecretKeys(ctx, orgID)
if err != nil {
return []string{}, err
}
return secrets, nil
}
// PutSecret checks to see if the authorizer on context has write access to the secret key provided.
func (s *SecretService) PutSecret(ctx context.Context, orgID platform.ID, key string, val string) error {
if _, _, err := AuthorizeCreate(ctx, influxdb.SecretsResourceType, orgID); err != nil {
return err
}
err := s.s.PutSecret(ctx, orgID, key, val)
if err != nil {
return err
}
return nil
}
// PutSecrets checks to see if the authorizer on context has read and write access to the secret keys provided.
func (s *SecretService) PutSecrets(ctx context.Context, orgID platform.ID, m map[string]string) error {
	// PutSecrets operates on the intersection between m and the keys belonging to orgID.
	// We need read access to those secrets since it deletes the secrets (within the intersection) that have not been overridden.
if _, _, err := AuthorizeOrgReadResource(ctx, influxdb.SecretsResourceType, orgID); err != nil {
return err
}
if _, _, err := AuthorizeOrgWriteResource(ctx, influxdb.SecretsResourceType, orgID); err != nil {<|fim▁hole|> return err
}
err := s.s.PutSecrets(ctx, orgID, m)
if err != nil {
return err
}
return nil
}
// PatchSecrets checks to see if the authorizer on context has write access to the secret keys provided.
func (s *SecretService) PatchSecrets(ctx context.Context, orgID platform.ID, m map[string]string) error {
if _, _, err := AuthorizeOrgWriteResource(ctx, influxdb.SecretsResourceType, orgID); err != nil {
return err
}
err := s.s.PatchSecrets(ctx, orgID, m)
if err != nil {
return err
}
return nil
}
// DeleteSecret checks to see if the authorizer on context has write access to the secret keys provided.
func (s *SecretService) DeleteSecret(ctx context.Context, orgID platform.ID, keys ...string) error {
if _, _, err := AuthorizeOrgWriteResource(ctx, influxdb.SecretsResourceType, orgID); err != nil {
return err
}
err := s.s.DeleteSecret(ctx, orgID, keys...)
if err != nil {
return err
}
return nil
}<|fim▁end|>
| |
<|file_name|>test_lexical_syntactic_featurizer.py<|end_file_name|><|fim▁begin|>import numpy as np
import pytest
import re
from typing import Text, Dict, Any, Callable, List, Optional, Union
from rasa.engine.graph import ExecutionContext
from rasa.engine.storage.storage import ModelStorage
from rasa.engine.storage.resource import Resource
from rasa.nlu.constants import (
DENSE_FEATURIZABLE_ATTRIBUTES,
MESSAGE_ATTRIBUTES,
TOKENS_NAMES,
)
from rasa.nlu.tokenizers.spacy_tokenizer import POS_TAG_KEY
from rasa.nlu.featurizers.sparse_featurizer.lexical_syntactic_featurizer import (
LexicalSyntacticFeaturizer,
FEATURES,
)
from rasa.shared.nlu.training_data.training_data import TrainingData
from rasa.shared.nlu.training_data.message import Message
from rasa.shared.nlu.constants import FEATURE_TYPE_SEQUENCE, TEXT
from rasa.shared.exceptions import InvalidConfigException
from rasa.nlu.tokenizers.tokenizer import Token
@pytest.fixture
def resource_lexical_syntactic_featurizer() -> Resource:
return Resource("LexicalSyntacticFeaturizer")
@pytest.fixture
def create_lexical_syntactic_featurizer(
default_model_storage: ModelStorage,
default_execution_context: ExecutionContext,
resource_lexical_syntactic_featurizer: Resource,
) -> Callable[[Dict[Text, Any]], LexicalSyntacticFeaturizer]:
def inner(config: Dict[Text, Any]):
return LexicalSyntacticFeaturizer.create(
config={**LexicalSyntacticFeaturizer.get_default_config(), **config,},
model_storage=default_model_storage,
execution_context=default_execution_context,
resource=resource_lexical_syntactic_featurizer,
)
return inner
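# The factory above merges a partial config over get_default_config(), so each
# test below only spells out the keys it cares about.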
@pytest.mark.parametrize(
"sentence,part_of_speech,feature_config,expected_features",
[
# simple example 1
(
"hello goodbye hello",
None,
[["BOS", "upper"], ["BOS", "EOS", "prefix2", "digit"], ["EOS", "low"],],
[
[0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0],
[0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0],
[1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0],
],
),
# simple example 2
(
"a 1",
None,
[["BOS", "upper"], ["BOS", "EOS", "prefix2", "digit"], ["EOS", "low"],],
[
[0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0],
[1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0],
],
),
# larger window size
(
"hello 123 hello 123 hello",
None,
[["upper"], ["digit"], ["low"], ["digit"]],
[0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0],
# Note:
# 1. we just describe the features for first token here
# 2. "123".islower() == "123".isupper() == False, which is why we end
# up with 7 features
),
# with part of speech
(
"The sun is shining",
["DET", "NOUN", "AUX", "VERB"],
[["pos", "pos2"]],
[
[0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0],
[1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0],
],
),
],
)
def test_feature_computation(
create_lexical_syntactic_featurizer: Callable[
[Dict[Text, Any]], LexicalSyntacticFeaturizer
],
sentence: Text,
part_of_speech: Optional[List[Text]],
feature_config: List[List[Text]],
expected_features: List[Union[int, List[int]]],
):
featurizer = create_lexical_syntactic_featurizer(
{"alias": "lsf", "features": feature_config}
)
# build the message
tokens = [
Token(text=match[0], start=match.start())
for match in re.finditer(r"\w+", sentence)
]
# ... and add part of speech tags (str) to tokens (if specified)
if part_of_speech:
assert len(tokens) == len(part_of_speech)
for token, pos in zip(tokens, part_of_speech):
token.data = {POS_TAG_KEY: pos}<|fim▁hole|>
# train
featurizer.train(TrainingData([message]))
assert not message.features
# process
featurizer.process([message])
assert len(message.features) == 1
feature = message.features[0]
assert feature.attribute == TEXT
assert feature.is_sparse()
assert feature.type == FEATURE_TYPE_SEQUENCE
assert feature.features.shape[0] == len(tokens)
if isinstance(expected_features[0], List):
assert len(expected_features) == feature.features.shape[0]
# we specified the full matrix
assert np.all(feature.features.todense() == expected_features)
else:
assert len(expected_features) == feature.features.shape[1]
# just check features for the first token
assert np.all(feature.features.todense()[0] == expected_features)
def test_features_for_messages_with_missing_part_of_speech_tags(
create_lexical_syntactic_featurizer: Callable[
[Dict[Text, Any]], LexicalSyntacticFeaturizer
],
):
# build the message and do NOT add part of speech information
sentence = "hello goodbye hello"
message_data = {
TOKENS_NAMES[TEXT]: [
Token(text=match[0], start=match.start())
for match in re.finditer(r"\w+", sentence)
]
}
message = Message(data=message_data)
# train and process
featurizer = create_lexical_syntactic_featurizer(
{"alias": "lsf", "features": [["BOS", "pos"]]}
)
featurizer.train(TrainingData([message]))
featurizer.process([message])
feature = message.features[0]
assert feature.features.shape[1] == 3 # BOS = True/False, pos = None
def test_only_featurizes_text_attribute(
create_lexical_syntactic_featurizer: Callable[
[Dict[Text, Any]], LexicalSyntacticFeaturizer
],
):
# build a message with tokens for lots of attributes
sentence = "hello goodbye hello"
tokens = [
Token(text=match[0], start=match.start())
for match in re.finditer(r"\w+", sentence)
]
message_data = {}
for attribute in MESSAGE_ATTRIBUTES + DENSE_FEATURIZABLE_ATTRIBUTES:
message_data[attribute] = sentence
message_data[TOKENS_NAMES[attribute]] = tokens
message = Message(data=message_data)
# train and process
featurizer = create_lexical_syntactic_featurizer(
{"alias": "lsf", "features": [["BOS"]]}
)
featurizer.train(TrainingData([message]))
featurizer.process([message])
assert len(message.features) == 1
assert message.features[0].attribute == TEXT
def test_process_multiple_messages(
create_lexical_syntactic_featurizer: Callable[
[Dict[Text, Any]], LexicalSyntacticFeaturizer
],
):
# build a message with tokens for lots of attributes
multiple_messages = []
for sentence in ["hello", "hello there"]:
tokens = [
Token(text=match[0], start=match.start())
for match in re.finditer(r"\w+", sentence)
]
multiple_messages.append(Message(data={TOKENS_NAMES[TEXT]: tokens}))
# train and process
featurizer = create_lexical_syntactic_featurizer(
{"alias": "lsf", "features": [["prefix2"]]}
)
featurizer.train(TrainingData(multiple_messages))
featurizer.process(multiple_messages)
for message in multiple_messages:
assert len(message.features) == 1
assert message.features[0].attribute == TEXT
# we know both texts where used for training if more than one feature has been
# extracted e.g. for the first message from which only the prefix "he" can be
# extracted
assert multiple_messages[0].features[0].features.shape[-1] > 1
@pytest.mark.parametrize("feature_config", [(["pos", "BOS"],)])
def test_create_train_load_and_process(
create_lexical_syntactic_featurizer: Callable[
[Dict[Text, Any]], LexicalSyntacticFeaturizer
],
default_model_storage: ModelStorage,
default_execution_context: ExecutionContext,
resource_lexical_syntactic_featurizer: Resource,
feature_config: List[Text],
) -> Callable[..., LexicalSyntacticFeaturizer]:
config = {"alias": "lsf", "features": feature_config}
featurizer = create_lexical_syntactic_featurizer(config)
sentence = "Hello how are you"
tokens = [
Token(text=match[0], start=match.start())
for match in re.finditer(r"\w+", sentence)
]
message = Message(data={TOKENS_NAMES[TEXT]: tokens})
featurizer.train(TrainingData([message]))
loaded_featurizer = LexicalSyntacticFeaturizer.load(
config={**LexicalSyntacticFeaturizer.get_default_config(), **config,},
model_storage=default_model_storage,
execution_context=default_execution_context,
resource=resource_lexical_syntactic_featurizer,
)
assert loaded_featurizer._feature_to_idx_dict == featurizer._feature_to_idx_dict
@pytest.mark.parametrize(
"config,raises",
[
# do not raise
({}, False),
({**LexicalSyntacticFeaturizer.get_default_config()}, False),
({FEATURES: [["suffix2"]]}, False),
(
{
"bla": "obviously an unknown extra feature",
"faeturizer": "typos are also unknown features",
},
False,
),
# raise
({FEATURES: ["pos", "suffix2"]}, True),
({FEATURES: ["suffix1234"]}, True),
],
)
def test_validate_config(config: Dict[Text, Any], raises: bool):
if not raises:
LexicalSyntacticFeaturizer.validate_config(config)
else:
with pytest.raises(InvalidConfigException):
LexicalSyntacticFeaturizer.validate_config(config)
@pytest.mark.parametrize(
"sentence, feature_config, expected_features",
[("The sun is shining", [["pos", "pos2"]], np.ones(shape=(4, 2)),),],
)
def test_warn_if_part_of_speech_features_cannot_be_computed(
create_lexical_syntactic_featurizer: Callable[
[Dict[Text, Any]], LexicalSyntacticFeaturizer
],
sentence: Text,
feature_config: Dict[Text, Any],
expected_features: np.ndarray,
):
featurizer = create_lexical_syntactic_featurizer(
{"alias": "lsf", "features": feature_config}
)
# build the message - with tokens but *no* part-of-speech tags
tokens = [
Token(text=match[0], start=match.start())
for match in re.finditer(r"\w+", sentence)
]
message = Message(data={TOKENS_NAMES[TEXT]: tokens})
# train
with pytest.warns(
UserWarning,
match="Expected training data to include tokens with part-of-speech tags",
):
featurizer.train(TrainingData([message]))
assert not message.features
# process
with pytest.warns(None) as records:
featurizer.process([message])
assert len(records) == 0
assert len(message.features) == 1
feature = message.features[0]
assert np.all(feature.features.todense() == expected_features)<|fim▁end|>
|
message = Message(data={TOKENS_NAMES[TEXT]: tokens})
|
<|file_name|>JsonRest.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2004-2011, The Dojo Foundation All Rights Reserved.
Available via Academic Free License >= 2.1 OR the modified BSD license.
see: http://dojotoolkit.org/license for details
*/
//>>built
define("dojo/store/JsonRest",["../_base/xhr","../_base/lang","../json","../_base/declare","./util/QueryResults"],function(_1,_2,_3,_4,_5){
var _6=null;
return _4("dojo.store.JsonRest",_6,{constructor:function(_7){
this.headers={};
_4.safeMixin(this,_7);
},headers:{},target:"",idProperty:"id",ascendingPrefix:"+",descendingPrefix:"-",_getTarget:function(id){
var _8=this.target;
if(typeof id!="undefined"){
if(_8.charAt(_8.length-1)=="/"){
_8+=id;
}else{
_8+="/"+id;
}
}
return _8;
},get:function(id,_9){
_9=_9||{};
var _a=_2.mixin({Accept:this.accepts},this.headers,_9.headers||_9);
return _1("GET",{url:this._getTarget(id),handleAs:"json",headers:_a});
},accepts:"application/javascript, application/json",getIdentity:function(_b){
return _b[this.idProperty];
},put:function(_c,_d){
_d=_d||{};
var id=("id" in _d)?_d.id:this.getIdentity(_c);
var _e=typeof id!="undefined";
return _1(_e&&!_d.incremental?"PUT":"POST",{url:this._getTarget(id),postData:_3.stringify(_c),handleAs:"json",headers:_2.mixin({"Content-Type":"application/json",Accept:this.accepts,"If-Match":_d.overwrite===true?"*":null,"If-None-Match":_d.overwrite===false?"*":null},this.headers,_d.headers)});
},add:function(_f,_10){
_10=_10||{};
_10.overwrite=false;
return this.put(_f,_10);
},remove:function(id,_11){
_11=_11||{};
return _1("DELETE",{url:this._getTarget(id),headers:_2.mixin({},this.headers,_11.headers)});
},query:function(_12,_13){
_13=_13||{};
var _14=_2.mixin({Accept:this.accepts},this.headers,_13.headers);
var _15=this.target.indexOf("?")>-1;
if(_12&&typeof _12=="object"){
_12=_1.objectToQuery(_12);
_12=_12?(_15?"&":"?")+_12:"";
}
if(_13.start>=0||_13.count>=0){
_14["X-Range"]="items="+(_13.start||"0")+"-"+(("count" in _13&&_13.count!=Infinity)?(_13.count+(_13.start||0)-1):"");<|fim▁hole|>}else{
_14.Range=_14["X-Range"];
}
}
if(_13&&_13.sort){
var _16=this.sortParam;
_12+=(_12||_15?"&":"?")+(_16?_16+"=":"sort(");
for(var i=0;i<_13.sort.length;i++){
var _17=_13.sort[i];
_12+=(i>0?",":"")+(_17.descending?this.descendingPrefix:this.ascendingPrefix)+encodeURIComponent(_17.attribute);
}
if(!_16){
_12+=")";
}
}
var _18=_1("GET",{url:this.target+(_12||""),handleAs:"json",headers:_14});
_18.total=_18.then(function(){
var _19=_18.ioArgs.xhr.getResponseHeader("Content-Range");
if(!_19){
_19=_18.ioArgs.xhr.getResponseHeader("X-Content-Range");
}
return _19&&(_19=_19.match(/\/(.*)/))&&+_19[1];
});
return _5(_18);
}});
});<|fim▁end|>
|
if(this.rangeParam){
_12+=(_12||_15?"&":"?")+this.rangeParam+"="+_14["X-Range"];
_15=true;
|
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
DATABASES = {
'default': {<|fim▁hole|> 'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test',
}
}
ROOT_URLCONF = 'urls'
SITE_ID = 1
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django_featurette',
)
TEMPLATE_CONTEXT_PROCESSORS += ("django.core.context_processors.request",)
SECRET_KEY = 'sk'
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)<|fim▁end|>
| |
<|file_name|>UTTTError.ts<|end_file_name|><|fim▁begin|>export default class UTTTError extends Error {<|fim▁hole|> super(message);
this.code = code;
}
}<|fim▁end|>
|
constructor(message: string, private code: number) {
|
<|file_name|>server-entry.js<|end_file_name|><|fim▁begin|>import { app, router, store } from './app'
const isDev = process.env.NODE_ENV !== 'production'
export default context => {
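  // Server-side entry: resolve the requested route, run each matched
  // component's optional preFetch(store) hook, then expose store.state
  // for client-side hydration.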
const s = isDev && Date.now()
router.push(context.url)
const matchedComponents = router.getMatchedComponents()
if (!matchedComponents.length) {
return Promise.reject({ code: 404 })
}
return Promise.all(matchedComponents.map(component => {
if (component.preFetch) {
return component.preFetch(store)<|fim▁hole|> }
})).then(() => {
isDev && console.log(`data pre-fetch: ${Date.now() - s}ms`)
context.initialState = store.state
return app
})
}<|fim▁end|>
| |
<|file_name|>ParseUtils.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 hilemz
*
* http://www.wykop.pl/ludzie/hilemz/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pl.hilemz.yahoofinanceapi;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
/**
* <p>
* Date: 22.09.2015
* </p>
*
* <p>
* Complex set of tools useful for parsing data provided by Yahoo Finance API.
* </p>
*
* @author hilemz
*/
public class ParseUtils {
/**
	 * This method parses a date from a String object to a LocalDate object.
*
* @param dateValue
* Data value in format "MM/dd/yyyy".
* @return Parsed date to LocalDate object.
*/
public static LocalDate toLocalDate(String dateValue) {
return LocalDate.parse(trimQuotes(dateValue), DateTimeFormatter.ofPattern("M/dd/yyyy"));
}
/**
	 * This method parses a time from a String object to a LocalTime object.
*
* @param timeValue
* Time value in format "h:ma" - "4:00pm".
* @return Parsed time to LocalTime object.
*/
public static LocalTime toLocalTime(String timeValue) {
return LocalTime.parse(trimQuotes(timeValue).toUpperCase(), DateTimeFormatter.ofPattern("h:ma"));
}
/**
	 * This method parses a date and time from String objects to a LocalDateTime object.
*
* @param dateValue
* Data value in format "MM/dd/yyyy".
* @param timeValue
* Time value in format "K:ma" - "4:00pm".
* @return Parsed date and time to LocalDateTime object.
*/
public static LocalDateTime toLocalDateTime(String dateValue, String timeValue) {
return LocalDateTime.of(toLocalDate(dateValue), toLocalTime(timeValue));
}
/**
	 * This method parses a date and time from String objects to a UNIX epoch timestamp.
*
* @param dateValue
* Data value in format "MM/dd/yyyy".
* @param timeValue
* Time value in format "K:ma" - "4:00pm".
* @return Parsed date and time to UNIX EPOCH timestamp object.
*/
public static long toTimestamp(String dateValue, String timeValue) {
return toLocalDateTime(dateValue, timeValue).toInstant(ZoneOffset.UTC).toEpochMilli();
}
/**
* <p>
* This method converts {@link DataType#CHANGE_PERCENT} String object to Tuple of double objects. Example value that
* can be passed:
* </p>
*
* <p>
* "13.65 - 2.12%"
* </p>
*
* @param value
	 * Value for conversion.
* @return Converted value.
*/
public static Tuple<Double, Double> changePercentToTuple(String value) {
String[] elements = splitValue(value);
if (elements.length != 2 || !value.matches("^(\"[0-9]+|\"-[0-9]+|[0-9]+|-[0-9]+).*-+.*([0-9]+%|[0-9]+%\")$")) {
throw new IllegalArgumentException("Illegal arguments. Value should be in format: \"13.65 - 2.12%\".");
}
return new Tuple<>(Double.parseDouble(elements[0]), Double.parseDouble(trimPercent(elements[1])));
}
/**
* <p>
* This method converts {@link DataType#LAST_TRADE_WITH_TIME} String object to Tuple of LocalTime and double
* objects. Example value that can be passed:
* </p>
*
* <p>
* "4:00pm - <b>629.25</b>"
* </p>
*
* @param value
	 * Value for conversion.
* @return Converted value.
*/
public static Tuple<LocalTime, Double> tradeWithTimeToTuple(String value) {
String[] elements = splitValue(value);
if (elements.length != 2
|| !value.matches("^(\"[0-9]+|\"-[0-9]+|[0-9]+|-[0-9]+).*-+.*([0-9]+</b>|[0-9]+</b>\")$")) {
throw new IllegalArgumentException("Illegal arguments. Value should be in format: \"4"
+ ":00pm - <b>629.25</b>\".");
}
return new Tuple<>(toLocalTime(elements[0]), Double.parseDouble(trimBold(elements[1])));
}
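	// Illustrative usage: tradeWithTimeToTuple("\"4:00pm - <b>629.25</b>\"") should
	// return a Tuple holding the LocalTime 16:00 and the Double 629.25.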
/**
* <p>
	 * This method converts a {@link DataType#DAYS_RANGE} String value to a Tuple of Double objects. An example value
	 * that can be passed:
* </p>
*
* <p>
* "627.02 - 640.00"
* </p>
*
* @param value
	 *            Value to convert.
* @return Converted value.
*/
public static Tuple<Double, Double> priceRangeToTuple(String value) {
String[] elements = splitValue(value);
if (elements.length != 2 || !value.matches("^(\"[0-9]+|\"-[0-9]+|[0-9]+|-[0-9]+).*-+.*([0-9]+|[0-9]+\")$")) {
throw new IllegalArgumentException("Illegal arguments. Value should be in format: \"627.02 - 640.00\".");
}
return new Tuple<>(Double.parseDouble(elements[0]), Double.parseDouble(elements[1]));
}
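	// Illustrative usage: priceRangeToTuple("\"627.02 - 640.00\"") should return a
	// Tuple holding 627.02 and 640.00.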
/**
	 * This method converts a large number returned by the Yahoo Finance API with a magnitude symbol at the end. It
	 * supports the M and B suffixes.
*
* @param value
	 *            Value to convert.
* @return Converted value.
*/
public static long bigNumberToLong(String value) {
if (!isParsable(value)) {
throw new IllegalArgumentException("Illegal argument value.");
}
		if (!value.matches("(([0-9]*\\.[0-9]*)|([0-9]*))[a-zA-Z]{1}$")) {
return Long.parseLong(value);
}
char suffix = value.charAt(value.length() - 1);
value = value.substring(0, value.length() - 1);
if (suffix == 'B') {
			return (long) (Double.parseDouble(value) * 1_000_000_000L);
		} else if (suffix == 'M') {
			return (long) (Double.parseDouble(value) * 1_000_000L);
}
throw new IllegalArgumentException("Number couldn't be resolved.");
}
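	// Illustrative usage: bigNumberToLong("1.5B") should return 1500000000L,
	// bigNumberToLong("2M") should return 2000000L, and plain "42" is parsed as 42L.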
/**
	 * This method converts a percentage String value such as "3.45%" to a double. The result is the plain numeric
	 * value, e.g. 3.45.
*
* @param value
	 *            Value to convert.
* @return Converted value.
*/
public static double percentValueToDouble(String value) {
if (!isParsable(value)) {
throw new IllegalArgumentException("Illegal argument value.");
}
return Double.parseDouble(trimPercent(value));
}
/**
	 * This method splits the passed value on the delimiter " - " after trimming surrounding quotes.
*
* @param value
* Value to split.
	 * @return The split value.
*/
public static String[] splitValue(String value) {
return trimQuotes(value).split(" - ");
}
/**
	 * This method trims leading and trailing " characters from the passed value.
*
* @param value
* Value to trim.
* @return Trimmed value.
*/
public static String trimQuotes(String value) {
if (!isParsable(value)) {
throw new IllegalArgumentException("Illegal argument value.");
}
return value.replaceAll("^\"|\"$", "");
}
/**
	 * This method trims the % symbol from the passed value.
*
* @param value
* Value to trim.
* @return Trimmed value.
*/
public static String trimPercent(String value) {
if (!isParsable(value)) {
throw new IllegalArgumentException("Illegal argument value.");
}
return value.replaceAll("%", "");
}
/**
	 * This method trims <code>&lt;b&gt;</code> and <code>&lt;/b&gt;</code> tags from the passed value.
*
* @param value
* Value to trim.
* @return Trimmed value.
*/
public static String trimBold(String value) {
if (!isParsable(value)) {
throw new IllegalArgumentException("Illegal argument value.");
}
return value.replaceAll("<b>|</b>", "");
}
/**
	 * This method verifies whether the passed value is suitable for parsing.
*
* @param value
* Value to check.
* @return Result of verification.<|fim▁hole|> */
public static boolean isParsable(String value) {
return !(value == null || value.equals("N/A") || value.equals("") || value.equals("\"\""));
}
}<|fim▁end|>
| |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import { Direction } from "./Direction";
function printValue(direction: Direction): void {
if (direction === Direction.Horizontal) {
console.log("Horizontal.");
} else if (direction === Direction.Vertical) {
console.log("Vertical.");
} else {
console.log("Unknown...");
}
}
<|fim▁hole|>printValue(Direction.Unknown);
printValue(Direction.Horizontal);
printValue(Direction.Vertical);
let direction: Direction;
direction = Direction.Unknown;
printValue(direction);<|fim▁end|>
| |
<|file_name|>adbutlerBidAdapter_spec.js<|end_file_name|><|fim▁begin|>describe('adbutler adapter tests', function () {
var expect = require('chai').expect;
var adapter = require('modules/adbutlerBidAdapter');
var adLoader = require('src/adloader');
var bidmanager = require('src/bidmanager');
describe('creation of bid url', function () {
var stubLoadScript;
beforeEach(function () {
stubLoadScript = sinon.stub(adLoader, 'loadScript');
});
afterEach(function () {
stubLoadScript.restore();
});
if (typeof ($$PREBID_GLOBAL$$._bidsReceived) === 'undefined') {
$$PREBID_GLOBAL$$._bidsReceived = [];
}
if (typeof ($$PREBID_GLOBAL$$._bidsRequested) === 'undefined') {
$$PREBID_GLOBAL$$._bidsRequested = [];
}
if (typeof ($$PREBID_GLOBAL$$._adsReceived) === 'undefined') {
$$PREBID_GLOBAL$$._adsReceived = [];
}
it('should be called', function () {
var params = {
bidderCode: 'adbutler',
bids: [
{
bidId: '3c9408cdbf2f68',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '167283',
zoneID: '210093'
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
adapter().callBids(params);
sinon.assert.called(stubLoadScript);
});
it('should populate the keyword', function() {
var params = {
bidderCode: 'adbutler',
bids: [
{
bidId: '3c9408cdbf2f68',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '167283',
zoneID: '210093',
keyword: 'fish'
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
adapter().callBids(params);
var requestURI = stubLoadScript.getCall(0).args[0];
expect(requestURI).to.have.string(';kw=fish;');
});
it('should use custom domain string', function() {
var params = {
bidderCode: 'adbutler',
bids: [
{
bidId: '3c9408cdbf2f68',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '107878',
zoneID: '86133',
domain: 'servedbyadbutler.com.dan.test'
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
adapter().callBids(params);
var requestURI = stubLoadScript.getCall(0).args[0];
expect(requestURI).to.have.string('.dan.test');
});
});
describe('bid responses', function() {
it('should return complete bid response', function() {
var stubAddBidResponse = sinon.stub(bidmanager, 'addBidResponse');
var params = {
bidderCode: 'adbutler',
bidder: 'adbutler',
bids: [
{
bidId: '3c94018cdbf2f68-1',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '167283',
zoneID: '210093',
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
var response = {
status: 'SUCCESS',
account_id: 167283,
zone_id: 210093,
cpm: 1.5,
width: 300,
height: 250,
place: 0
};
adapter().callBids(params);
      var adUnits = [];
      var unit = {};
unit.bids = params.bids;
unit.code = '/123456/header-bid-tag-1';
unit.sizes = [[300, 250]];
adUnits.push(unit);
if (typeof ($$PREBID_GLOBAL$$._bidsRequested) === 'undefined') {
$$PREBID_GLOBAL$$._bidsRequested = [params];
} else {
$$PREBID_GLOBAL$$._bidsRequested.push(params);
}
$$PREBID_GLOBAL$$.adUnits = adUnits;
$$PREBID_GLOBAL$$.adbutlerCB(response);
var bidPlacementCode1 = stubAddBidResponse.getCall(0).args[0];
var bidObject1 = stubAddBidResponse.getCall(0).args[1];
expect(bidPlacementCode1).to.equal('/123456/header-bid-tag-1');
expect(bidObject1.getStatusCode()).to.equal(1);
expect(bidObject1.bidderCode).to.equal('adbutler');
expect(bidObject1.cpm).to.equal(1.5);
expect(bidObject1.width).to.equal(300);
expect(bidObject1.height).to.equal(250);
stubAddBidResponse.restore();
});
it('should return empty bid response', function() {
var stubAddBidResponse = sinon.stub(bidmanager, 'addBidResponse');
var params = {
bidderCode: 'adbutler',
bids: [
{
bidId: '3c9408cdbf2f68-2',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '167283',
zoneID: '210085',
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
var response = {
status: 'NO_ELIGIBLE_ADS',
zone_id: 210085,
width: 728,
height: 90,
place: 0
};
adapter().callBids(params);
      var adUnits = [];
      var unit = {};
unit.bids = params.bids;
unit.code = '/123456/header-bid-tag-1';
unit.sizes = [[300, 250]];
adUnits.push(unit);
if (typeof ($$PREBID_GLOBAL$$._bidsRequested) === 'undefined') {
$$PREBID_GLOBAL$$._bidsRequested = [params];
} else {
$$PREBID_GLOBAL$$._bidsRequested.push(params);
}
$$PREBID_GLOBAL$$.adUnits = adUnits;
$$PREBID_GLOBAL$$.adbutlerCB(response);
var bidPlacementCode1 = stubAddBidResponse.getCall(0).args[0];
var bidObject1 = stubAddBidResponse.getCall(0).args[1];
expect(bidPlacementCode1).to.equal('/123456/header-bid-tag-1');
expect(bidObject1.getStatusCode()).to.equal(2);
expect(bidObject1.bidderCode).to.equal('adbutler');
stubAddBidResponse.restore();
});
it('should return empty bid response on incorrect size', function() {
var stubAddBidResponse = sinon.stub(bidmanager, 'addBidResponse');
var params = {
bidderCode: 'adbutler',
bids: [
{
bidId: '3c9408cdbf2f68-3',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '167283',
zoneID: '210085',
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
var response = {
status: 'SUCCESS',
account_id: 167283,
zone_id: 210085,
cpm: 1.5,
width: 728,
height: 90,
place: 0
};
adapter().callBids(params);
      var adUnits = [];
      var unit = {};
unit.bids = params.bids;
unit.code = '/123456/header-bid-tag-1';
unit.sizes = [[300, 250]];
adUnits.push(unit);
if (typeof ($$PREBID_GLOBAL$$._bidsRequested) === 'undefined') {
$$PREBID_GLOBAL$$._bidsRequested = [params];
} else {
$$PREBID_GLOBAL$$._bidsRequested.push(params);
}
$$PREBID_GLOBAL$$.adUnits = adUnits;
$$PREBID_GLOBAL$$.adbutlerCB(response);
var bidObject1 = stubAddBidResponse.getCall(0).args[1];
expect(bidObject1.getStatusCode()).to.equal(2);
stubAddBidResponse.restore();
});
it('should return empty bid response with CPM too low', function() {
var stubAddBidResponse = sinon.stub(bidmanager, 'addBidResponse');
var params = {
bidderCode: 'adbutler',
bids: [
{
bidId: '3c9408cdbf2f68-4',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '167283',
zoneID: '210093',
minCPM: '5.00'
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
var response = {
status: 'SUCCESS',
account_id: 167283,
zone_id: 210093,
cpm: 1.5,
width: 300,
height: 250,
place: 0
};
adapter().callBids(params);
      var adUnits = [];
      var unit = {};
unit.bids = params.bids;
unit.code = '/123456/header-bid-tag-1';
unit.sizes = [[300, 250]];
adUnits.push(unit);
if (typeof ($$PREBID_GLOBAL$$._bidsRequested) === 'undefined') {
$$PREBID_GLOBAL$$._bidsRequested = [params];
} else {
$$PREBID_GLOBAL$$._bidsRequested.push(params);
}
$$PREBID_GLOBAL$$.adUnits = adUnits;
$$PREBID_GLOBAL$$.adbutlerCB(response);
var bidObject1 = stubAddBidResponse.getCall(0).args[1];
expect(bidObject1.getStatusCode()).to.equal(2);
stubAddBidResponse.restore();
});
it('should return empty bid response with CPM too high', function() {
var stubAddBidResponse = sinon.stub(bidmanager, 'addBidResponse');
var params = {<|fim▁hole|> {
bidId: '3c9408cdbf2f68-5',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '167283',
zoneID: '210093',
maxCPM: '1.00'
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
var response = {
status: 'SUCCESS',
account_id: 167283,
zone_id: 210093,
cpm: 1.5,
width: 300,
height: 250,
place: 0
};
adapter().callBids(params);
      var adUnits = [];
      var unit = {};
unit.bids = params.bids;
unit.code = '/123456/header-bid-tag-1';
unit.sizes = [[300, 250]];
adUnits.push(unit);
if (typeof ($$PREBID_GLOBAL$$._bidsRequested) === 'undefined') {
$$PREBID_GLOBAL$$._bidsRequested = [params];
} else {
$$PREBID_GLOBAL$$._bidsRequested.push(params);
}
$$PREBID_GLOBAL$$.adUnits = adUnits;
$$PREBID_GLOBAL$$.adbutlerCB(response);
var bidObject1 = stubAddBidResponse.getCall(0).args[1];
expect(bidObject1.getStatusCode()).to.equal(2);
stubAddBidResponse.restore();
});
});
describe('ad code', function() {
it('should be populated', function() {
var stubAddBidResponse = sinon.stub(bidmanager, 'addBidResponse');
var params = {
bidderCode: 'adbutler',
bids: [
{
bidId: '3c9408cdbf2f68-6',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '167283',
zoneID: '210093'
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
var response = {
status: 'SUCCESS',
account_id: 167283,
zone_id: 210093,
cpm: 1.5,
width: 300,
height: 250,
place: 0,
ad_code: '<img src="http://image.source.com/img" alt="" title="" border="0" width="300" height="250">'
};
adapter().callBids(params);
      var adUnits = [];
      var unit = {};
unit.bids = params.bids;
unit.code = '/123456/header-bid-tag-1';
unit.sizes = [[300, 250]];
adUnits.push(unit);
if (typeof ($$PREBID_GLOBAL$$._bidsRequested) === 'undefined') {
$$PREBID_GLOBAL$$._bidsRequested = [params];
} else {
$$PREBID_GLOBAL$$._bidsRequested.push(params);
}
$$PREBID_GLOBAL$$.adUnits = adUnits;
$$PREBID_GLOBAL$$.adbutlerCB(response);
var bidObject1 = stubAddBidResponse.getCall(0).args[1];
expect(bidObject1.getStatusCode()).to.equal(1);
expect(bidObject1.ad).to.have.length.above(1);
stubAddBidResponse.restore();
});
it('should contain tracking pixels', function() {
var stubAddBidResponse = sinon.stub(bidmanager, 'addBidResponse');
var params = {
bidderCode: 'adbutler',
bids: [
{
bidId: '3c9408cdbf2f68-7',
sizes: [[300, 250]],
bidder: 'adbutler',
params: {
accountID: '167283',
zoneID: '210093'
},
requestId: '10b327aa396609',
placementCode: '/123456/header-bid-tag-1'
}
]
};
var response = {
status: 'SUCCESS',
account_id: 167283,
zone_id: 210093,
cpm: 1.5,
width: 300,
height: 250,
place: 0,
ad_code: '<img src="http://image.source.com/img" alt="" title="" border="0" width="300" height="250">',
tracking_pixels: [
'http://tracking.pixel.com/params=info'
]
};
adapter().callBids(params);
      var adUnits = [];
      var unit = {};
unit.bids = params.bids;
unit.code = '/123456/header-bid-tag-1';
unit.sizes = [[300, 250]];
adUnits.push(unit);
if (typeof ($$PREBID_GLOBAL$$._bidsRequested) === 'undefined') {
$$PREBID_GLOBAL$$._bidsRequested = [params];
} else {
$$PREBID_GLOBAL$$._bidsRequested.push(params);
}
$$PREBID_GLOBAL$$.adUnits = adUnits;
$$PREBID_GLOBAL$$.adbutlerCB(response);
var bidObject1 = stubAddBidResponse.getCall(0).args[1];
expect(bidObject1.getStatusCode()).to.equal(1);
expect(bidObject1.ad).to.have.string('http://tracking.pixel.com/params=info');
stubAddBidResponse.restore();
});
});
});<|fim▁end|>
|
bidderCode: 'adbutler',
bids: [
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from codecs import open # To use a consistent encoding
from os import path
from setuptools import setup
HERE = path.dirname(path.abspath(__file__))
# Get version info
ABOUT = {}
with open(path.join(HERE, 'datadog_checks', 'riak_repl', '__about__.py')) as f:
exec(f.read(), ABOUT)
# Get the long description from the README file
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
<|fim▁hole|> if not path.isfile(dep_file):
return []
with open(dep_file, encoding='utf-8') as f:
return f.readlines()
def parse_pyproject_array(name):
import os
import re
from ast import literal_eval
pattern = r'^{} = (\[.*?\])$'.format(name)
with open(os.path.join(HERE, 'pyproject.toml'), 'r', encoding='utf-8') as f:
# Windows \r\n prevents match
contents = '\n'.join(line.rstrip() for line in f.readlines())
array = re.search(pattern, contents, flags=re.MULTILINE | re.DOTALL).group(1)
return literal_eval(array)
CHECKS_BASE_REQ = parse_pyproject_array('dependencies')[0]
setup(
name='datadog-riak_repl',
version=ABOUT['__version__'],
description='The Riak_repl check',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='datadog agent riak_repl check',
# The project's main homepage.
url='https://github.com/DataDog/integrations-extras',
# Author details
author='Britt Treece',
author_email='[email protected]',
# License
license='BSD-3-Clause',
# See https://pypi.org/classifiers
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
# The package we're going to ship
packages=['datadog_checks', 'datadog_checks.riak_repl'],
# Run-time dependencies
install_requires=[CHECKS_BASE_REQ],
extras_require={'deps': parse_pyproject_array('deps')},
# Extra files to ship with the wheel package
include_package_data=True,
)<|fim▁end|>
|
def get_dependencies():
dep_file = path.join(HERE, 'requirements.in')
|
<|file_name|>aws-autoscalingplans-scalingplan_predefinedscalingmetricspecification.go<|end_file_name|><|fim▁begin|>package resources
import "github.com/awslabs/goformation/cloudformation/policies"
// AWSAutoScalingPlansScalingPlan_PredefinedScalingMetricSpecification AWS CloudFormation Resource (AWS::AutoScalingPlans::ScalingPlan.PredefinedScalingMetricSpecification)
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-autoscalingplans-scalingplan-predefinedscalingmetricspecification.html
type AWSAutoScalingPlansScalingPlan_PredefinedScalingMetricSpecification struct {
// PredefinedScalingMetricType AWS CloudFormation Property
// Required: true
// See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-autoscalingplans-scalingplan-predefinedscalingmetricspecification.html#cfn-autoscalingplans-scalingplan-predefinedscalingmetricspecification-predefinedscalingmetrictype
PredefinedScalingMetricType string `json:"PredefinedScalingMetricType,omitempty"`
// ResourceLabel AWS CloudFormation Property<|fim▁hole|> // See: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-autoscalingplans-scalingplan-predefinedscalingmetricspecification.html#cfn-autoscalingplans-scalingplan-predefinedscalingmetricspecification-resourcelabel
ResourceLabel string `json:"ResourceLabel,omitempty"`
// _deletionPolicy represents a CloudFormation DeletionPolicy
_deletionPolicy policies.DeletionPolicy
// _dependsOn stores the logical ID of the resources to be created before this resource
_dependsOn []string
// _metadata stores structured data associated with this resource
_metadata map[string]interface{}
}
// AWSCloudFormationType returns the AWS CloudFormation resource type
func (r *AWSAutoScalingPlansScalingPlan_PredefinedScalingMetricSpecification) AWSCloudFormationType() string {
return "AWS::AutoScalingPlans::ScalingPlan.PredefinedScalingMetricSpecification"
}
// DependsOn returns a slice of logical ID names this resource depends on.
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-dependson.html
func (r *AWSAutoScalingPlansScalingPlan_PredefinedScalingMetricSpecification) DependsOn() []string {
return r._dependsOn
}
// SetDependsOn specify that the creation of this resource follows another.
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-dependson.html
func (r *AWSAutoScalingPlansScalingPlan_PredefinedScalingMetricSpecification) SetDependsOn(dependencies []string) {
r._dependsOn = dependencies
}
// Metadata returns the metadata associated with this resource.
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-metadata.html
func (r *AWSAutoScalingPlansScalingPlan_PredefinedScalingMetricSpecification) Metadata() map[string]interface{} {
return r._metadata
}
// SetMetadata enables you to associate structured data with this resource.
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-metadata.html
func (r *AWSAutoScalingPlansScalingPlan_PredefinedScalingMetricSpecification) SetMetadata(metadata map[string]interface{}) {
r._metadata = metadata
}
// SetDeletionPolicy applies an AWS CloudFormation DeletionPolicy to this resource
// see: https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-attribute-deletionpolicy.html
func (r *AWSAutoScalingPlansScalingPlan_PredefinedScalingMetricSpecification) SetDeletionPolicy(policy policies.DeletionPolicy) {
r._deletionPolicy = policy
}<|fim▁end|>
|
// Required: false
|
<|file_name|>updating-widget.js<|end_file_name|><|fim▁begin|>/*
* Copyright 2017 Hewlett Packard Enterprise Development Company, L.P.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
*/
define([
'underscore',
'jquery',
'find/idol/app/page/dashboard/widgets/updating-widget',
'find/idol/app/page/dashboard/update-tracker-model'
], function(_, $, UpdatingWidget, UpdateTrackerModel) {
'use strict';
const spies = jasmine.createSpyObj('spies', ['onComplete', 'onIncrement', 'onCancelled', 'doUpdate']);
const TestUpdatingWidget = UpdatingWidget.extend(spies);
describe('Updating Widget', function() {
beforeEach(function() {
jasmine.addMatchers({
toShowLoadingSpinner: function() {
return {
compare: function(actual) {
const pass = !actual.$loadingSpinner.hasClass('hide');
return {
pass: pass,
message: 'Expected the view ' +
(pass ? 'not ' : '') +
'to show a loading spinner'
};
}
}
}
});
this.widget = new TestUpdatingWidget({
name: 'Test Widget'
});
this.widget.render();
this.updateDeferred = $.Deferred();
this.updateTrackerModel = new UpdateTrackerModel();
});
afterEach(function() {
_.each(spies, function(spy) {
spy.calls.reset();
      });
});
describe('when the update is synchronous', function() {
beforeEach(function() {
this.widget.doUpdate.and.callFake(function(done) {
done();
});
this.widget.update(this.updateTrackerModel);
});
it('it should increment the model when the done callback is called', function() {
expect(this.updateTrackerModel.get('count')).toBe(1);
});
it('should call onIncrement when the count increases', function() {
// count increased when the widget updated
expect(this.widget.onIncrement.calls.count()).toBe(1);
});
it('should call onComplete when the model is set to complete', function() {
this.updateTrackerModel.set('complete', true);
expect(this.widget.onComplete.calls.count()).toBe(1);
});
it('should call onCancelled when the model is set to cancelled', function() {
this.updateTrackerModel.set('cancelled', true);
expect(this.widget.onCancelled.calls.count()).toBe(1);
});
});
describe('when the update is asynchronous', function() {
beforeEach(function() {
// when a test resolves the deferred, call the done callback
this.widget.doUpdate.and.callFake(function(done) {
this.updateDeferred.done(done);
}.bind(this));
});
describe('and the update is called', function() {
beforeEach(function() {
this.widget.update(this.updateTrackerModel);
});
it('should show the loading spinner until the update completes', function() {
expect(this.widget).toShowLoadingSpinner();
this.updateDeferred.resolve();
expect(this.widget).not.toShowLoadingSpinner();
});
it('should not increment the model until the update is complete', function() {
expect(this.updateTrackerModel.get('count')).toBe(0);
this.updateDeferred.resolve();
expect(this.updateTrackerModel.get('count')).toBe(1);
});
it('should call onIncrement when the count increases', function() {
this.updateTrackerModel.increment();
expect(this.widget.onIncrement.calls.count()).toBe(1);<|fim▁hole|> });
it('should call onComplete when the model is set to complete', function() {
this.updateTrackerModel.set('complete', true);
expect(this.widget.onComplete.calls.count()).toBe(1);
});
it('should call onCancelled when the model is set to cancelled', function() {
this.updateTrackerModel.set('cancelled', true);
expect(this.widget.onCancelled.calls.count()).toBe(1);
});
});
    });
});
});<|fim▁end|>
| |
<|file_name|>bitcoin_cy.ts<|end_file_name|><|fim▁begin|><TS version="2.1" language="cy">
<context>
<name>AddressBookPage</name>
<message>
<source>Create a new address</source>
<translation>Creu cyfeiriad newydd</translation>
</message>
<message>
<source>&New</source>
<translation>&Newydd</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copio'r cyfeiriad sydd wedi'i ddewis i'r clipfwrdd system</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Copïo</translation>
</message>
<message>
<source>C&lose</source>
<translation>C&au</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Dileu'r cyfeiriad presennol wedi ei ddewis o'r rhestr</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Cyfeiriad neu label i chwilio</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Allforio'r data yn y tab presennol i ffeil</translation>
</message>
<message>
<source>&Export</source>
<translation>&Allforio</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Dileu</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Dewis y cyfeiriad i yrru'r arian</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Dewis y cyfeiriad i dderbyn arian</translation>
</message>
<message>
<source>C&hoose</source>
<translation>D&ewis</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Anfon cyfeiriadau</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Derbyn cyfeiriadau</translation>
</message>
<message>
<source>These are your Bitcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Rhain ydi eich cyfeiriadau Bitcoin ar gyfer gyrru taliadau. Gwnewch yn sicr o'r swm a'r cyfeiriad derbyn cyn gyrru arian.</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Copïo Cyfeiriad</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Copïo &Label</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Golygu</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Allforio Rhestr Cyfeiriadau</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<extracomment>An error message. %1 is a stand-in argument for the name of the file we attempted to save to.</extracomment>
<translation>Roedd camgymeriad yn trïo safio'r rhestr gyfeiriadau i'r %1. Triwch eto os gwelwch yn dda.</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Methu Allforio</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Address</source>
<translation>Cyfeiriad</translation>
</message>
<message>
<source>(no label)</source>
<translation>(dim label)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Deialog Cyfrinair</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Teipiwch gyfrinymadrodd</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Cyfrinymadrodd newydd</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Ailadroddwch gyfrinymadrodd newydd</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Amgryptio'r Waled</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Mae'r weithred hon angen eich cyfrinair waled i ddatgloi'r waled.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Datgloi'r waled</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Newid cyfrinair</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Cadarnhau amgryptio'r waled</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR BITCOINS</b>!</source>
<translation>Rhybudd: Os ydych yn amgryptio'r waled ag yn colli'r cyfrinair, byddwch yn <b> COLLI EICH BITCOINS I GYD <b> !</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Ydych yn siwr eich bod eisiau amgryptio eich waled?</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Waled wedi amgryptio</translation>
</message>
<message>
<source>Enter the new passphrase for the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Ysgrifennwch y cyfrinair newydd i'r waled. <br/> Os gwelwch yn dda, defnyddiwch gyfrinair <b> deg neu fwy o gymeriadau ar hap </b> , neu <b> wyth neu fwy o eiriau </b> .</translation>
</message>
<message>
<source>Enter the old passphrase and new passphrase for the wallet.</source>
<translation>Ysgrifennwch yr hen gyfrinair a chyfrinair newydd y waled.</translation>
</message>
<message>
<source>Wallet to be encrypted</source>
<translation>Waled i'w amgryptio</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>PWYSIG: Mi ddylai unrhyw back ups blaenorol rydych wedi ei wneud o ffeil eich waled gael ei ddiweddaru efo'r ffeil amgryptiedig newydd ei chreu. Am resymau diogelwch, bydd back ups blaenorol o ffeil y walet heb amgryptio yn ddiwerth mor fuan ac yr ydych yn dechrau defnyddio'r waled amgryptiedig newydd.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Amgryptio waled wedi methu</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Amgryptio waled wedi methu oherwydd gwall mewnol. Dydi eich waled heb amgryptio.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Nid ydi'r cyfrineiriau a gyflenwyd yn cyfateb.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Dadgloi waled wedi methu</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Mae'r cyfrinair ysgrifennwyd ar gyfer datgryptio'r waled yn anghywir.</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Newid cyfrinair waled yn llwyddiannus.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Rhybudd: Mae allwedd Caps Lock ymlaen!</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
<message>
<source>IP/Netmask</source>
<translation>IP/Rhwydfwgwd</translation>
</message>
<message>
<source>Banned Until</source>
<translation>Gwaharddwyd Nes</translation>
</message>
</context>
<context>
<name>BitcoinApplication</name>
<message>
<source>A fatal error occurred. %1 can no longer continue safely and will quit.</source>
<translation>Mae gwall angheuol wedi digwydd. Ni all %1 barhau'n ddiogel ac mae'n cau lawr.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>&Overview</source>
<translation>&Trosolwg</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Dangos trosolwg cyffredinol y waled</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Trafodion</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Pori hanes trafodion</translation>
</message>
<message>
<source>E&xit</source>
<translation>A&llanfa</translation>
</message>
<message>
<source>Quit application</source>
<translation>Gadael rhaglen</translation>
</message>
<message>
<source>&About %1</source>
<translation>&Ynghylch %1</translation>
</message>
<message>
<source>Show information about %1</source>
<translation>Dangos gwybodaeth am %1</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Ynghylch &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Dangos gwybodaeth am Qt</translation>
</message>
<message>
<source>Modify configuration options for %1</source>
<translation>Addasu ffurfweddiad dewisiadau ar gyfer %1</translation>
</message>
<message>
<source>Wallet:</source>
<translation>Waled:</translation>
</message>
<message>
<source>Network activity disabled.</source>
<extracomment>A substring of the tooltip.</extracomment>
<translation>Gweithgaredd rhwydwaith wedi anablu.</translation>
</message>
<message>
<source>Send coins to a Bitcoin address</source>
<translation>Anfon arian i gyfeiriad Bitcoin</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Bacio fyny'r waled i leoliad arall</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Newid y cyfrinair ddefnyddiwyd ar gyfer amgryptio'r waled</translation>
</message>
<message>
<source>&Send</source>
<translation>&Anfon</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Derbyn</translation>
</message>
<message>
<source>&Options…</source>
<translation type="unfinished">&Opsiynau…</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Dangos / Cuddio</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Dangos neu guddio y brif Ffenest</translation>
</message>
<message>
<source>&Encrypt Wallet…</source>
<translation type="unfinished">&Amgryptio'r waled…</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Amgryptio'r allweddi preifat sy'n perthyn i'ch waled</translation>
</message>
<message>
<source>&Backup Wallet…</source>
<translation type="unfinished">&Backup Waled…</translation>
</message>
<message>
<source>&Change Passphrase…</source>
<translation type="unfinished">&Newid cyfrinymadrodd…</translation>
</message>
<message>
<source>Sign &message…</source>
<translation type="unfinished">Arwyddo &neges…</translation>
</message>
<message>
<source>Sign messages with your Bitcoin addresses to prove you own them</source>
<translation>Arwyddo negeseuon gyda eich cyfeiriadau Bitcoin i brofi mae chi sy'n berchen arnynt</translation>
</message>
<message>
<source>&Verify message…</source>
<translation type="unfinished">&Gwirio neges…</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified Bitcoin addresses</source>
<translation>Gwirio negeseuon i sicrhau eu bod wedi eu harwyddo gyda cyfeiriadau Bitcoin penodol</translation>
</message>
<message>
<source>Open &URI…</source>
<translation type="unfinished">Agor &URI…</translation>
</message>
<message>
<source>Close Wallet…</source>
<translation type="unfinished">Cau Waled…</translation>
</message>
<message>
<source>&File</source>
<translation>&Ffeil</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Gosodiadau</translation>
</message>
<message>
<source>&Help</source>
<translation>&Cymorth</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Bar offer tabiau</translation>
</message>
<message>
<source>Syncing Headers (%1%)…</source>
<translation type="unfinished">Syncio pennawdau (%1%)…</translation>
</message>
<message>
<source>Synchronizing with network…</source>
<translation type="unfinished">Cysoni â'r rhwydwaith…</translation>
</message>
<message>
<source>Indexing blocks on disk…</source>
<translation type="unfinished">Mynegai'r blociau ar ddisg…</translation>
</message>
<message>
<source>Processing blocks on disk…</source>
<translation type="unfinished">Prosesu blociau ar ddisg…</translation>
</message>
<message>
<source>Reindexing blocks on disk…</source>
<translation type="unfinished">Ailfynegi y blociau ar ddisg…</translation>
</message>
<message>
<source>Connecting to peers…</source>
<translation type="unfinished">Cysylltu efo cyfoedion…</translation>
</message>
<message>
<source>Request payments (generates QR codes and bitcoin: URIs)</source>
<translation>Gofyn taliadau (creu côd QR a bitcoin: URIs)</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Dangos rhestr o gyfeiriadau danfon a labelau wedi eu defnyddio</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Dangos rhestr o gyfeiriadau derbyn a labelau wedi eu defnyddio</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>&Dewisiadau Gorchymyn-llinell</translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 Tu ôl</translation>
</message>
<message>
<source>Catching up…</source>
<translation type="unfinished">Dal i fyny…</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Cafodd y bloc olaf i'w dderbyn ei greu %1 yn ôl.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Ni fydd trafodion ar ôl hyn yn weledol eto.</translation>
</message>
<message>
<source>Error</source>
<translation>Gwall</translation>
</message>
<message>
<source>Warning</source>
<translation>Rhybudd</translation>
</message>
<message>
<source>Information</source>
<translation>Gwybodaeth</translation>
</message>
<message>
<source>Up to date</source>
<translation>Cyfamserol</translation>
</message>
<message>
<source>&Sending addresses</source>
<translation type="unfinished">&Cyfeiriadau anfon</translation>
</message>
<message>
<source>&Receiving addresses</source>
<translation type="unfinished">&Cyfeiriadau derbyn</translation>
</message>
<message>
<source>Open Wallet</source>
<translation>Agor Waled</translation>
</message>
<message>
<source>Open a wallet</source>
<translation>Agor waled</translation>
</message>
<message>
<source>Close wallet</source>
<translation>Cau waled</translation>
</message>
<message>
<source>&Window</source>
<translation>&Ffenestr</translation>
</message>
<message>
<source>Error: %1</source>
<translation> Gwall: %1</translation>
</message>
<message>
<source>Warning: %1</source>
<translation>Rhybudd: %1</translation>
</message>
<message>
<source>Date: %1
</source>
<translation>Dyddiad: %1
</translation>
</message>
<message>
<source>Amount: %1
</source>
<translation>Cyfanswm: %1
</translation>
</message>
<message>
<source>Wallet: %1
</source>
<translation>Waled: %1
</translation>
</message>
<message>
<source>Type: %1
</source>
<translation>Math: %1
</translation>
</message>
<message>
<source>Address: %1
</source>
<translation>Cyfeiriad: %1
</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Trafodiad anfonwyd</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Trafodiad sy'n cyrraedd</translation>
</message>
<message>
<source>HD key generation is <b>enabled</b></source>
<translation>Cynhyrchu allweddi HD wedi ei <b> alluogi </b></translation>
</message>
<message>
<source>HD key generation is <b>disabled</b></source>
<translation>Cynhyrchu allweddi HD wedi'w <b> anablu </b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Mae'r waled <b>wedi'i amgryptio</b> ac <b>heb ei gloi</b> ar hyn o bryd</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Mae'r waled <b>wedi'i amgryptio</b> ac <b>ar glo</b> ar hyn o bryd</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Selection</source>
<translation>Dewis Ceiniog</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Maint:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Maint</translation>
</message>
<message>
<source>Amount:</source>
<translation>Cyfanswm:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Ffî:</translation>
</message>
<message>
<source>Dust:</source>
<translation>Llwch</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Ar Ôl Ffî</translation>
</message>
<message>
<source>Change:</source>
<translation>Newid:</translation>
</message>
<message>
<source>Amount</source>
<translation>Cyfanswm</translation>
</message>
<message>
<source>Received with label</source>
<translation>Derbynwyd gyda label</translation>
</message>
<message>
<source>Received with address</source>
<translation>Derbynwyd gyda chyfeiriad</translation>
</message>
<message>
<source>Date</source>
<translation>Dyddiad</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Cadarnhadiadau</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Cadarnhawyd</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copïo Cyfanswm</translation>
</message>
<message>
<source>&Copy address</source>
<translation type="unfinished">&Copïo cyfeiriad</translation>
</message>
<message>
<source>Copy &label</source>
<translation type="unfinished">&Copïo label</translation>
</message>
<message>
<source>Copy &amount</source>
<translation>Copïo cyfansw&m</translation>
</message>
<message>
<source>(no label)</source>
<translation>(dim label)</translation>
</message>
<message>
<source>(change)</source>
<translation>(newid)</translation>
</message>
</context>
<context>
<name>CreateWalletActivity</name>
</context>
<context>
<name>CreateWalletDialog</name>
<message>
<source>Wallet</source>
<translation>Waled</translation>
</message>
<message>
<source>Encrypt Wallet</source>
<translation type="unfinished">Amgryptio'r waled</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Golygu'r cyfeiriad</translation>
</message>
<message>
<source>&Address</source>
<translation>&Cyfeiriad</translation>
</message>
<message>
<source>New sending address</source>
<translation>Cyfeiriad anfon newydd</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Golygu'r cyfeiriad derbyn</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Golygu'r cyfeiriad anfon</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Methodd ddatgloi'r waled.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Methodd gynhyrchu allwedd newydd.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>name</source>
<translation>enw</translation>
</message>
</context>
<context>
<name>GuiNetWatch</name>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>About %1</source>
<translation type="unfinished">Ynghylch %1</translation>
</message>
<message>
<source>Command-line options</source>
<translation type="unfinished">Dewisiadau Gorchymyn-llinell</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Croeso</translation>
</message>
<message>
<source>Error</source>
<translation>Gwall</translation>
</message>
</context>
<context>
<name>MempoolStats</name>
</context>
<context>
<name>ModalOverlay</name>
<message>
<source>Form</source>
<translation>Ffurflen</translation>
</message>
</context>
<context>
<name>NetWatchLogModel</name>
<message>
<source>Type</source>
<comment>NetWatch: Type header</comment>
<translation>Math</translation>
</message>
<message>
<source>Address</source>
<comment>NetWatch: Address header</comment>
<translation>Cyfeiriad</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Paste address from clipboard</source>
<extracomment>Tooltip text for button that allows you to paste an address that is in your clipboard.</extracomment>
<translation>Gludo cyfeiriad o'r glipfwrdd</translation>
</message>
</context>
<context>
<name>OpenWalletActivity</name>
<message>
<source>Open Wallet</source>
<extracomment>Title of window indicating the progress of opening of a wallet.</extracomment>
<translation>Agor Waled</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Opsiynau</translation>
</message>
<message>
<source>&Network</source>
<translation>&Rhwydwaith</translation>
</message>
<message>
<source>W&allet</source>
<translation>W&aled</translation>
</message>
<message>
<source>&Window</source>
<translation>&Ffenestr</translation>
</message>
<message>
<source>&Display</source>
<translation>&Dangos</translation>
</message>
<message>
<source>Bitcoin Core</source>
<translation>Craidd Bitcoin</translation>
</message>
<message>
<source>Error</source>
<translation>Gwall</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Ffurflen</translation>
</message>
</context>
<context>
<name>PSBTOperationsDialog</name>
<message>
<source>Close</source>
<translation type="unfinished">Cau</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>Address</source>
<extracomment>Title of Peers Table column which contains the IP/Onion/I2P address of the connected peer.</extracomment>
<translation>Cyfeiriad</translation>
</message>
<message>
<source>Type</source>
<extracomment>Title of Peers Table column which describes the type of peer connection. The "type" describes why the connection exists.</extracomment>
<translation>Math</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Cyfanswm</translation>
</message>
<message numerus="yes">
<source>%n hour(s)</source>
<translation><numerusform>%n awr</numerusform><numerusform>%n awr</numerusform><numerusform>%n awr</numerusform><numerusform>%n awr</numerusform></translation>
</message>
<message numerus="yes">
<source>%n day(s)</source>
<translation><numerusform>%n dydd</numerusform><numerusform>%n dydd</numerusform><numerusform>%n dydd</numerusform><numerusform>%n dydd</numerusform></translation>
</message>
<message numerus="yes">
<source>%n week(s)</source>
<translation><numerusform>%n wythnos</numerusform><numerusform>%n wythnos</numerusform><numerusform>%n wythnos</numerusform><numerusform>%n wythnos</numerusform></translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 a %2</translation>
</message>
<message numerus="yes">
<source>%n year(s)</source>
<translation><numerusform>%n blwydd</numerusform><numerusform>%n blwydd</numerusform><numerusform>%n blwydd</numerusform><numerusform>%n blwydd</numerusform></translation>
</message>
<message>
<source>Error: %1</source>
<translation> Gwall: %1</translation>
</message>
<message>
<source>Txn</source>
<comment>Tx Watch: Transaction type abbreviation</comment>
<translation>Tfd</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>&Information</source>
<translation>Gwybodaeth</translation>
</message>
<message>
<source>Network</source>
<translation>Rhwydwaith</translation>
</message>
<message>
<source>Wallet: </source>
<translation type="unfinished">Waled: </translation>
</message>
<message>
<source>&Open</source>
<translation>&Agor</translation>
</message>
<message>
<source>&Copy address</source>
<extracomment>Context menu action to copy the address of a peer</extracomment>
<translation type="unfinished">&Copïo cyfeiriad</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>&Maint</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Neges:</translation>
</message>
<message>
<source>&Copy address</source>
<translation type="unfinished">&Copïo cyfeiriad</translation>
</message>
<message>
<source>Copy &label</source>
<translation type="unfinished">&Copïo label</translation>
</message>
<message>
<source>Copy &amount</source>
<translation>Copïo cyfansw&m</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Methodd ddatgloi'r waled.</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>Address:</source>
<translation type="unfinished">Cyfeiriad:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Cyfanswm:</translation>
</message>
<message>
<source>Message:</source>
<translation>Neges:</translation>
</message>
<message>
<source>Wallet:</source>
<translation>Waled:</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>&Cyfeiriad Copi</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Dyddiad</translation>
</message>
<message>
<source>Message</source>
<translation>Neges</translation>
</message>
<message>
<source>(no label)</source>
<translation>(dim label)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Anfon arian</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Maint:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Maint</translation>
</message>
<message>
<source>Amount:</source>
<translation>Cyfanswm:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Ffî:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Ar Ôl Ffî</translation>
</message>
<message>
<source>Change:</source>
<translation>Newid:</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Anfon at pobl lluosog ar yr un pryd</translation>
</message>
<message>
<source>Dust:</source>
<translation>Llwch</translation>
</message>
<message>
<source>Choose…</source>
<translation type="unfinished">Dewis…</translation>
</message>
<message>
<source>Balance:</source>
<translation>Gweddill:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Cadarnhau'r gweithrediad anfon</translation>
</message>
<message>
<source>S&end</source>
<translation type="unfinished">&Anfon</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copïo Cyfanswm</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 i %2</translation>
</message>
<message>
<source>(no label)</source>
<translation>(dim label)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>&Maint</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Gludo cyfeiriad o'r glipfwrdd</translation>
</message>
<message>
<source>Message:</source>
<translation>Neges:</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Paste address from clipboard</source>
<translation>Gludo cyfeiriad o'r glipfwrdd</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>&Gwirio neges</translation>
</message>
<message>
<source>Verify &Message</source>
<translation type="unfinished">&Gwirio neges</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Agor tan %1</translation>
</message>
<message>
<source>Date</source>
<translation>Dyddiad</translation>
</message>
<message>
<source>Message</source>
<translation>Neges</translation>
</message>
<message>
<source>Transaction</source>
<translation>Trafodiad</translation>
</message>
<message>
<source>Amount</source>
<translation>Cyfanswm</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Dyddiad</translation>
</message>
<message>
<source>Type</source>
<translation>Math</translation>
</message>
<message>
<source>Open until %1</source>
<translation>Agor tan %1</translation>
</message>
<message>
<source>(no label)</source>
<translation>(dim label)</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>Today</source>
<translation>Heddiw</translation>
</message>
<message>
<source>This week</source>
<translation>Yr wythnos hon</translation>
</message>
<message>
<source>This month</source>
<translation>Y mis hwn</translation>
</message>
<message>
<source>Last month</source>
<translation>Mis diwethaf</translation>
</message>
<message>
<source>This year</source>
<translation>Eleni</translation>
</message>
<message>
<source>&Copy address</source>
<translation type="unfinished">&Copïo cyfeiriad</translation>
</message><|fim▁hole|> <message>
<source>Copy &label</source>
<translation type="unfinished">&Copïo label</translation>
</message>
<message>
<source>Copy &amount</source>
<translation>Copïo cyfansw&m</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Cadarnhawyd</translation>
</message>
<message>
<source>Date</source>
<translation>Dyddiad</translation>
</message>
<message>
<source>Type</source>
<translation>Math</translation>
</message>
<message>
<source>Address</source>
<translation>Cyfeiriad</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Methu Allforio</translation>
</message>
</context>
<context>
<name>WalletController</name>
<message>
<source>Close wallet</source>
<translation>Cau waled</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<source>Error</source>
<translation>Gwall</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Anfon arian</translation>
</message>
<message>
<source>Current fee:</source>
<translation>Ffi gyfredol</translation>
</message>
<message>
<source>Increase:</source>
<translation>Cynydd:</translation>
</message>
<message>
<source>New fee:</source>
<translation>Ffi newydd:</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Allforio</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Allforio'r data yn y tab presennol i ffeil</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation type="unfinished">Backup Waled</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Error</source>
<translation>Gwall</translation>
</message>
</context>
</TS><|fim▁end|>
| |
<|file_name|>signals.py<|end_file_name|><|fim▁begin|>from django.contrib.auth import get_user_model
from django.db import transaction
from django.db.models.signals import pre_delete
from django.dispatch import Signal, receiver
from misago.categories.models import Category
from misago.categories.signals import delete_category_content, move_category_content
from misago.core.pgutils import batch_delete, batch_update
from misago.users.signals import delete_user_content, username_changed
from .models import Attachment, Post, PostEdit, PostLike, Thread, Poll, PollVote
delete_post = Signal()
delete_thread = Signal()
merge_post = Signal(providing_args=["other_post"])
merge_thread = Signal(providing_args=["other_thread"])
move_post = Signal()
move_thread = Signal()
remove_thread_participant = Signal(providing_args=["user"])
"""
Signal handlers
"""
@receiver(merge_thread)<|fim▁hole|> other_thread = kwargs['other_thread']
other_thread.post_set.update(category=sender.category, thread=sender)
@receiver(merge_post)
def merge_posts(sender, **kwargs):
other_post = kwargs['other_post']
for user in sender.mentions.iterator():
other_post.mentions.add(user)
@receiver(move_thread)
def move_thread_content(sender, **kwargs):
Post.objects.filter(thread=sender).update(category=sender.category)
PostEdit.objects.filter(thread=sender).update(category=sender.category)
PostLike.objects.filter(thread=sender).update(category=sender.category)
Poll.objects.filter(thread=sender).update(category=sender.category)
PollVote.objects.filter(thread=sender).update(category=sender.category)
@receiver(delete_category_content)
def delete_category_threads(sender, **kwargs):
sender.thread_set.all().delete()
sender.post_set.all().delete()
@receiver(move_category_content)
def move_category_threads(sender, **kwargs):
new_category = kwargs['new_category']
Thread.objects.filter(category=sender).update(category=new_category)
Post.objects.filter(category=sender).update(category=new_category)
PostEdit.objects.filter(category=sender).update(category=new_category)
PostLike.objects.filter(category=sender).update(category=new_category)
Poll.objects.filter(category=sender).update(category=new_category)
PollVote.objects.filter(category=sender).update(category=new_category)
@receiver(delete_user_content)
def delete_user_threads(sender, **kwargs):
recount_categories = set()
recount_threads = set()
for thread in batch_delete(sender.thread_set.all(), 50):
recount_categories.add(thread.category_id)
with transaction.atomic():
thread.delete()
for post in batch_delete(sender.post_set.all(), 50):
recount_categories.add(post.category_id)
recount_threads.add(post.thread_id)
with transaction.atomic():
post.delete()
if recount_threads:
changed_threads_qs = Thread.objects.filter(id__in=recount_threads)
for thread in batch_update(changed_threads_qs, 50):
thread.synchronize()
thread.save()
if recount_categories:
for category in Category.objects.filter(id__in=recount_categories):
category.synchronize()
category.save()
@receiver(username_changed)
def update_usernames(sender, **kwargs):
Thread.objects.filter(starter=sender).update(
starter_name=sender.username,
starter_slug=sender.slug
)
Thread.objects.filter(last_poster=sender).update(
last_poster_name=sender.username,
last_poster_slug=sender.slug
)
Post.objects.filter(poster=sender).update(poster_name=sender.username)
Post.objects.filter(last_editor=sender).update(
last_editor_name=sender.username,
last_editor_slug=sender.slug
)
PostEdit.objects.filter(editor=sender).update(
editor_name=sender.username,
editor_slug=sender.slug
)
PostLike.objects.filter(user=sender).update(
user_name=sender.username,
user_slug=sender.slug
)
Attachment.objects.filter(uploader=sender).update(
uploader_name=sender.username,
uploader_slug=sender.slug
)
Poll.objects.filter(poster=sender).update(
poster_name=sender.username,
poster_slug=sender.slug
)
PollVote.objects.filter(voter=sender).update(
voter_name=sender.username,
voter_slug=sender.slug
)
@receiver(pre_delete, sender=get_user_model())
def remove_unparticipated_private_threads(sender, **kwargs):
threads_qs = kwargs['instance'].private_thread_set.all()
for thread in batch_update(threads_qs, 50):
if thread.participants.count() == 1:
with transaction.atomic():
thread.delete()<|fim▁end|>
|
def merge_threads_posts(sender, **kwargs):
|
<|file_name|>rms.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
""" Request Management System - Controllers """
prefix = request.controller
resourcename = request.function
if prefix not in deployment_settings.modules:
session.error = T("Module disabled!")
redirect(URL(r=request, c="default", f="index"))
# Options Menu (available in all Functions' Views)
menu = [
[T("Home"), False, URL(r=request, f="index")],
[T("Requests"), False, URL(r=request, f="req"), [
[T("List"), False, URL(r=request, f="req")],
[T("Add"), False, URL(r=request, f="req", args="create")],
# @ToDo Search by priority, status, location
#[T("Search"), False, URL(r=request, f="req", args="search")],
]],
[T("All Requested Items"), False, URL(r=request, f="ritem")],
]
if session.rcvars:
if "hms_hospital" in session.rcvars:
hospital = db.hms_hospital
query = (hospital.id == session.rcvars["hms_hospital"])
selection = db(query).select(hospital.id, hospital.name, limitby=(0, 1)).first()
if selection:
menu_hospital = [
[selection.name, False, URL(r=request, c="hms", f="hospital", args=[selection.id])]
]
menu.extend(menu_hospital)
if "cr_shelter" in session.rcvars:
shelter = db.cr_shelter
query = (shelter.id == session.rcvars["cr_shelter"])
selection = db(query).select(shelter.id, shelter.name, limitby=(0, 1)).first()
if selection:
menu_shelter = [
[selection.name, False, URL(r=request, c="cr", f="shelter", args=[selection.id])]
]
menu.extend(menu_shelter)
response.menu_options = menu
def index():
""" Module's Home Page
Default to the rms_req list view.
"""
request.function = "req"
request.args = []
return req()
#module_name = deployment_settings.modules[prefix].name_nice
#response.title = module_name
#return dict(module_name=module_name, a=1)
def req():
""" RESTful CRUD controller """
resourcename = request.function # check again in case we're coming from index()
tablename = "%s_%s" % (prefix, resourcename)
table = db[tablename]
# Pre-processor
def prep(r):
response.s3.cancel = r.here()
if r.representation in shn_interactive_view_formats and r.method != "delete":
# Don't send the locations list to client (pulled by AJAX instead)
r.table.location_id.requires = IS_NULL_OR(IS_ONE_OF_EMPTY(db, "gis_location.id"))
#if r.method == "create" and not r.component:
# listadd arrives here as method=None
if not r.component:
table.datetime.default = request.utcnow
table.person_id.default = s3_logged_in_person()
# @ToDo Default the Organisation too
return True
response.s3.prep = prep
# Post-processor
def postp(r, output):
if r.representation in shn_interactive_view_formats:
#if r.method == "create" and not r.component:
# listadd arrives here as method=None
if r.method != "delete" and not r.component:
# Redirect to the Assessments tabs after creation
r.next = r.other(method="ritem", record_id=s3xrc.get_session(prefix, resourcename))
# Custom Action Buttons
if not r.component:
response.s3.actions = [
dict(label=str(T("Open")), _class="action-btn", url=str(URL(r=request, args=["[id]", "update"]))),
dict(label=str(T("Items")), _class="action-btn", url=str(URL(r=request, args=["[id]", "ritem"]))),
]
return output
response.s3.postp = postp
s3xrc.model.configure(table,
#listadd=False, #@todo: List add is causing errors with JS - FIX
editable=True)
return s3_rest_controller(prefix,
resourcename,
rheader=shn_rms_req_rheader)
def shn_rms_req_rheader(r):
""" Resource Header for Requests """
if r.representation == "html":
if r.name == "req":
req_record = r.record
if req_record:
_next = r.here()<|fim▁hole|> try:
location = db(db.gis_location.id == req_record.location_id).select(limitby=(0, 1)).first()
location_represent = shn_gis_location_represent(location.id)
except:
location_represent = None
rheader_tabs = shn_rheader_tabs( r,
[(T("Edit Details"), None),
(T("Items"), "ritem"),
]
)
rheader = DIV( TABLE(
TR( TH( T("Message") + ": "),
TD(req_record.message, _colspan=3)
),
TR( TH( T("Time of Request") + ": "),
req_record.datetime,
TH( T( "Location") + ": "),
location_represent,
),
TR( TH( T("Priority") + ": "),
req_record.priority,
TH( T("Document") + ": "),
document_represent(req_record.document_id)
),
),
rheader_tabs
)
return rheader
return None
def ritem():
""" RESTful CRUD controller """
tablename = "%s_%s" % (prefix, resourcename)
table = db[tablename]
s3xrc.model.configure(table, insertable=False)
return s3_rest_controller(prefix, resourcename)
def store_for_req():
store_table = None
return dict(store_table = store_table)<|fim▁end|>
|
_same = r.same()
|
<|file_name|>app-routing.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
//import { UIRouter } from "@uirouter/angular";
import { BirthplacesComponent } from './modules/birthplace/components/birthplaces/birthplaces.component';
import { SignupComponent } from './modules/login/components/signup/signup.component';
import { LoginComponent } from './modules/login/components/login/login.component';
import { BirthplaceDetailsComponent } from './modules/birthplace/components/birthplace-details/birthplace-details.component';
import { BirthplacesListComponent } from './modules/birthplace/components/birthplaces-list/birthplaces-list.component';
import { ExperienceAddBioComponent } from './modules/experience/components/experience-add-bio/experience-add-bio.component';
import { ExperienceAddUmgebungComponent } from './modules/experience/components/experience-add-umgebung/experience-add-umgebung.component';
import { ExperienceAddComponent } from './modules/experience/components/experience-add/experience-add.component';
import { UserDashboardComponent } from './modules/experience/components/user-dashboard/user-dashboard.component';
import { UserExperienceListComponent } from './modules/experience/components/user-experience-list/user-experience-list.component';
import { AuthGuard } from './shared/guards/auth.guard';
import { ExperienceAddEmotionalComponent } from './modules/experience/components/experience-add-emotional/experience-add-emotional.component';
import { ExperienceAddWochenbettComponent } from './modules/experience/components/experience-add-wochenbett/experience-add-wochenbett.component';
import { ExperienceAddMentalComponent } from './modules/experience/components/experience-add-mental/experience-add-mental.component';
import { ExperienceAddKoerperlichComponent } from './modules/experience/components/experience-add-koerperlich/experience-add-koerperlich.component';
import { AboutComponent } from './components/about/about.component';
import { ContactComponent } from './components/contact/contact.component';
import { ImpressumComponent } from './components/impressum/impressum.component';
import { BirthcriteriaComponent } from './components/birthcriteria/birthcriteria.component';
import { MetaGuard, MetaModule, MetaLoader, MetaStaticLoader, PageTitlePositioning } from '@ngx-meta/core';
import { SignupGeburtshausComponent } from './components/signup-geburtshaus/signup-geburtshaus.component';
import { SignupSpitalComponent } from './components/signup-spital/signup-spital.component';
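// Route table: static info pages, auth flows, birthplace browsing, and the
// guarded user dashboard with its nested experience wizard.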
const routes: Routes = [
{ path: '', redirectTo: '/birthplaces', pathMatch: 'full' },
{
path: 'about',
canActivate: [MetaGuard],
component: AboutComponent,
data: {
meta: {
title: 'Über uns'
}
}
},
{
path: 'birthcriteria',
canActivate: [MetaGuard],
component: BirthcriteriaComponent,
data: {
meta: {
title: 'Qualitätskatalog'
}
}
},
{
path: 'contact',
canActivate: [MetaGuard],
component: ContactComponent,
data: {
meta: {
title: 'Kontakt'
}
}
},
{
path: 'impressum',
canActivate: [MetaGuard],
component: ImpressumComponent,
data: {
meta: {
title: 'Impressum'
}
}
},
{
path: 'signup-geburtshaus',
canActivate: [MetaGuard],
component: SignupGeburtshausComponent,
data: {
meta: {
title: 'Geburtshaus Anmelden'
}
}
},
{
path: 'signup-spital',
canActivate: [MetaGuard],
component: SignupSpitalComponent,
data: {
meta: {
title: 'Spital Anmelden'
}
}
},
{
path: 'login',
canActivate: [MetaGuard],
component: LoginComponent,
data: {
meta: {
title: 'Einloggen'
}
}
},
{
path: 'signup',
canActivate: [MetaGuard],
component: SignupComponent,
data: {
meta: {
title: 'Anmelden'
}
}
},
{
path: 'birthplaces',
canActivateChild: [MetaGuard],
component: BirthplacesComponent,
children: [
{
path: '',
component: BirthplacesListComponent,
data: {
meta: {
title: 'Willkommen'
}
}
},
{
path: 'details/:id',
component: BirthplaceDetailsComponent,
data: {
meta: {
title: 'Details'
}
}
}
]
},
{
path: 'user-dashboard', component: UserDashboardComponent,
canActivate: [AuthGuard, MetaGuard],
data: {
meta: {
title: 'Ihre Geburtserlebnisse'
}
},
children: [
{ path: '', component: UserExperienceListComponent },
{
path: 'experience/new', component: ExperienceAddComponent,
children: [
{ path: '', component: ExperienceAddBioComponent },
{ path: 'bio', component: ExperienceAddBioComponent },
{ path: 'umgebung', component: ExperienceAddUmgebungComponent },
{ path: 'emotional', component: ExperienceAddEmotionalComponent },
{ path: 'koerperlich', component: ExperienceAddKoerperlichComponent },
{ path: 'mental', component: ExperienceAddMentalComponent },
{ path: 'wochenbett', component: ExperienceAddWochenbettComponent }
]
}
]
}
];
export function metaFactory(): MetaLoader {
return new MetaStaticLoader({<|fim▁hole|> pageTitlePositioning: PageTitlePositioning.PrependPageTitle,
pageTitleSeparator: ' - ',
applicationName: 'birthhood',
defaults: {
title: 'Der Geburtsortfinder',
description: 'Finden Sie den geeigneten Geburtsort für Sie und Ihr Kind',
'og:image': 'https://birthhood.org/icon.png',
'og:type': 'website',
'og:locale': 'ch_DE',
'og:locale:alternate': '',
'twitter:title': '',
}
});
}
@NgModule({
imports: [
RouterModule.forRoot(
routes/*,
{ enableTracing: true } */// <-- debugging purposes only
),
MetaModule.forRoot({
provide: MetaLoader,
useFactory: (metaFactory)
})
],
exports: [RouterModule]
})
export class AppRoutingModule { }<|fim▁end|>
| |
<|file_name|>lists.js<|end_file_name|><|fim▁begin|>// *****************************************************************************
// Copyright 2013-2019 Aerospike, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License")
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// *****************************************************************************
'use strict'
/* eslint-env mocha */
/* global expect */
const Aerospike = require('../lib/aerospike')
const helper = require('./test_helper')
const AerospikeError = Aerospike.AerospikeError
const lists = Aerospike.lists
const ops = Aerospike.operations
const Context = Aerospike.cdt.Context
const status = Aerospike.status
const eql = require('deep-eql')
const {
assertError,
assertRecordEql,
assertResultEql,
assertResultSatisfy,
cleanup,
createRecord,
expectError,
initState,
operate
} = require('./util/statefulAsyncTest')
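// Helper: switches a list bin to ORDERED order, optionally inside a nested CDT context.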
const orderList = (bin, ctx) => {
const setListOrder = lists.setOrder(bin, lists.order.ORDERED)
if (ctx) setListOrder.withContext(ctx)
return operate(setListOrder)
}
describe('client.operate() - CDT List operations', function () {
helper.skipUnlessSupportsFeature(Aerospike.features.CDT_LIST, this)
let ListOutOfBoundsError
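// Servers >= 4.6.0 report out-of-bounds list access as ERR_OP_NOT_APPLICABLE;
// older servers return ERR_REQUEST_INVALID.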
before(() => {
ListOutOfBoundsError = helper.cluster.isVersionInRange('>=4.6.0')
? status.ERR_OP_NOT_APPLICABLE
: status.ERR_REQUEST_INVALID
})
describe('lists.setOrder', function () {
it('changes the list order', function () {
return initState()
.then(createRecord({ list: [3, 1, 2] }))
.then(operate([
lists.setOrder('list', lists.order.ORDERED),
ops.read('list')
]))
.then(assertResultEql({ list: [1, 2, 3] }))
.then(cleanup())
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('changes the order of a nested list', function () {
return initState()
.then(createRecord({ list: [[3, 1, 2], [6, 5, 4]] }))
.then(operate([
lists.setOrder('list', lists.order.ORDERED).withContext(ctx => ctx.addListIndex(0)),
lists.setOrder('list', lists.order.ORDERED).withContext(ctx => ctx.addListIndex(1)),
ops.read('list')
]))
.then(assertResultEql({ list: [[1, 2, 3], [4, 5, 6]] }))
.then(cleanup())
})
})
})
describe('lists.sort', function () {
it('sorts the list', function () {
return initState()
.then(createRecord({ list: [3, 1, 2, 1] }))
.then(operate([
lists.sort('list', lists.sortFlags.DEFAULT),
ops.read('list')
]))
.then(assertResultEql({ list: [1, 1, 2, 3] }))
.then(cleanup())
})
context('with DROP_DUPLICATES flag', function () {
it('sorts the list and drops duplicates', function () {
return initState()
.then(createRecord({ list: [3, 1, 2, 1] }))
.then(operate([
lists.sort('list', lists.sortFlags.DROP_DUPLICATES),
ops.read('list')
]))
.then(assertResultEql({ list: [1, 2, 3] }))
.then(cleanup())
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('sorts a nested list', function () {
return initState()
.then(createRecord({ list: [['a', 'b', 'c'], [3, 1, 2, 1]] }))
.then(operate([
lists.sort('list', lists.sortFlags.DEFAULT).withContext(ctx => ctx.addListIndex(-1)),
ops.read('list')
]))
.then(assertResultEql({ list: [['a', 'b', 'c'], [1, 1, 2, 3]] }))
.then(cleanup())
})
})
})
describe('lists.append', function () {
it('appends an item to the list and returns the list size', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.append('list', 99)))
.then(assertResultEql({ list: 6 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5, 99] }))
.then(cleanup)
})
context('with add-unique flag', function () {
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE
}
it('returns an error when trying to append a non-unique element', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.append('list', 3, policy)))
.then(assertError(status.ERR_FAIL_ELEMENT_EXISTS))
.then(cleanup)
})
context('with no-fail flag', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL
}
it('does not append the item but returns ok', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.append('list', 3, policy)))
.then(assertResultEql({ list: 5 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5] }))
.then(cleanup)
})
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('appends a value to a nested list', function () {
return initState()
.then(createRecord({ list: [1, 2, ['a', 'b', 'c'], 4, 5] }))
.then(operate(lists.append('list', 'd').withContext(ctx => ctx.addListIndex(2))))
.then(assertResultEql({ list: 4 }))
.then(assertRecordEql({ list: [1, 2, ['a', 'b', 'c', 'd'], 4, 5] }))
.then(cleanup)
})
})
})
describe('lists.appendItems', function () {
it('appends the items to the list and returns the list size', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.appendItems('list', [99, 100])))
.then(assertResultEql({ list: 7 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5, 99, 100] }))
.then(cleanup)
})
it('returns an error if the value to append is not an array', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.appendItems('list', 99)))
.then(assertError(status.ERR_PARAM))
.then(cleanup)
})
context('with add-unique flag', function () {
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE
}
it('returns an error when appending duplicate items', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.appendItems('list', [3, 6], policy)))
.then(assertError(status.ERR_FAIL_ELEMENT_EXISTS))
.then(cleanup)
})
context('with no-fail flag', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL
}
it('does not append any items but returns ok', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.appendItems('list', [3, 6], policy)))
.then(assertResultEql({ list: 5 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5] }))
.then(cleanup)
})
context('with partial flag', function () {
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL | lists.writeFlags.PARTIAL
}
it('appends only the unique items', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.appendItems('list', [3, 6], policy)))
.then(assertResultEql({ list: 6 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5, 6] }))
.then(cleanup)
})
})
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('appends the items to a nested list', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(lists.appendItems('map', [99, 100]).withContext(ctx => ctx.addMapKey('list'))))
.then(assertResultEql({ map: 7 }))
.then(assertRecordEql({ map: { list: [1, 2, 3, 4, 5, 99, 100] } }))
.then(cleanup)
})
})
})
describe('lists.insert', function () {
it('inserts the item at the specified index and returns the list size', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.insert('list', 2, 99)))
.then(assertResultEql({ list: 6 }))
.then(assertRecordEql({ list: [1, 2, 99, 3, 4, 5] }))
.then(cleanup)
})
context('with add-unique flag', function () {
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE
}
it('returns an error when trying to insert a non-unique element', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.insert('list', 2, 3, policy)))
.then(assertError(status.ERR_FAIL_ELEMENT_EXISTS))
.then(cleanup)
})
context('with no-fail flag', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL
}
it('does not insert the item but returns ok', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.insert('list', 2, 3, policy)))
.then(assertResultEql({ list: 5 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5] }))
.then(cleanup)
})
})
})
context('with insert-bounded flag', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
const policy = new Aerospike.ListPolicy({
writeFlags: lists.writeFlags.INSERT_BOUNDED
})
it('returns an error when trying to insert an item outside the current bounds of the list', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.insert('list', 10, 99, policy)))
.then(assertError(ListOutOfBoundsError))
.then(cleanup)
})
context('with no-fail flag', function () {
const policy = new Aerospike.ListPolicy({
writeFlags: lists.writeFlags.INSERT_BOUNDED | lists.writeFlags.NO_FAIL
})
it('does not insert an item outside bounds, but returns ok', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.insert('list', 10, 99, policy)))
.then(assertResultEql({ list: 5 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5] }))
.then(cleanup)
})
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('inserts the item at the specified index of a nested list', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(lists.insert('map', 2, 99).withContext(ctx => ctx.addMapKey('list'))))
.then(assertResultEql({ map: 6 }))
.then(assertRecordEql({ map: { list: [1, 2, 99, 3, 4, 5] } }))
.then(cleanup)
})
})
})
describe('lists.insertItems', function () {
it('inserts the items at the specified index and returns the list size', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.insertItems('list', 2, [99, 100])))
.then(assertResultEql({ list: 7 }))
.then(assertRecordEql({ list: [1, 2, 99, 100, 3, 4, 5] }))
.then(cleanup)
})
it('returns an error if the value to insert is not an array', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.insertItems('list', 2, 99)))
.then(assertError(status.ERR_PARAM))
.then(cleanup)
})
context('with add-unique flag', function () {
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE
}
it('returns an error when trying to insert items that already exist in the list', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.insertItems('list', 2, [3, 99], policy)))
.then(assertError(status.ERR_FAIL_ELEMENT_EXISTS))
.then(cleanup)
})
context('with no-fail flag', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL
}
it('does not insert any items but returns ok', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.insertItems('list', 2, [3, 99], policy)))
.then(assertResultEql({ list: 5 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5] }))
.then(cleanup)
})
context('with partial flag', function () {
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL | lists.writeFlags.PARTIAL
}<|fim▁hole|>
it('inserts only the unique items', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.insertItems('list', 2, [3, 99], policy)))
.then(assertResultEql({ list: 6 }))
.then(assertRecordEql({ list: [1, 2, 99, 3, 4, 5] }))
.then(cleanup)
})
})
})
})
context('with insert-bounded flag', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
const policy = new Aerospike.ListPolicy({
writeFlags: lists.writeFlags.INSERT_BOUNDED
})
it('returns an error when trying to insert items outside the current bounds of the list', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.insertItems('list', 10, [99, 100], policy)))
.then(assertError(ListOutOfBoundsError))
.then(cleanup)
})
context('with no-fail flag', function () {
const policy = new Aerospike.ListPolicy({
writeFlags: lists.writeFlags.INSERT_BOUNDED | lists.writeFlags.NO_FAIL
})
it('does not insert the items outside bounds, but returns ok', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.insertItems('list', 10, [99, 100], policy)))
.then(assertResultEql({ list: 5 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5] }))
.then(cleanup)
})
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('inserts the items at the specified index of a nested list', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(lists.insertItems('map', 2, [99, 100]).withContext(ctx => ctx.addMapKey('list'))))
.then(assertResultEql({ map: 7 }))
.then(assertRecordEql({ map: { list: [1, 2, 99, 100, 3, 4, 5] } }))
.then(cleanup)
})
})
})
describe('lists.pop', function () {
it('removes the item at the specified index and returns it', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.pop('list', 2)))
.then(assertResultEql({ list: 3 }))
.then(assertRecordEql({ list: [1, 2, 4, 5] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes the item at the specified index and returns it', function () {
return initState()
.then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] }))
.then(operate(lists.pop('list', 2).withContext(ctx => ctx.addListIndex(0))))
.then(assertResultEql({ list: 3 }))
.then(assertRecordEql({ list: [[1, 2, 4, 5], [6, 7, 8]] }))
.then(cleanup)
})
})
})
describe('lists.popRange', function () {
it('removes the items at the specified range and returns them', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.popRange('list', 2, 2)))
.then(assertResultEql({ list: [3, 4] }))
.then(assertRecordEql({ list: [1, 2, 5] }))
.then(cleanup)
})
it('removes and returns all items starting from the specified index if count is not specified', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.popRange('list', 2)))
.then(assertResultEql({ list: [3, 4, 5] }))
.then(assertRecordEql({ list: [1, 2] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes the items in the specified range and returns them', function () {
return initState()
.then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] }))
.then(operate(lists.popRange('list', 2).withContext(ctx => ctx.addListIndex(1))))
.then(assertResultEql({ list: [8] }))
.then(assertRecordEql({ list: [[1, 2, 3, 4, 5], [6, 7]] }))
.then(cleanup)
})
})
})
describe('lists.remove', function () {
it('removes the item at the specified index and returns the number of items removed', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.remove('list', 2)))
.then(assertResultEql({ list: 1 }))
.then(assertRecordEql({ list: [1, 2, 4, 5] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes the item at the specified index', function () {
return initState()
.then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] }))
.then(operate(lists.remove('list', 2).withContext(ctx => ctx.addListIndex(1))))
.then(assertResultEql({ list: 1 }))
.then(assertRecordEql({ list: [[1, 2, 3, 4, 5], [6, 7]] }))
.then(cleanup)
})
})
})
describe('lists.removeRange', function () {
it('removes the items in the specified range and returns the number of items removed', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.removeRange('list', 2, 2)))
.then(assertResultEql({ list: 2 }))
.then(assertRecordEql({ list: [1, 2, 5] }))
.then(cleanup)
})
it('removes all items starting from the specified index and returns the number of items removed if count is not specified', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.removeRange('list', 2)))
.then(assertResultEql({ list: 3 }))
.then(assertRecordEql({ list: [1, 2] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes the items in the specified range', function () {
return initState()
.then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] }))
.then(operate(lists.removeRange('list', 1, 3).withContext(ctx => ctx.addListIndex(0))))
.then(assertResultEql({ list: 3 }))
.then(assertRecordEql({ list: [[1, 5], [6, 7, 8]] }))
.then(cleanup)
})
})
})
describe('lists.removeByIndex', function () {
context('returnType=VALUE', function () {
it('removes the item at the specified index and returns the value', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.removeByIndex('list', 2).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: 3 }))
.then(assertRecordEql({ list: [1, 2, 4, 5] }))
.then(cleanup)
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes the item at the specified index', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(lists.removeByIndex('map', 2).withContext(ctx => ctx.addMapKey('list'))))
.then(assertRecordEql({ map: { list: [1, 2, 4, 5] } }))
.then(cleanup)
})
})
})
describe('lists.removeByIndexRange', function () {
context('returnType=VALUE', function () {
it('removes the items in the specified range and returns the values', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.removeByIndexRange('list', 2, 2).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: [3, 4] }))
.then(assertRecordEql({ list: [1, 2, 5] }))
.then(cleanup)
})
it('removes the items starting from the specified index and returns the values', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.removeByIndexRange('list', 2).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: [3, 4, 5] }))
.then(assertRecordEql({ list: [1, 2] }))
.then(cleanup)
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes the items in the specified range', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(lists.removeByIndexRange('map', 1, 3).withContext(ctx => ctx.addMapKey('list'))))
.then(assertRecordEql({ map: { list: [1, 5] } }))
.then(cleanup)
})
})
})
describe('lists.removeByValue', function () {
context('returnType=INDEX', function () {
it('removes all items with the specified value and returns the indexes', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 1, 2, 3] }))
.then(operate(lists.removeByValue('list', 3).andReturn(lists.returnType.INDEX)))
.then(assertResultEql({ list: [2, 5] }))
.then(assertRecordEql({ list: [1, 2, 1, 2] }))
.then(cleanup)
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes all items with the specified value', function () {
return initState()
.then(createRecord({ list: [[3, 2, 1], [1, 2, 3, 1, 2, 3]] }))
.then(operate(lists.removeByValue('list', 3).withContext(ctx => ctx.addListValue([3, 2, 1]))))
.then(assertRecordEql({ list: [[2, 1], [1, 2, 3, 1, 2, 3]] }))
.then(cleanup)
})
})
})
describe('lists.removeByValueList', function () {
context('returnType=INDEX', function () {
it('removes all items with the specified values and returns the indexes', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 1, 2, 3] }))
.then(operate(lists.removeByValueList('list', [1, 3]).andReturn(lists.returnType.INDEX)))
.then(assertResultEql({ list: [0, 2, 3, 5] }))
.then(assertRecordEql({ list: [2, 2] }))
.then(cleanup)
})
})
context('invert results', function () {
it('removes all items except those with the specified values', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 1, 2, 3] }))
.then(operate(lists.removeByValueList('list', [1, 3]).invertSelection()))
.then(assertRecordEql({ list: [1, 3, 1, 3] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes all items except those with the specified values', function () {
return initState()
.then(createRecord({ list: [[3, 2, 1], [1, 2, 3, 1, 2, 3]] }))
.then(operate(
lists
.removeByValueList('list', [1, 4])
.withContext(ctx => ctx.addListIndex(-1))
.invertSelection()
))
.then(assertRecordEql({ list: [[3, 2, 1], [1, 1]] }))
.then(cleanup)
})
})
})
})
describe('lists.removeByValueRange', function () {
context('returnType=INDEX', function () {
it('removes all items in the specified range of values and returns the indexes', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.removeByValueRange('list', 2, 5).andReturn(lists.returnType.INDEX)))
.then(assertResultEql({ list: [1, 2, 3] }))
.then(assertRecordEql({ list: [1, 5] }))
.then(cleanup)
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes all items in the specified range of values', function () {
return initState()
.then(createRecord({ list: [[1, 2, 3, 4, 5], [6, 7, 8]] }))
.then(operate(lists.removeByValueRange('list', 2, 5).withContext(ctx => ctx.addListIndex(0))))
.then(assertRecordEql({ list: [[1, 5], [6, 7, 8]] }))
.then(cleanup)
})
})
})
describe('lists.removeByValueRelRankRange', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
context('with count', function () {
it('removes all items nearest to value and greater, by relative rank', function () {
return initState()
.then(createRecord({ list: [0, 4, 5, 9, 11, 15] }))
.then(orderList('list'))
.then(operate(lists.removeByValueRelRankRange('list', 5, 0, 2).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: [5, 9] }))
.then(assertRecordEql({ list: [0, 4, 11, 15] }))
.then(cleanup)
})
})
context('without count', function () {
it('removes all items nearest to value and greater, by relative rank', function () {
return initState()
.then(createRecord({ list: [0, 4, 5, 9, 11, 15] }))
.then(orderList('list'))
.then(operate(lists.removeByValueRelRankRange('list', 5, 0).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: [5, 9, 11, 15] }))
.then(assertRecordEql({ list: [0, 4] }))
.then(cleanup)
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes all items nearest to value and greater, by relative rank', function () {
const listContext = new Context().addMapKey('list')
return initState()
.then(createRecord({ map: { list: [0, 4, 5, 9, 11, 15] } }))
.then(orderList('map', listContext))
.then(operate(lists.removeByValueRelRankRange('map', 5, 0, 2).withContext(listContext)))
.then(assertRecordEql({ map: { list: [0, 4, 11, 15] } }))
.then(cleanup)
})
})
})
describe('lists.removeByRank', function () {
context('returnType=VALUE', function () {
it('removes the item with the specified list rank and returns the value', function () {
return initState()
.then(createRecord({ list: [3, 1, 2, 4] }))
.then(operate(lists.removeByRank('list', 1).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: 2 }))
.then(assertRecordEql({ list: [3, 1, 4] }))
.then(cleanup)
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes the item with the specified list rank', function () {
return initState()
.then(createRecord({ list: [[2, 3, 1, 4], [3, 1, 2, 4]] }))
.then(operate(lists.removeByRank('list', 1).withContext(ctx => ctx.addListIndex(1))))
.then(assertRecordEql({ list: [[2, 3, 1, 4], [3, 1, 4]] }))
.then(cleanup)
})
})
})
describe('lists.removeByRankRange', function () {
context('returnType=VALUE', function () {
it('removes the items in the specified rank range and returns the values', function () {
return initState()
.then(createRecord({ list: [3, 1, 2, 5, 4] }))
.then(operate(lists.removeByRankRange('list', 1, 3).andReturn(lists.returnType.VALUE)))
.then(assertResultSatisfy(result => eql(result.list.sort(), [2, 3, 4])))
.then(assertRecordEql({ list: [1, 5] }))
.then(cleanup)
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes the items in the specified rank range', function () {
return initState()
.then(createRecord({ list: [[3, 1, 2, 5, 4], [1, 2, 3]] }))
.then(operate(lists.removeByRankRange('list', 1, 3).withContext(ctx => ctx.addListIndex(0))))
.then(assertRecordEql({ list: [[1, 5], [1, 2, 3]] }))
.then(cleanup)
})
})
})
describe('lists.clear', function () {
it('removes all elements from the list', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.clear('list')))
.then(assertRecordEql({ list: [] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes all elements from the list', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(lists.clear('map').withContext(ctx => ctx.addMapKey('list'))))
.then(assertRecordEql({ map: { list: [] } }))
.then(cleanup)
})
})
})
describe('lists.set', function () {
it('sets the item at the specified index', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.set('list', 2, 99)))
.then(assertRecordEql({ list: [1, 2, 99, 4, 5] }))
.then(cleanup)
})
context('with add-unique flag', function () {
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE
}
it('fails with an error if the value already exists in the list', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.set('list', 2, 5, policy)))
.then(assertError(status.ERR_FAIL_ELEMENT_EXISTS))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5] }))
.then(cleanup)
})
context('with no-fail flag', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL
}
it('does not set the value but returns ok', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.set('list', 2, 5, policy)))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5] }))
.then(cleanup)
})
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('sets the item at the specified index', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(lists.set('map', 2, 99).withContext(ctx => ctx.addMapKey('list'))))
.then(assertRecordEql({ map: { list: [1, 2, 99, 4, 5] } }))
.then(cleanup)
})
})
})
describe('lists.trim', function () {
it('removes all elements not within the specified range and returns the number of elements removed', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.trim('list', 1, 3)))
.then(assertResultEql({ list: 2 }))
.then(assertRecordEql({ list: [2, 3, 4] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('removes all elements not within the specified range', function () {
return initState()
.then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 4, 5]] }))
.then(operate(lists.trim('list', 1, 3).withContext(ctx => ctx.addListValue([1, 2, 3, 4, 5]))))
.then(assertResultEql({ list: 2 }))
.then(assertRecordEql({ list: [['a', 'b', 'c'], [2, 3, 4]] }))
.then(cleanup)
})
})
})
describe('lists.get', function () {
it('returns the item at the specified index', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.get('list', 2)))
.then(assertResultEql({ list: 3 }))
.then(cleanup)
})
it('should return an error if the index is out of bounds', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.get('list', 99)))
.then(assertError(ListOutOfBoundsError))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('returns the item at the specified index', function () {
return initState()
.then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 4, 5]] }))
.then(operate(lists.get('list', 2).withContext(ctx => ctx.addListIndex(1))))
.then(assertResultEql({ list: 3 }))
.then(cleanup)
})
})
})
describe('lists.getRange', function () {
it('returns the items in the specified range', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.getRange('list', 1, 3)))
.then(assertResultEql({ list: [2, 3, 4] }))
.then(cleanup)
})
it('returns all items starting at the specified index if count is not specified', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.getRange('list', 1)))
.then(assertResultEql({ list: [2, 3, 4, 5] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('returns the items in the specified range', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(lists.getRange('map', 1, 3).withContext(ctx => ctx.addMapKey('list'))))
.then(assertResultEql({ map: [2, 3, 4] }))
.then(cleanup)
})
})
})
describe('lists.getByIndex', function () {
context('returnType=VALUE', function () {
it('fetches the item at the specified index and returns its value', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.getByIndex('list', 2).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: 3 }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('fetches the item at the specified index and returns its value', function () {
return initState()
.then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 4, 5]] }))
.then(operate(
lists
.getByIndex('list', 2)
.withContext(ctx => ctx.addListIndex(1))
.andReturn(lists.returnType.VALUE)
))
.then(assertResultEql({ list: 3 }))
.then(cleanup)
})
})
})
})
describe('lists.getByIndexRange', function () {
context('returnType=VALUE', function () {
it('fetches the items in the specified range and returns the values', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.getByIndexRange('list', 2, 2).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: [3, 4] }))
.then(cleanup)
})
it('fetches the items starting from the specified index and returns the values', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.getByIndexRange('list', 2).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: [3, 4, 5] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('fetches the items in the specified range and returns the values', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(
lists
.getByIndexRange('map', 2, 2)
.withContext(ctx => ctx.addMapKey('list'))
.andReturn(lists.returnType.VALUE)
))
.then(assertResultEql({ map: [3, 4] }))
.then(cleanup)
})
})
})
})
describe('lists.getByValue', function () {
context('returnType=INDEX', function () {
it('fetches all items with the specified value and returns the indexes', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 1, 2, 3] }))
.then(operate(lists.getByValue('list', 3).andReturn(lists.returnType.INDEX)))
.then(assertResultEql({ list: [2, 5] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('fetches all items with the specified value and returns the indexes', function () {
return initState()
.then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 1, 2, 3]] }))
.then(operate(
lists
.getByValue('list', 3)
.withContext(ctx => ctx.addListIndex(1))
.andReturn(lists.returnType.INDEX)
))
.then(assertResultEql({ list: [2, 5] }))
.then(cleanup)
})
})
})
})
describe('lists.getByValueList', function () {
context('returnType=INDEX', function () {
it('fetches all items with the specified values and returns the indexes', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 1, 2, 3] }))
.then(operate(lists.getByValueList('list', [1, 3]).andReturn(lists.returnType.INDEX)))
.then(assertResultEql({ list: [0, 2, 3, 5] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('fetches all items with the specified values and returns the indexes', function () {
return initState()
.then(createRecord({ list: [['a', 'b', 'c'], [1, 2, 3, 1, 2, 3]] }))
.then(operate(
lists
.getByValueList('list', [1, 3])
.withContext(ctx => ctx.addListIndex(1))
.andReturn(lists.returnType.INDEX)
))
.then(assertResultEql({ list: [0, 2, 3, 5] }))
.then(cleanup)
})
})
})
context('invert results', function () {
it('fetches all items except those with the specified values', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 1, 2, 3] }))
.then(operate(lists.getByValueList('list', [1, 3]).invertSelection().andReturn(lists.returnType.INDEX)))
.then(assertResultEql({ list: [1, 4] }))
.then(cleanup)
})
})
})
describe('lists.getByValueRange', function () {
context('returnType=INDEX', function () {
it('fetches all items in the specified range of values and returns the indexes', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.getByValueRange('list', 2, 5).andReturn(lists.returnType.INDEX)))
.then(assertResultEql({ list: [1, 2, 3] }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('fetches all items in the specified range of values and returns the indexes', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(
lists
.getByValueRange('map', 2, 5)
.withContext(ctx => ctx.addMapKey('list'))
.andReturn(lists.returnType.INDEX)
))
.then(assertResultEql({ map: [1, 2, 3] }))
.then(cleanup)
})
})
})
})
describe('lists.getByValueRelRankRange', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
context('with count', function () {
it('fetches all items nearest to value and greater, by relative rank', function () {
return initState()
.then(createRecord({ list: [0, 4, 5, 9, 11, 15] }))
.then(orderList('list'))
.then(operate(lists.getByValueRelRankRange('list', 5, 0, 2).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: [5, 9] }))
.then(cleanup)
})
})
context('without count', function () {
it('fetches all items nearest to value and greater, by relative rank', function () {
return initState()
.then(createRecord({ list: [0, 4, 5, 9, 11, 15] }))
.then(orderList('list'))
.then(operate(lists.getByValueRelRankRange('list', 5, 0).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: [5, 9, 11, 15] }))
.then(cleanup)
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('fetches all items nearest to value and greater, by relative rank', function () {
const listContext = new Context().addMapKey('list')
return initState()
.then(createRecord({ map: { list: [0, 4, 5, 9, 11, 15] } }))
.then(orderList('map', listContext))
.then(operate(
lists
.getByValueRelRankRange('map', 5, 0, 2)
.withContext(listContext)
.andReturn(lists.returnType.VALUE)
))
.then(assertResultEql({ map: [5, 9] }))
.then(cleanup)
})
})
})
describe('lists.getByRank', function () {
context('returnType=VALUE', function () {
it('fetches the item with the specified list rank and returns the value', function () {
return initState()
.then(createRecord({ list: [3, 1, 2, 4] }))
.then(operate(lists.getByRank('list', 1).andReturn(lists.returnType.VALUE)))
.then(assertResultEql({ list: 2 }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('fetches the item with the specified list rank and returns the value', function () {
return initState()
.then(createRecord({ list: [[3, 1, 2, 4], ['a', 'b', 'c']] }))
.then(operate(
lists
.getByRank('list', 1)
.withContext(ctx => ctx.addListIndex(0))
.andReturn(lists.returnType.VALUE)
))
.then(assertResultEql({ list: 2 }))
.then(cleanup)
})
})
})
})
describe('lists.getByRankRange', function () {
context('returnType=VALUE', function () {
it('fetches the items in the specified rank range and returns the values', function () {
return initState()
.then(createRecord({ list: [3, 1, 2, 5, 4] }))
.then(operate(lists.getByRankRange('list', 1, 3).andReturn(lists.returnType.VALUE)))
.then(assertResultSatisfy(result => eql(result.list.sort(), [2, 3, 4])))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('fetches the items in the specified rank range and returns the values', function () {
return initState()
.then(createRecord({ list: [[3, 1, 2, 5, 4], ['a', 'b', 'c']] }))
.then(operate(
lists
.getByRankRange('list', 1, 3)
.withContext(ctx => ctx.addListIndex(0))
.andReturn(lists.returnType.VALUE)
))
.then(assertResultSatisfy(result => eql(result.list.sort(), [2, 3, 4])))
.then(cleanup)
})
})
})
})
describe('lists.increment', function () {
helper.skipUnlessVersion('>= 3.15.0', this)
it('increments the element at the specified index and returns the final value', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.increment('list', 1, 3)))
.then(assertResultEql({ list: 5 }))
.then(assertRecordEql({ list: [1, 5, 3, 4, 5] }))
.then(cleanup)
})
it('increments the element at the specified index by one and returns the final value', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.increment('list', 2)))
.then(assertResultEql({ list: 4 }))
.then(assertRecordEql({ list: [1, 2, 4, 4, 5] }))
.then(cleanup)
})
context('ordered lists', function () {
it('reorders the list with the incremented value', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(orderList('list'))
.then(operate(lists.increment('list', 2, 10)))
.then(assertResultEql({ list: 13 }))
.then(assertRecordEql({ list: [1, 2, 4, 5, 13] }))
.then(cleanup)
})
})
context('with add-unique flag', function () {
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE
}
it('fails with an error if the incremented number already exists in the list', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(expectError())
.then(operate(lists.increment('list', 2, 1, policy)))
.then(assertError(status.ERR_FAIL_ELEMENT_EXISTS))
.then(cleanup)
})
context('with no-fail flag', function () {
helper.skipUnlessVersion('>= 4.3.0', this)
const policy = {
writeFlags: lists.writeFlags.ADD_UNIQUE | lists.writeFlags.NO_FAIL
}
it('does not increment the item but returns ok', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.increment('list', 2, 1, policy)))
// Note: Operation returns post-increment value even though
// operation was not executed due to add-unique constraint!
.then(assertResultEql({ list: 4 }))
.then(assertRecordEql({ list: [1, 2, 3, 4, 5] }))
.then(cleanup)
})
})
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('increments the element at the specified index and returns the final value', function () {
return initState()
.then(createRecord({ map: { list: [1, 2, 3, 4, 5] } }))
.then(operate(lists.increment('map', 1, 3).withContext(ctx => ctx.addMapKey('list'))))
.then(assertResultEql({ map: 5 }))
.then(assertRecordEql({ map: { list: [1, 5, 3, 4, 5] } }))
.then(cleanup)
})
})
})
describe('lists.size', function () {
it('returns the element count', function () {
return initState()
.then(createRecord({ list: [1, 2, 3, 4, 5] }))
.then(operate(lists.size('list')))
.then(assertResultEql({ list: 5 }))
.then(cleanup)
})
context('with nested list context', function () {
helper.skipUnlessVersion('>= 4.6.0', this)
it('returns the element count', function () {
return initState()
.then(createRecord({ list: [[], [1, 2, 3, 4, 5]] }))
.then(operate(lists.size('list').withContext(ctx => ctx.addListIndex(-1))))
.then(assertResultEql({ list: 5 }))
.then(cleanup)
})
})
})
describe('ListOperation', function () {
describe('#invertSelection', function () {
it('throws an error if the operation is not invertible', function () {
const op = lists.size('lists')
expect(() => op.invertSelection()).to.throw(AerospikeError, 'List operation cannot be inverted')
})
})
})
})<|fim▁end|>
| |
<|file_name|>filters.py<|end_file_name|><|fim▁begin|>import django_filters
from rest_framework import filters
from waldur_core.core import filters as core_filters
from . import models
class InvoiceFilter(django_filters.FilterSet):
customer = core_filters.URLFilter(
view_name='customer-detail', field_name='customer__uuid'
)
customer_uuid = django_filters.UUIDFilter(field_name='customer__uuid')
state = django_filters.MultipleChoiceFilter(choices=models.Invoice.States.CHOICES)
start_date = django_filters.DateFilter(field_name='created', lookup_expr='gt')
end_date = django_filters.DateFilter(field_name='created', lookup_expr='lt')
o = django_filters.OrderingFilter(fields=('created', 'year', 'month'))
class Meta:
model = models.Invoice
fields = ['created', 'year', 'month']
class PaymentProfileFilter(django_filters.FilterSet):
organization = core_filters.URLFilter(
view_name='customer-detail', field_name='organization__uuid'
)
organization_uuid = django_filters.UUIDFilter(field_name='organization__uuid')
payment_type = django_filters.MultipleChoiceFilter(
choices=models.PaymentType.CHOICES
)
o = django_filters.OrderingFilter(fields=('name', 'payment_type', 'is_active'))
class Meta:
model = models.PaymentProfile
fields = []
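# Staff and support users see all payment profiles; other users only active ones.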
class PaymentProfileFilterBackend(filters.BaseFilterBackend):
def filter_queryset(self, request, queryset, view):
if request.user.is_staff or request.user.is_support:
return queryset<|fim▁hole|>
class PaymentFilter(django_filters.FilterSet):
profile = core_filters.URLFilter(
view_name='payment-profile-detail', field_name='profile__uuid'
)
profile_uuid = django_filters.UUIDFilter(field_name='profile__uuid')
class Meta:
model = models.Payment
fields = ['date_of_payment']<|fim▁end|>
|
return queryset.filter(is_active=True)
|
<|file_name|>RecipientsActivity.java<|end_file_name|><|fim▁begin|>package com.elionhaxhi.ribbit.ui;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.app.AlertDialog;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.NavUtils;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.Window;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.elionhaxhi.ribbit.R;
import com.elionhaxhi.ribbit.adapters.UserAdapter;
import com.elionhaxhi.ribbit.utils.FileHelper;
import com.elionhaxhi.ribbit.utils.ParseConstants;
import com.parse.FindCallback;
import com.parse.ParseException;
import com.parse.ParseFile;
import com.parse.ParseInstallation;
import com.parse.ParseObject;
import com.parse.ParsePush;
import com.parse.ParseQuery;
import com.parse.ParseRelation;
import com.parse.ParseUser;
import com.parse.SaveCallback;
public class RecipientsActivity extends Activity {<|fim▁hole|> protected ParseRelation<ParseUser> mFriendsRelation;
protected ParseUser mCurrentUser;
protected MenuItem mSendMenuItem;
protected Uri mMediaUri;
protected String mFileType;
protected GridView mGridView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS);
setContentView(R.layout.user_grid);
//setupActionBar();
mGridView =(GridView)findViewById(R.id.friendsGrid);
mGridView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE);
mGridView.setOnItemClickListener(mOnItemClickListener);
TextView emptyTextView = (TextView)findViewById(android.R.id.empty);
mGridView.setEmptyView(emptyTextView);
mMediaUri = getIntent().getData();
mFileType = getIntent().getExtras().getString(ParseConstants.KEY_FILE_TYPE);
}
@Override
public void onResume(){
super.onResume();
mCurrentUser = ParseUser.getCurrentUser();
mFriendsRelation = mCurrentUser.getRelation(ParseConstants.KEY_FRIENDS_RELATION);
setProgressBarIndeterminateVisibility(true);
ParseQuery<ParseUser> query = mFriendsRelation.getQuery();
query.addAscendingOrder(ParseConstants.KEY_USERNAME);
query.findInBackground(new FindCallback<ParseUser>(){
@Override
public void done(List<ParseUser> friends, ParseException e){
setProgressBarIndeterminateVisibility(false);
if(e == null){
mFriends = friends;
String [] usernames = new String[mFriends.size()];
int i =0;
for(ParseUser user : mFriends){
usernames[i]=user.getUsername();
i++;
}
if(mGridView.getAdapter() == null){
UserAdapter adapter = new UserAdapter(RecipientsActivity.this, mFriends);
mGridView.setAdapter(adapter);
}
else{
((UserAdapter)mGridView.getAdapter()).refill(mFriends);
}
}
else{
Log.e(TAG, e.getMessage());
AlertDialog.Builder builder= new AlertDialog.Builder(RecipientsActivity.this);
builder.setMessage(e.getMessage())
.setTitle(R.string.error_title)
.setPositiveButton(android.R.string.ok, null);
AlertDialog dialog = builder.create();
dialog.show();
}
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.reciptient, menu);
mSendMenuItem = menu.getItem(0);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
switch(item.getItemId()){
case android.R.id.home:
NavUtils.navigateUpFromSameTask(this);
return true;
case R.id.action_send:
ParseObject message = createMessage();
if(message == null){
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setMessage(R.string.error_selecting_file)
.setTitle(R.string.error_selection_file_title)
.setPositiveButton(android.R.string.ok, null);
AlertDialog dialog = builder.create();
dialog.show();
}
else
{
send(message);
finish();
}
return true;
}
return super.onOptionsItemSelected(item);
}
protected ParseObject createMessage(){
ParseObject message = new ParseObject(ParseConstants.CLASS_MESSAGE);
message.put(ParseConstants.KEY_SENDER_ID, ParseUser.getCurrentUser().getObjectId());
message.put(ParseConstants.KEY_SENDER_NAME, ParseUser.getCurrentUser().getUsername());
message.put(ParseConstants.KEY_RECIPIENT_IDS, getRecipientIds());
message.put(ParseConstants.KEY_FILE_TYPE, mFileType);
byte[] fileBytes = FileHelper.getByteArrayFromFile(this, mMediaUri);
if(fileBytes == null){
return null;
}
else{
if(mFileType.equalsIgnoreCase(ParseConstants.TYPE_IMAGE)){
fileBytes = FileHelper.reduceImageForUpload(fileBytes);
}
String fileName = FileHelper.getFileName(this, mMediaUri, mFileType);
ParseFile file = new ParseFile(fileName, fileBytes);
message.put(ParseConstants.KEY_FILE, file);
return message;
}
}
protected ArrayList<String> getRecipientIds(){
ArrayList<String> recipientIds = new ArrayList<String>();
for(int i=0; i< mGridView.getCount(); i++){
if(mGridView.isItemChecked(i)){
recipientIds.add(mFriends.get(i).getObjectId());
}
}
return recipientIds;
}
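// Editor's note (illustrative, not in the original source): the grid adapter
// is backed by mFriends in the same order, so a checked position i maps
// straight to mFriends.get(i). E.g. with positions 0 and 2 checked:
//   getRecipientIds() -> ["xWMyZ4NEGZ", "k1lXWJuGNY"]  // made-up object IDs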
protected void send(ParseObject message){
message.saveInBackground(new SaveCallback(){
@Override
public void done(ParseException e){
if(e == null){
//success
Toast.makeText(RecipientsActivity.this, R.string.success_message, Toast.LENGTH_LONG).show();
sendPushNotifications();
}
else{
AlertDialog.Builder builder = new AlertDialog.Builder(RecipientsActivity.this);
builder.setMessage(R.string.error_sending_message)
.setTitle(R.string.error_selection_file_title)
.setPositiveButton(android.R.string.ok, null);
AlertDialog dialog = builder.create();
dialog.show();
}
}
});
}
protected OnItemClickListener mOnItemClickListener = new OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position,
long id) {
if(mGridView.getCheckedItemCount() > 0)
{
mSendMenuItem.setVisible(true);
}
else{
mSendMenuItem.setVisible(false);
}
ImageView checkImageView =(ImageView)findViewById(R.id.checkImageView);
if(mGridView.isItemChecked(position)){
//add recipient
checkImageView.setVisibility(View.VISIBLE);
}
else{
//remove the recipient
checkImageView.setVisibility(View.INVISIBLE);
}
}
};
protected void sendPushNotifications(){
ParseQuery<ParseInstallation> query = ParseInstallation.getQuery();
query.whereContainedIn(ParseConstants.KEY_USER_ID, getRecipientIds());
//send a push notification
ParsePush push = new ParsePush();
push.setQuery(query);
push.setMessage(getString(R.string.push_message, ParseUser.getCurrentUser().getUsername()));
push.sendInBackground();
}
}<|fim▁end|>
|
public static final String TAG=RecipientsActivity.class.getSimpleName();
protected List<ParseUser> mFriends;
|
<|file_name|>aggregate.py<|end_file_name|><|fim▁begin|>################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from abc import ABC, abstractmethod
from typing import List, Dict
from apache_beam.coders import PickleCoder, Coder
from pyflink.common import Row, RowKind
from pyflink.common.state import ListState, MapState
from pyflink.fn_execution.coders import from_proto
from pyflink.fn_execution.operation_utils import is_built_in_function, load_aggregate_function
from pyflink.fn_execution.state_impl import RemoteKeyedStateBackend
from pyflink.table import AggregateFunction, FunctionContext
from pyflink.table.data_view import ListView, MapView
def join_row(left: Row, right: Row):
fields = []
for value in left:
fields.append(value)
for value in right:
fields.append(value)
return Row(*fields)
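# A minimal illustration (editor's sketch; values are hypothetical):
#   join_row(Row('key1'), Row(42, 3.14)) == Row('key1', 42, 3.14)
# Left-row fields come first, which is how the grouping key gets prefixed to
# the aggregate values in the result rows emitted further below.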
def extract_data_view_specs_from_accumulator(current_index, accumulator):
# for built in functions we extract the data view specs from their accumulator
i = -1
extracted_specs = []
for field in accumulator:
i += 1<|fim▁hole|> if isinstance(field, MapView):
extracted_specs.append(MapViewSpec(
"builtInAgg%df%d" % (current_index, i), i, PickleCoder(), PickleCoder()))
elif isinstance(field, ListView):
extracted_specs.append(ListViewSpec(
"builtInAgg%df%d" % (current_index, i), i, PickleCoder()))
return extracted_specs
def extract_data_view_specs(udfs):
extracted_udf_data_view_specs = []
current_index = -1
for udf in udfs:
current_index += 1
udf_data_view_specs_proto = udf.specs
if not udf_data_view_specs_proto:
if is_built_in_function(udf.payload):
built_in_function = load_aggregate_function(udf.payload)
accumulator = built_in_function.create_accumulator()
extracted_udf_data_view_specs.append(
extract_data_view_specs_from_accumulator(current_index, accumulator))
else:
extracted_udf_data_view_specs.append([])
else:
extracted_specs = []
for spec_proto in udf_data_view_specs_proto:
state_id = spec_proto.name
field_index = spec_proto.field_index
if spec_proto.HasField("list_view"):
element_coder = from_proto(spec_proto.list_view.element_type)
extracted_specs.append(ListViewSpec(state_id, field_index, element_coder))
elif spec_proto.HasField("map_view"):
key_coder = from_proto(spec_proto.map_view.key_type)
value_coder = from_proto(spec_proto.map_view.value_type)
extracted_specs.append(
MapViewSpec(state_id, field_index, key_coder, value_coder))
else:
raise Exception("Unsupported data view spec type: " + spec_proto.type)
extracted_udf_data_view_specs.append(extracted_specs)
if all([len(i) == 0 for i in extracted_udf_data_view_specs]):
return []
return extracted_udf_data_view_specs
class StateListView(ListView):
def __init__(self, list_state: ListState):
super().__init__()
self._list_state = list_state
def get(self):
return self._list_state.get()
def add(self, value):
self._list_state.add(value)
def add_all(self, values):
self._list_state.add_all(values)
def clear(self):
self._list_state.clear()
def __hash__(self) -> int:
return hash(tuple(self.get()))
class StateMapView(MapView):
def __init__(self, map_state: MapState):
super().__init__()
self._map_state = map_state
def get(self, key):
return self._map_state.get(key)
def put(self, key, value) -> None:
self._map_state.put(key, value)
def put_all(self, dict_value) -> None:
self._map_state.put_all(dict_value)
def remove(self, key) -> None:
self._map_state.remove(key)
def contains(self, key) -> bool:
return self._map_state.contains(key)
def items(self):
return self._map_state.items()
def keys(self):
return self._map_state.keys()
def values(self):
return self._map_state.values()
def is_empty(self) -> bool:
return self._map_state.is_empty()
def clear(self) -> None:
return self._map_state.clear()
class DataViewSpec(object):
def __init__(self, state_id, field_index):
self.state_id = state_id
self.field_index = field_index
class ListViewSpec(DataViewSpec):
def __init__(self, state_id, field_index, element_coder):
super(ListViewSpec, self).__init__(state_id, field_index)
self.element_coder = element_coder
class MapViewSpec(DataViewSpec):
def __init__(self, state_id, field_index, key_coder, value_coder):
super(MapViewSpec, self).__init__(state_id, field_index)
self.key_coder = key_coder
self.value_coder = value_coder
class DistinctViewDescriptor(object):
def __init__(self, input_extractor, filter_args):
self._input_extractor = input_extractor
self._filter_args = filter_args
def get_input_extractor(self):
return self._input_extractor
def get_filter_args(self):
return self._filter_args
class RowKeySelector(object):
"""
A simple key selector used to extract the current key from the input Row according to the
group-by field indexes.
"""
def __init__(self, grouping):
self.grouping = grouping
def get_key(self, data: Row):
return Row(*[data[i] for i in self.grouping])
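# Example (editor's sketch): with group-by indexes [0, 2],
#   RowKeySelector([0, 2]).get_key(Row('a', 'b', 'c')) == Row('a', 'c')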
class StateDataViewStore(object):
"""
The class used to manage the DataViews used in :class:`AggsHandleFunction`. Currently
DataView is not supported so it is just a wrapper of the :class:`FunctionContext`.
"""
def __init__(self,
function_context: FunctionContext,
keyed_state_backend: RemoteKeyedStateBackend):
self._function_context = function_context
self._keyed_state_backend = keyed_state_backend
def get_runtime_context(self):
return self._function_context
def get_state_list_view(self, state_name, element_coder):
return StateListView(self._keyed_state_backend.get_list_state(state_name, element_coder))
def get_state_map_view(self, state_name, key_coder, value_coder):
return StateMapView(
self._keyed_state_backend.get_map_state(state_name, key_coder, value_coder))
class AggsHandleFunction(ABC):
"""
The base class for handling aggregate functions.
"""
@abstractmethod
def open(self, state_data_view_store):
"""
Initialization method for the function. It is called before the actual working methods.
:param state_data_view_store: The object used to manage the DataView.
"""
pass
@abstractmethod
def accumulate(self, input_data: Row):
"""
Accumulates the input values to the accumulators.
:param input_data: Input values bundled in a row.
"""
pass
@abstractmethod
def retract(self, input_data: Row):
"""
Retracts the input values from the accumulators.
:param input_data: Input values bundled in a row.
"""
@abstractmethod
def merge(self, accumulators: Row):
"""
Merges the other accumulators into current accumulators.
:param accumulators: The other row of accumulators.
"""
pass
@abstractmethod
def set_accumulators(self, accumulators: Row):
"""
Set the current accumulators (saved in a row) which contains the current aggregated results.
In streaming: accumulators are stored in the state, we need to restore aggregate buffers
from state.
In batch: accumulators are stored in the dict, we need to restore aggregate buffers from
dict.
:param accumulators: Current accumulators.
"""
pass
@abstractmethod
def get_accumulators(self) -> Row:
"""
Gets the current accumulators (saved in a row) which contains the current
aggregated results.
:return: The current accumulators.
"""
pass
@abstractmethod
def create_accumulators(self) -> Row:
"""
Initializes the accumulators and save them to an accumulators row.
:return: A row of accumulators which contains the aggregated results.
"""
pass
@abstractmethod
def cleanup(self):
"""
Cleanup for the retired accumulators state.
"""
pass
@abstractmethod
def get_value(self) -> Row:
"""
Gets the result of the aggregation from the current accumulators.
:return: The final result (saved in a row) of the current accumulators.
"""
pass
@abstractmethod
def close(self):
"""
Tear-down method for this function. It can be used for clean up work.
By default, this method does nothing.
"""
pass
class SimpleAggsHandleFunction(AggsHandleFunction):
"""
A simple AggsHandleFunction implementation which provides the basic functionality.
"""
def __init__(self,
udfs: List[AggregateFunction],
input_extractors: List,
index_of_count_star: int,
count_star_inserted: bool,
udf_data_view_specs: List[List[DataViewSpec]],
filter_args: List[int],
distinct_indexes: List[int],
distinct_view_descriptors: Dict[int, DistinctViewDescriptor]):
self._udfs = udfs
self._input_extractors = input_extractors
self._accumulators = None # type: Row
self._get_value_indexes = [i for i in range(len(udfs))]
if index_of_count_star >= 0 and count_star_inserted:
# The record count is used internally, should be ignored by the get_value method.
self._get_value_indexes.remove(index_of_count_star)
self._udf_data_view_specs = udf_data_view_specs
self._udf_data_views = []
self._filter_args = filter_args
self._distinct_indexes = distinct_indexes
self._distinct_view_descriptors = distinct_view_descriptors
self._distinct_data_views = {}
def open(self, state_data_view_store):
for udf in self._udfs:
udf.open(state_data_view_store.get_runtime_context())
self._udf_data_views = []
for data_view_specs in self._udf_data_view_specs:
data_views = {}
for data_view_spec in data_view_specs:
if isinstance(data_view_spec, ListViewSpec):
data_views[data_view_spec.field_index] = \
state_data_view_store.get_state_list_view(
data_view_spec.state_id,
PickleCoder())
elif isinstance(data_view_spec, MapViewSpec):
data_views[data_view_spec.field_index] = \
state_data_view_store.get_state_map_view(
data_view_spec.state_id,
PickleCoder(),
PickleCoder())
self._udf_data_views.append(data_views)
for key in self._distinct_view_descriptors.keys():
self._distinct_data_views[key] = state_data_view_store.get_state_map_view(
"agg%ddistinct" % key,
PickleCoder(),
PickleCoder())
def accumulate(self, input_data: Row):
for i in range(len(self._udfs)):
if i in self._distinct_data_views:
if len(self._distinct_view_descriptors[i].get_filter_args()) == 0:
filtered = False
else:
filtered = True
for filter_arg in self._distinct_view_descriptors[i].get_filter_args():
if input_data[filter_arg]:
filtered = False
break
if not filtered:
input_extractor = self._distinct_view_descriptors[i].get_input_extractor()
args = input_extractor(input_data)
if args in self._distinct_data_views[i]:
self._distinct_data_views[i][args] += 1
else:
self._distinct_data_views[i][args] = 1
if self._filter_args[i] >= 0 and not input_data[self._filter_args[i]]:
continue
input_extractor = self._input_extractors[i]
args = input_extractor(input_data)
if self._distinct_indexes[i] >= 0:
if args in self._distinct_data_views[self._distinct_indexes[i]]:
if self._distinct_data_views[self._distinct_indexes[i]][args] > 1:
continue
else:
raise Exception(
"The args are not in the distinct data view, this should not happen.")
self._udfs[i].accumulate(self._accumulators[i], *args)
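# Editor's sketch of the distinct bookkeeping above (data is hypothetical):
# for COUNT(DISTINCT c) the map view holds {args: times_seen}; the wrapped
# accumulate only fires the first time args is seen (times_seen == 1), so
# duplicate values under the same key leave the aggregate untouched.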
def retract(self, input_data: Row):
for i in range(len(self._udfs)):
if i in self._distinct_data_views:
if len(self._distinct_view_descriptors[i].get_filter_args()) == 0:
filtered = False
else:
filtered = True
for filter_arg in self._distinct_view_descriptors[i].get_filter_args():
if input_data[filter_arg]:
filtered = False
break
if not filtered:
input_extractor = self._distinct_view_descriptors[i].get_input_extractor()
args = input_extractor(input_data)
if args in self._distinct_data_views[i]:
self._distinct_data_views[i][args] -= 1
if self._distinct_data_views[i][args] == 0:
del self._distinct_data_views[i][args]
if self._filter_args[i] >= 0 and not input_data[self._filter_args[i]]:
continue
input_extractor = self._input_extractors[i]
args = input_extractor(input_data)
if self._distinct_indexes[i] >= 0 and \
args in self._distinct_data_views[self._distinct_indexes[i]]:
continue
self._udfs[i].retract(self._accumulators[i], *args)
def merge(self, accumulators: Row):
for i in range(len(self._udfs)):
self._udfs[i].merge(self._accumulators[i], [accumulators[i]])
def set_accumulators(self, accumulators: Row):
if self._udf_data_views:
for i in range(len(self._udf_data_views)):
for index, data_view in self._udf_data_views[i].items():
accumulators[i][index] = data_view
self._accumulators = accumulators
def get_accumulators(self):
return self._accumulators
def create_accumulators(self):
return Row(*[udf.create_accumulator() for udf in self._udfs])
def cleanup(self):
for i in range(len(self._udf_data_views)):
for data_view in self._udf_data_views[i].values():
data_view.clear()
def get_value(self):
return Row(*[self._udfs[i].get_value(self._accumulators[i])
for i in self._get_value_indexes])
def close(self):
for udf in self._udfs:
udf.close()
class RecordCounter(ABC):
"""
The RecordCounter is used to count the number of input records under the current key.
"""
@abstractmethod
def record_count_is_zero(self, acc):
pass
@staticmethod
def of(index_of_count_star):
if index_of_count_star >= 0:
return RetractionRecordCounter(index_of_count_star)
else:
return AccumulationRecordCounter()
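# Example (editor's sketch): a plan that stores COUNT(*) at accumulator
# index 0 gets retraction-aware counting, while -1 means no count-star:
#   RecordCounter.of(0)   # -> RetractionRecordCounter(0)
#   RecordCounter.of(-1)  # -> AccumulationRecordCounter()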
class AccumulationRecordCounter(RecordCounter):
def record_count_is_zero(self, acc):
# when all the inputs are accumulations, the count will never be zero
return acc is None
class RetractionRecordCounter(RecordCounter):
def __init__(self, index_of_count_star):
self._index_of_count_star = index_of_count_star
def record_count_is_zero(self, acc):
# We store the counter in the accumulator and the counter is never be null
return acc is None or acc[self._index_of_count_star][0] == 0
class GroupAggFunction(object):
def __init__(self,
aggs_handle: AggsHandleFunction,
key_selector: RowKeySelector,
state_backend: RemoteKeyedStateBackend,
state_value_coder: Coder,
generate_update_before: bool,
state_cleaning_enabled: bool,
index_of_count_star: int):
self.aggs_handle = aggs_handle
self.generate_update_before = generate_update_before
self.state_cleaning_enabled = state_cleaning_enabled
self.key_selector = key_selector
self.state_value_coder = state_value_coder
self.state_backend = state_backend
self.record_counter = RecordCounter.of(index_of_count_star)
def open(self, function_context: FunctionContext):
self.aggs_handle.open(StateDataViewStore(function_context, self.state_backend))
def close(self):
self.aggs_handle.close()
def process_element(self, input_data: Row):
key = self.key_selector.get_key(input_data)
self.state_backend.set_current_key(key)
self.state_backend.clear_cached_iterators()
accumulator_state = self.state_backend.get_value_state(
"accumulators", self.state_value_coder)
accumulators = accumulator_state.value()
if accumulators is None:
if self.is_retract_msg(input_data):
# Don't create a new accumulator for a retraction message. This might happen if the
# retraction message is the first message for the key or after a state clean up.
return
first_row = True
accumulators = self.aggs_handle.create_accumulators()
else:
first_row = False
# set accumulators to handler first
self.aggs_handle.set_accumulators(accumulators)
# get previous aggregate result
pre_agg_value = self.aggs_handle.get_value()
# update aggregate result and set to the newRow
if self.is_accumulate_msg(input_data):
# accumulate input
self.aggs_handle.accumulate(input_data)
else:
# retract input
self.aggs_handle.retract(input_data)
# get current aggregate result
new_agg_value = self.aggs_handle.get_value()
# get accumulator
accumulators = self.aggs_handle.get_accumulators()
if not self.record_counter.record_count_is_zero(accumulators):
# we aggregated at least one record for this key
# update the state
accumulator_state.update(accumulators)
# if this was not the first row and we have to emit retractions
if not first_row:
if not self.state_cleaning_enabled and pre_agg_value == new_agg_value:
# newRow is the same as before and state cleaning is not enabled.
# We do not emit retraction and acc message.
# If state cleaning is enabled, we have to emit messages to prevent too early
# state eviction of downstream operators.
return
else:
# retract previous result
if self.generate_update_before:
# prepare UPDATE_BEFORE message for previous row
retract_row = join_row(key, pre_agg_value)
retract_row.set_row_kind(RowKind.UPDATE_BEFORE)
yield retract_row
# prepare UPDATE_AFTER message for new row
result_row = join_row(key, new_agg_value)
result_row.set_row_kind(RowKind.UPDATE_AFTER)
else:
# this is the first, output new result
# prepare INSERT message for new row
result_row = join_row(key, new_agg_value)
result_row.set_row_kind(RowKind.INSERT)
yield result_row
else:
# we retracted the last record for this key
# sent out a delete message
if not first_row:
# prepare delete message for previous row
result_row = join_row(key, pre_agg_value)
result_row.set_row_kind(RowKind.DELETE)
yield result_row
# and clear all state
accumulator_state.clear()
# cleanup dataview under current key
self.aggs_handle.cleanup()
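# Editor's sketch of the changelog emitted per key by process_element
# (hypothetical rows, generate_update_before=True):
#   first row for key      -> INSERT(key, agg)
#   subsequent row         -> UPDATE_BEFORE(key, old), UPDATE_AFTER(key, new)
#   last record retracted  -> DELETE(key, old)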
def on_timer(self, key):
if self.state_cleaning_enabled:
self.state_backend.set_current_key(key)
accumulator_state = self.state_backend.get_value_state(
"accumulators", self.state_value_coder)
accumulator_state.clear()
self.aggs_handle.cleanup()
@staticmethod
def is_retract_msg(data: Row):
return data.get_row_kind() == RowKind.UPDATE_BEFORE \
or data.get_row_kind() == RowKind.DELETE
@staticmethod
def is_accumulate_msg(data: Row):
return data.get_row_kind() == RowKind.UPDATE_AFTER \
or data.get_row_kind() == RowKind.INSERT<|fim▁end|>
|
# TODO: infer the coder from the input types and output type of the built-in functions
|
<|file_name|>ClientYamlTestSuite.java<|end_file_name|><|fim▁begin|>/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest.yaml.section;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
/**
* Holds a REST test suite loaded from a specific yaml file.
* Supports a setup section and multiple test sections.
*/
public class ClientYamlTestSuite {
public static ClientYamlTestSuite parse(String api, Path file) throws IOException {
if (!Files.isRegularFile(file)) {
throw new IllegalArgumentException(file.toAbsolutePath() + " is not a file");
}
String filename = file.getFileName().toString();
//remove the file extension
int i = filename.lastIndexOf('.');
if (i > 0) {
filename = filename.substring(0, i);
}
//our yaml parser seems to be too tolerant. Each yaml suite must end with \n, otherwise client tests might break.
try (FileChannel channel = FileChannel.open(file, StandardOpenOption.READ)) {
ByteBuffer bb = ByteBuffer.wrap(new byte[1]);
if (channel.size() == 0) {
throw new IllegalArgumentException("test suite file " + file.toString() + " is empty");
}
channel.read(bb, channel.size() - 1);
if (bb.get(0) != 10) {
throw new IOException("test suite [" + api + "/" + filename + "] doesn't end with line feed (\\n)");
}
}
try (XContentParser parser = YamlXContent.yamlXContent.createParser(ExecutableSection.XCONTENT_REGISTRY,
LoggingDeprecationHandler.INSTANCE, Files.newInputStream(file))) {
return parse(api, filename, parser);
} catch(Exception e) {
throw new IOException("Error parsing " + api + "/" + filename, e);
}
}
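// Illustrative call (editor's note; the api name and path are hypothetical):
//   ClientYamlTestSuite suite =
//       ClientYamlTestSuite.parse("search", Paths.get("search/10_basic.yml"));
// The file must be a regular, non-empty file ending in a line feed, as
// enforced above.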
public static ClientYamlTestSuite parse(String api, String suiteName, XContentParser parser) throws IOException {
parser.nextToken();
assert parser.currentToken() == XContentParser.Token.START_OBJECT : "expected token to be START_OBJECT but was "
+ parser.currentToken();
ClientYamlTestSuite restTestSuite = new ClientYamlTestSuite(api, suiteName);
restTestSuite.setSetupSection(SetupSection.parseIfNext(parser));
restTestSuite.setTeardownSection(TeardownSection.parseIfNext(parser));
while(true) {
//the "---" section separator is not understood by the yaml parser. null is returned, same as when the parser is closed
//we need to somehow distinguish between a null in the middle of a test ("---")
// and a null at the end of the file (at least two consecutive null tokens)
if(parser.currentToken() == null) {
if (parser.nextToken() == null) {
break;
}
}
ClientYamlTestSection testSection = ClientYamlTestSection.parse(parser);
if (!restTestSuite.addTestSection(testSection)) {
throw new ParsingException(testSection.getLocation(), "duplicate test section [" + testSection.getName() + "]");
}
}
return restTestSuite;
}
private final String api;
private final String name;
private SetupSection setupSection;
private TeardownSection teardownSection;
private Set<ClientYamlTestSection> testSections = new TreeSet<>();
public ClientYamlTestSuite(String api, String name) {
this.api = api;
this.name = name;
}
public String getApi() {
return api;<|fim▁hole|> }
public String getPath() {
return api + "/" + name;
}
public SetupSection getSetupSection() {
return setupSection;
}
public void setSetupSection(SetupSection setupSection) {
this.setupSection = setupSection;
}
public TeardownSection getTeardownSection() {
return teardownSection;
}
public void setTeardownSection(TeardownSection teardownSection) {
this.teardownSection = teardownSection;
}
/**
* Adds a {@link org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection} to the REST suite
* @return true if the test section was not already present, false otherwise
*/
public boolean addTestSection(ClientYamlTestSection testSection) {
return this.testSections.add(testSection);
}
public List<ClientYamlTestSection> getTestSections() {
return new ArrayList<>(testSections);
}
}<|fim▁end|>
|
}
public String getName() {
return name;
|
<|file_name|>read_hadamard_file.py<|end_file_name|><|fim▁begin|># read_hadamard_file.py
# Reads data from a text file to create a 3D
# version of a given Hadamard Matrix.
# Created by Rick Henderson
# Created on June 4, 2015
# Completed June 5, 2015
# Note: A "Hadamard File" is a text file containing rows
# rows of + and - where the + indicates a 1 or a cube
# and the - represents a 0 or a space.
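# For illustration (editor's sketch), an order-4 file would hold one
# character per matrix entry:
#   ++++
#   +-+-
#   ++--
#   +--+
# had12.txt below follows the same convention at order 12.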
<|fim▁hole|>nOrder = 12
# You can also change these values if you want to alter the offset between the cubes
xOffset = 1.0
yOffset = 1.0
zOffset = 0 # You would have to alter the code more if you want a 3D array of cubes
xpos = 0
ypos = 0
char_number = 0
# Open the file to read from
# Modified technique from DiveIntoPython3.net/files.html
line_number = 0
with open('c:/had12.txt', encoding='utf-8') as a_file:
for each_row in a_file:
line_number += 1
# Just print the current row to the console as a test
print(each_row.rstrip())
for a_char in each_row:
char_number += 1
# If the current character is +, generate a cube then position it
if a_char == '+':
bpy.ops.mesh.primitive_cube_add(radius=0.5)
bpy.context.object.location[0] = line_number * xOffset
bpy.context.object.location[1] = char_number * yOffset
# Now an entire row has been read, so reset char_number to 0
char_number = 0
# Program Ends<|fim▁end|>
|
import bpy
# Set the order (size) of the matrix
|
<|file_name|>roundtrip.validator.ts<|end_file_name|><|fim▁begin|>import { Directive } from '@angular/core';
import {
FormGroup,
Validator,
AbstractControl,
NG_VALIDATORS,
FormGroupDirective,
} from '@angular/forms';
@Directive({
selector: 'form[round-trip]',
providers: [{ provide: NG_VALIDATORS, useExisting: RoundTrip, multi: true }],
})
export class RoundTrip implements Validator {
validate(control: AbstractControl): any {
let formGroup = <FormGroup>control;
let fromCtrl = formGroup.controls['from'];
let toCtrl = formGroup.controls['to'];
<|fim▁hole|>
let from = fromCtrl.value;
let to = toCtrl.value;
if (from == to) {
return {
'round-trip': {
city: from,
},
};
}
return null;
}
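// Editor's sketch of how the error surfaces in a template (markup is
// hypothetical, not from the original app):
//   <form round-trip #f="ngForm">
//     <input name="from" ngModel> <input name="to" ngModel>
//   </form>
//   <div *ngIf="f.errors && f.errors['round-trip']">Pick two different cities.</div>
// Returning null from validate() (rather than an empty object) is what keeps
// a valid form's status VALID, since Angular treats any non-null map as errors.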
}<|fim▁end|>
|
if (!fromCtrl || !toCtrl) return null;
|
<|file_name|>cubeblocks.py<|end_file_name|><|fim▁begin|>import xml.etree.ElementTree as ET<|fim▁hole|>class Definition(object):
def __init__(self, definition_root):
self._definition_root = definition_root
def type_name(self):
type_id = self._definition_root.find('Id').find('TypeId')
subtype_id = self._definition_root.find('Id').find('SubtypeId')
if type_id is None or not type_id.text:
return subtype_id.text
elif subtype_id is None or not subtype_id.text:
return type_id.text
else:
return type_id.text + ':' + subtype_id.text
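# Example (editor's note; the element values are made up): a definition with
# <TypeId>CubeBlock</TypeId> and <SubtypeId>LargeBlockArmorBlock</SubtypeId>
# yields type_name() == 'CubeBlock:LargeBlockArmorBlock'; when TypeId is
# empty only the subtype is returned, and vice versa.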
@property
def build_time(self):
time = self._definition_root.find('BuildTimeSeconds')
if time is not None:
return float(time.text)
@build_time.setter
def build_time(self, value):
time = self._definition_root.find('BuildTimeSeconds')
if time is None:
time = ET.SubElement(self._definition_root, 'BuildTimeSeconds')
time.text = str(value)
@property
def disassembly_ratio(self):
ratio = self._definition_root.find('DisassembleRatio')
if ratio is not None:
return float(ratio.text)
return 1
@disassembly_ratio.setter
def disassembly_ratio(self, value):
ratio = self._definition_root.find('DisassembleRatio')
if ratio is None:
ratio = ET.SubElement(self._definition_root, 'DisassembleRatio')
ratio.text = str(value)<|fim▁end|>
| |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
# Create your models here.
class Author(models.Model):
first_name = models.CharField(max_length = 100)<|fim▁hole|> profile_photo = models.ImageField(upload_to="media")
linkedin_link = models.URLField(max_length=250)
personal_website_link = models.URLField(max_length=250, blank=True)
bio = models.TextField()<|fim▁end|>
|
last_name = models.CharField(max_length = 100)
date_of_birth = models.DateField()
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
# Copyright (C) 2011-2013 Serpent Consulting Services Pvt. Ltd. (<http://serpentcs.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|>##############################################################################
import table_designer<|fim▁end|>
|
#
|
<|file_name|>aesoracle.rs<|end_file_name|><|fim▁begin|>use matasano::set2::aesoracle;
#[test]
fn test_cryptopals_case() {
let (success, failure) = r!(aesoracle::detect_aes_mode(100));
assert_eq!((success, failure), (100, 0));<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>single.rs<|end_file_name|><|fim▁begin|>use mongodb::stream::StreamConnector;
use mongodb::topology::{TopologyDescription, TopologyType};
use std::fs;
use std::path::Path;
use super::framework::run_suite;
#[test]
fn sdam_single() {
let dir = "tests/json/data/specs/source/server-discovery-and-monitoring/tests/single/";
let paths = fs::read_dir(&Path::new(dir)).unwrap();
for path in paths {
let path2 = path.unwrap().path();
let filename = path2.to_string_lossy();<|fim▁hole|> if filename.ends_with(".json") {
let mut description = TopologyDescription::new(StreamConnector::default());
description.topology_type = TopologyType::Single;
run_suite(&filename, Some(description))
}
}
}<|fim▁end|>
| |
<|file_name|>Utils.py<|end_file_name|><|fim▁begin|># $HeadURL: $
'''
:mod: Utils
Module that collects utility functions.
'''
import fnmatch
from DIRAC import gConfig, S_OK
from DIRAC.Core.Utilities import List
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
__RCSID__ = '$Id: $'
def voimport( base_mod ):
'''
Function to import from extensions, if not found, tries from DIRAC.
'''
# FIXME: A.T.: Use Core.Utilities.ObjectLoader
for ext in gConfig.getValue( 'DIRAC/Extensions', [] ):
try:
return __import__( ext + base_mod, globals(), locals(), ['*'] )
except ImportError:
continue
# If not found in extensions, import it in DIRAC base.
return __import__( base_mod, globals(), locals(), ['*'] )
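# Usage sketch (editor's addition; extension and module path are invented):
# with DIRAC/Extensions = ['LHCb'], voimport('DIRAC.Core.Utilities.Foo')
# first tries 'LHCbDIRAC.Core.Utilities.Foo' and falls back to the plain
# DIRAC import if the extension does not provide it.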
def getCSTree( csPath = '' ):
'''
Gives the configuration rooted at path in a Python dict. The
result is a Python dictionary that reflects the structure of the
configuration file.
'''
opHelper = Operations()
def getCSTreeAsDict( treePath ):
'''
Function to recursively iterate over a CS tree
'''
csTreeDict = {}
opts = opHelper.getOptionsDict( treePath )
if opts[ 'OK' ]:
opts = opts[ 'Value' ]
for optKey, optValue in opts.items():
if optValue.find( ',' ) > -1:
optValue = List.fromChar( optValue )
else:
optValue = [ optValue ]
csTreeDict[ optKey ] = optValue
secs = opHelper.getSections( treePath )
if secs[ 'OK' ]:
secs = secs[ 'Value' ]
for sec in secs:
secTree = getCSTreeAsDict( '%s/%s' % ( treePath, sec ) )
if not secTree[ 'OK' ]:<|fim▁hole|> csTreeDict[ sec ] = secTree[ 'Value' ]
return S_OK( csTreeDict )
return getCSTreeAsDict( csPath )
def configMatch( candidateParams, configParams ):
'''
For a given configuration, the candidate will be rejected if:
- it is missing at least one of the params in the config
- if a param of the candidate does not match the config params
- if a candidate param is None, is considered as wildcard
'''
for key in candidateParams:
if not key in configParams:
# The candidateParams is missing one of the parameters required
# return False
continue
if candidateParams[ key ] is None:
# None is assumed to be a wildcard (*)
continue
cParameter = candidateParams[ key ]
if not isinstance( cParameter, list ):
cParameter = [ cParameter ]
# We allow using UNIX-like regular expression ( wild-cards ) on the CS
_matches = False
for configItem in configParams[ key ]:
if fnmatch.filter( set( cParameter ), configItem ):
_matches = True
break
if not _matches:
return False
return True
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF<|fim▁end|>
|
return secTree
|
<|file_name|>canteen_website_tags.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django import template
from CanteenWebsite.models import Category
from CanteenWebsite.utils.functions import setting_get
register = template.Library()
@register.simple_tag
def get_setting(name, default=None):
return setting_get(name, default)
@register.inclusion_tag('CanteenWebsite/inclusions/sidebar_category_list.html', takes_context=True)
def sidebar_category_list(context):
categories = Category.objects.all()
try:
current_category = context['current_category']
except:
current_category = None
return {
'current': current_category,
'categories': categories,
}
@register.inclusion_tag('CanteenWebsite/inclusions/pagination.html')
def show_pagination(page):
pagination = page.paginator
page_range = list()
if pagination.num_pages <= 10:
page_range = pagination.page_range
else:
ON_EACH_SIDE = 2
ON_ENDS = 2
DOT = '...'
if page.number > (ON_EACH_SIDE + ON_ENDS):
page_range.extend(range(1, ON_ENDS))
page_range.append(DOT)
page_range.extend(range(page.number - ON_EACH_SIDE, page.number + 1))
else:
page_range.extend(range(1, page.number + 1))
if page.number < (pagination.num_pages - ON_EACH_SIDE - ON_ENDS - 1):
page_range.extend(range(page.number + 1, page.number + ON_EACH_SIDE + 1))
page_range.append(DOT)
page_range.extend(range(pagination.num_pages - ON_ENDS, pagination.num_pages + 1))
else:
page_range.extend(range(page.number + 1, pagination.num_pages + 1))
return {
'page': page,
'pages': page_range<|fim▁hole|>
@register.assignment_tag
def define(val=None):
return val<|fim▁end|>
|
}
|
<|file_name|>Linear-B-Syllabary-regex.js<|end_file_name|><|fim▁begin|>// Regular expression that matches all symbols in the Linear B Syllabary block as per Unicode v5.1.0:<|fim▁hole|><|fim▁end|>
|
/\uD800[\uDC00-\uDC7F]/;
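// Editor's note: Linear B Syllabary spans U+10000..U+1007F; encoded as UTF-16
// surrogate pairs the whole block shares the high surrogate \uD800 with low
// surrogates \uDC00..\uDC7F (e.g. '\u{10000}' === '\uD800\uDC00'), which is
// what the pattern above relies on.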
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>"""
Unit tests for django-registration.
These tests assume that you've completed all the prerequisites for
getting django-registration running in the default setup, to wit:
1. You have ``registration`` in your ``INSTALLED_APPS`` setting.
2. You have created all of the templates mentioned in this
application's documentation.
3. You have added the setting ``ACCOUNT_ACTIVATION_DAYS`` to your
settings file.
4. You have URL patterns pointing to the registration and activation
views, with the names ``registration_register`` and
``registration_activate``, respectively, and a URL pattern named
'registration_complete'.
"""
import datetime
import sha
from django.conf import settings
from django.contrib.auth.models import User
from django.core import mail
from django.core import management
from django.core.urlresolvers import reverse
from django.test import TestCase
from commoner.registration import forms
from commoner.registration.models import RegistrationProfile
from commoner.registration import signals
class RegistrationTestCase(TestCase):
"""
Base class for the test cases; this sets up two users -- one
expired, one not -- which are used to exercise various parts
of the application.
"""
def setUp(self):
self.sample_user = RegistrationProfile.objects.create_inactive_user(username='alice',
password='secret',
email='[email protected]')
self.expired_user = RegistrationProfile.objects.create_inactive_user(username='bob',
password='swordfish',
email='[email protected]')
self.expired_user.date_joined -= datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
self.expired_user.save()
class RegistrationModelTests(RegistrationTestCase):
"""
Tests for the model-oriented functionality of django-registration,
including ``RegistrationProfile`` and its custom manager.
"""
def test_new_user_is_inactive(self):
"""
Test that a newly-created user is inactive.
"""
self.failIf(self.sample_user.is_active)
def test_registration_profile_created(self):
"""
Test that a ``RegistrationProfile`` is created for a new user.
"""
self.assertEqual(RegistrationProfile.objects.count(), 2)
def test_activation_email(self):
"""
Test that user signup sends an activation email.
"""
self.assertEqual(len(mail.outbox), 2)
def test_activation_email_disable(self):
"""
Test that activation email can be disabled.
"""
RegistrationProfile.objects.create_inactive_user(username='noemail',
password='foo',
email='[email protected]',
send_email=False)
self.assertEqual(len(mail.outbox), 2)
<|fim▁hole|> Test that user activation actually activates the user and
properly resets the activation key, and fails for an
already-active or expired user, or an invalid key.
"""
# Activating a valid user returns the user.
self.failUnlessEqual(RegistrationProfile.objects.activate_user(RegistrationProfile.objects.get(user=self.sample_user).activation_key).pk,
self.sample_user.pk)
# The activated user must now be active.
self.failUnless(User.objects.get(pk=self.sample_user.pk).is_active)
# The activation key must now be reset to the "already activated" constant.
self.failUnlessEqual(RegistrationProfile.objects.get(user=self.sample_user).activation_key,
RegistrationProfile.ACTIVATED)
# Activating an expired user returns False.
self.failIf(RegistrationProfile.objects.activate_user(RegistrationProfile.objects.get(user=self.expired_user).activation_key))
# Activating from a key that isn't a SHA1 hash returns False.
self.failIf(RegistrationProfile.objects.activate_user('foo'))
# Activating from a key that doesn't exist returns False.
self.failIf(RegistrationProfile.objects.activate_user(sha.new('foo').hexdigest()))
def test_account_expiration_condition(self):
"""
Test that ``RegistrationProfile.activation_key_expired()``
returns ``True`` for expired users and for active users, and
``False`` otherwise.
"""
# Unexpired user returns False.
self.failIf(RegistrationProfile.objects.get(user=self.sample_user).activation_key_expired())
# Expired user returns True.
self.failUnless(RegistrationProfile.objects.get(user=self.expired_user).activation_key_expired())
# Activated user returns True.
RegistrationProfile.objects.activate_user(RegistrationProfile.objects.get(user=self.sample_user).activation_key)
self.failUnless(RegistrationProfile.objects.get(user=self.sample_user).activation_key_expired())
def test_expired_user_deletion(self):
"""
Test that
``RegistrationProfile.objects.delete_expired_users()`` deletes
only inactive users whose activation window has expired.
"""
RegistrationProfile.objects.delete_expired_users()
self.assertEqual(RegistrationProfile.objects.count(), 1)
def test_management_command(self):
"""
Test that ``manage.py cleanupregistration`` functions
correctly.
"""
management.call_command('cleanupregistration')
self.assertEqual(RegistrationProfile.objects.count(), 1)
def test_signals(self):
"""
Test that the ``user_registered`` and ``user_activated``
signals are sent, and that they send the ``User`` as an
argument.
"""
def receiver(sender, **kwargs):
self.assert_('user' in kwargs)
self.assertEqual(kwargs['user'].username, u'signal_test')
received_signals.append(kwargs.get('signal'))
received_signals = []
expected_signals = [signals.user_registered, signals.user_activated]
for signal in expected_signals:
signal.connect(receiver)
RegistrationProfile.objects.create_inactive_user(username='signal_test',
password='foo',
email='[email protected]',
send_email=False)
RegistrationProfile.objects.activate_user(RegistrationProfile.objects.get(user__username='signal_test').activation_key)
self.assertEqual(received_signals, expected_signals)
class RegistrationFormTests(RegistrationTestCase):
"""
Tests for the forms and custom validation logic included in
django-registration.
"""
fixtures = ['test_codes.json',]
def test_registration_form(self):
"""
Test that ``RegistrationForm`` enforces username constraints
and matching passwords.
"""
invalid_data_dicts = [
# Non-alphanumeric username.
{
'data':
{ 'username': 'foo/bar',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo',
'agree_to_tos': 'on',},
'error':
('username', [u"Enter a valid value."])
},
# Already-existing username.
{
'data':
{ 'username': 'alice',
'email': '[email protected]',
'password1': 'secret',
'password2': 'secret',
'agree_to_tos': 'on', },
'error':
('username', [u"This username is already taken. Please choose another."])
},
# Mismatched passwords.
{
'data':
{ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'bar',
'agree_to_tos': 'on', },
'error':
('__all__', [u"You must type the same password each time"])
},
# Must agree to TOS
{
'data':
{ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo',
'agree_to_tos': False, },
'error':
('agree_to_tos', [u"You must agree to the terms to register"])
},
]
for invalid_dict in invalid_data_dicts:
form = forms.RegistrationForm(data=invalid_dict['data'])
self.failIf(form.is_valid())
self.assertEqual(form.errors[invalid_dict['error'][0]], invalid_dict['error'][1])
form = forms.RegistrationForm(data={ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo',
'agree_to_tos':'on',
'promo_code':'12345678'})
self.failUnless(form.is_valid())
class RegistrationViewTests(RegistrationTestCase):
"""
Tests for the views included in django-registration.
"""
def _test_registration_view(self):
"""
Underscored to prevent running while free accounts are prohibited
Test that the registration view rejects invalid submissions,
and creates a new user and redirects after a valid submission.
"""
# Invalid data fails.
response = self.client.post(reverse('registration_register'),
data={ 'username': 'alice', # Will fail on username uniqueness.
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo' })
self.assertEqual(response.status_code, 200)
self.failUnless(response.context[0]['form'])
self.failUnless(response.context[0]['form'].errors)
response = self.client.post(reverse('registration_register'),
data={ 'username': 'foo',
'email': '[email protected]',
'password1': 'foo',
'password2': 'foo',
'agree_to_tos':'on'})
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'registration/check_inbox.html')
self.assertEqual(RegistrationProfile.objects.count(), 3)
def test_activation_view(self):
"""
Test that the activation view activates the user from a valid
key and fails if the key is invalid or has expired.
"""
# Valid user puts the user account into the context.
response = self.client.get(reverse('registration_activate',
kwargs={ 'activation_key': RegistrationProfile.objects.get(user=self.sample_user).activation_key }))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.context[0]['account'].pk, self.sample_user.pk)
# Expired user sets the account to False.
response = self.client.get(reverse('registration_activate',
kwargs={ 'activation_key': RegistrationProfile.objects.get(user=self.expired_user).activation_key }))
self.assertEqual(response.status_code, 404)
# Invalid key gets to the view, but sets account to False.
response = self.client.get(reverse('registration_activate',
kwargs={ 'activation_key': 'foo' }))
# hmmm, need an assertion here
# Nonexistent key sets the account to False.
response = self.client.get(reverse('registration_activate',
kwargs={ 'activation_key': sha.new('foo').hexdigest() }))
self.assertEqual(response.status_code, 404)<|fim▁end|>
|
def test_activation(self):
"""
|
<|file_name|>adnoce.js<|end_file_name|><|fim▁begin|>var db = require('mongoose');
var Log = require('log'), log = new Log('info');
var clienttracking = require('./clienttracking.js');
var mapreduce = require('./mapreduce.js');
var io = null;
exports.server = require('./adnoceserver.js');
exports.setDatabase = function(databaseConfiguration, callback) {
var port = databaseConfiguration.port || '27017';
var opts = databaseConfiguration.options || {};
db.connect('mongodb://'+databaseConfiguration.host+':'+port+'/'+databaseConfiguration.name, opts, function(){
log.info('adnoce core - creating database connection to "%s" on host "%s:%s", status: %s', databaseConfiguration.name, databaseConfiguration.host, port, db.connection.readyState);
if (db.connection.readyState != 1) {
log.error('adnoce core - database connection not ready yet');
}
if (typeof(callback) === 'function') callback(db);
});
}
exports.setServerSocketIO = function(io_, path_) {
var path = path_ || '/adnoce';
io = io_.of(path).authorization(function (handshakeData, callback) {
// @TODO: auth (e.g. ip-based on handshakeData.address)<|fim▁hole|>}
var socketConnection = function(socket_) {
log.info('adnoce core - server socket client "%s" connected to endpoint "%s"', socket_.handshake.address.address, socket_.flags.endpoint);
}
exports.clientTrackingScript = function(req, res) {
res.set({'Content-Type': 'application/javascript', 'Cache-Control': 'no-cache'});
res.send(200, clienttracking.getClientTrackingScript(req));
var additionalData = req.adnoceData || {};
additionalData.adnocetype = 1;
clienttracking.processRequest(req, additionalData);
};
exports.clientTrackingScriptUpdate = function(req, res) {
res.set({'Content-Type': 'text/plain', 'Cache-Control': 'no-cache'});
if (!req.param('p')) res.send(400, '0'); else {
res.send(200, '1');
var additionalData = req.adnoceData || {};
if (req.param('t')) additionalData.adnocetype = req.param('t');
clienttracking.updateSessionData(req.sessionID, req.param('p'), additionalData);
}
};
exports.addEvent = function(type, name, sessionId, additionalData) {
clienttracking.addEvent(type, name, sessionId, additionalData);
};
exports.MapReduce = mapreduce.MapReduce;
var pushServerHealth = function(serverOSObject) {
io.emit('health', {uptime: serverOSObject.uptime(), load: serverOSObject.loadavg(), memory: {total: serverOSObject.totalmem(), free: serverOSObject.freemem()}});
}
exports.pushServerHealth = pushServerHealth;<|fim▁end|>
|
callback(null, true);
}).on('connection', socketConnection);
clienttracking.setSocketIO(io);
|
<|file_name|>do_help.js<|end_file_name|><|fim▁begin|>// Copyright JS Foundation and other contributors, http://js.foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software<|fim▁hole|>
print("help tests");<|fim▁end|>
|
// distributed under the License is distributed on an "AS IS" BASIS
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
|