prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>staging.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from dasdocc.conf import base<|fim▁end|> | |
<|file_name|>pylint_extensions.py<|end_file_name|><|fim▁begin|># coding: utf-8
#
# Copyright 2018 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements additional custom Pylint checkers to be used as part of
presubmit checks. Next message id would be C0029.
"""
from __future__ import absolute_import # pylint: disable=import-only-modules
from __future__ import unicode_literals # pylint: disable=import-only-modules
import linecache
import os
import re
import sys
import tokenize
import python_utils
from .. import docstrings_checker
_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.9.4')
sys.path.insert(0, _PYLINT_PATH)
# List of punctuation symbols that can be used at the end of
# comments and docstrings.
ALLOWED_TERMINATING_PUNCTUATIONS = ['.', '?', '}', ']', ')']
# If any of these phrases are found inside a docstring or comment,
# the punctuation and capital letter checks will be skipped for that
# comment or docstring.
EXCLUDED_PHRASES = [
'coding:', 'pylint:', 'http://', 'https://', 'scripts/', 'extract_node']
import astroid # isort:skip pylint: disable=wrong-import-order, wrong-import-position
from pylint import checkers # isort:skip pylint: disable=wrong-import-order, wrong-import-position
from pylint import interfaces # isort:skip pylint: disable=wrong-import-order, wrong-import-position
from pylint.checkers import typecheck # isort:skip pylint: disable=wrong-import-order, wrong-import-position
from pylint.checkers import utils as checker_utils # isort:skip pylint: disable=wrong-import-order, wrong-import-position
from pylint.extensions import _check_docs_utils # isort:skip pylint: disable=wrong-import-order, wrong-import-position
def read_from_node(node):
"""Returns the data read from the ast node in unicode form.
Args:
node: astroid.scoped_nodes.Function. Node to access module content.
Returns:
list(str). The data read from the ast node.
"""
return list([line.decode('utf-8') for line in node.stream().readlines()])
class ExplicitKeywordArgsChecker(checkers.BaseChecker):
"""Custom pylint checker which checks for explicit keyword arguments
in any function call.
"""
__implements__ = interfaces.IAstroidChecker
name = 'explicit-keyword-args'
priority = -1
msgs = {
'C0001': (
'Keyword argument %s should be named explicitly in %s call of %s.',
'non-explicit-keyword-args',
'All keyword arguments should be explicitly named in function call.'
),
'C0027': (
'Keyword argument %s used for a non keyword argument in %s '
'call of %s.',
'arg-name-for-non-keyword-arg',
'Positional arguments should not be used as keyword arguments '
'in function call.'
),
}
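# Editorial sketch (hypothetical function, not part of the original checker):
# given a definition like
#     def fetch_entity(entity_id, strict=False): ...
# the two messages above would fire on these calls:
#     fetch_entity('id0', True)        # C0001: pass strict=True explicitly
#     fetch_entity(entity_id='id0')    # C0027: entity_id is a positional arg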
def _check_non_explicit_keyword_args(
self, node, name, callable_name, keyword_args,
num_positional_args_unused, num_mandatory_parameters):
"""Custom pylint check to ensure that position arguments should not
be used as keyword arguments.
Args:
node: astroid.node.Function. The current function call node.
name: str. Name of the keyword argument.
callable_name: str. Name of method type.
keyword_args: list(str). Name of all keyword arguments in function
call.
num_positional_args_unused: int. Number of unused positional
arguments.
num_mandatory_parameters: int. Number of mandatory parameters.
Returns:
int. Number of unused positional arguments.
"""
display_name = repr(name)
if name not in keyword_args and (
num_positional_args_unused > (
num_mandatory_parameters)) and (
callable_name != 'constructor'):
# This try/except block tries to get the function
# name. Since each node may differ, multiple
# blocks have been used.
try:
func_name = node.func.attrname
except AttributeError:
func_name = node.func.name
self.add_message(
'non-explicit-keyword-args', node=node,
args=(
display_name,
callable_name,
func_name))
num_positional_args_unused -= 1
return num_positional_args_unused
def _check_argname_for_nonkeyword_arg(
self, node, called, callable_name, keyword_args,
keyword_args_in_funcdef):
"""Custom pylint check to ensure that position arguments should not
be used as keyword arguments.
Args:
node: astroid.node.Function. The current function call node.
called: astroid.Call. The function call object.
keyword_args: list(str). Name of all keyword arguments in function
call.
callable_name: str. Name of method type.
keyword_args_in_funcdef: list(str). Name of all keyword arguments in
function definition.
"""
for arg in keyword_args:
# TODO(#10038): Fix the check to cover below case as well.
# If there is *args and **kwargs in the function definition skip the
# check because we can use keywords arguments in function call even
# if **kwargs is present in the function definition. See Example:
# Function def -> def func(entity_id, *args, **kwargs):
# Function call -> func(entity_id='1', a=1, b=2, c=3)
# By parsing calling method we get
# keyword_arguments = entity_id, a, b, c.
# From the function definition, we will get keyword_arguments = []
# Now we do not have a way to identify which one is a keyword
# argument and which one is not.
if not called.args.kwarg and callable_name != 'constructor':
if not arg in keyword_args_in_funcdef:
# This try/except block tries to get the function
# name.
try:
func_name = node.func.attrname
except AttributeError:
func_name = node.func.name
self.add_message(
'arg-name-for-non-keyword-arg', node=node,
args=(repr(arg), callable_name, func_name))
def visit_call(self, node):
"""Visits each function call in a lint check.
Args:
node: Call. The current function call node.
"""
called = checker_utils.safe_infer(node.func)
try:
# For the rationale behind the Pylint pragma below,
# see https://stackoverflow.com/a/35701863/8115428
called, implicit_args, callable_name = (
typecheck._determine_callable(called)) # pylint: disable=protected-access
except ValueError:
return
if called.args.args is None:
# Built-in functions have no argument information.
return
if len(called.argnames()) != len(set(called.argnames())):
return
# Build the set of keyword arguments and count the positional arguments.
call_site = astroid.arguments.CallSite.from_call(node)
num_positional_args = len(call_site.positional_arguments)
keyword_args = list(call_site.keyword_arguments.keys())
already_filled_positionals = getattr(called, 'filled_positionals', 0)
already_filled_keywords = getattr(called, 'filled_keywords', {})
keyword_args += list(already_filled_keywords)
num_positional_args += already_filled_positionals
num_positional_args += implicit_args
# Analyze the list of formal parameters.
num_mandatory_parameters = len(called.args.args) - len(<|fim▁hole|>
parameters = []
parameter_name_to_index = {}
for i, arg in enumerate(called.args.args):
if isinstance(arg, astroid.Tuple):
name = None
else:
assert isinstance(arg, astroid.AssignName)
name = arg.name
parameter_name_to_index[name] = i
if i >= num_mandatory_parameters:
defval = called.args.defaults[i - num_mandatory_parameters]
else:
defval = None
parameters.append([(name, defval), False])
num_positional_args_unused = num_positional_args
# The list below will store all the keyword arguments present in the
# function definition.
keyword_args_in_funcdef = []
# Check that all parameters with a default value have
# been called explicitly.
for [(name, defval), _] in parameters:
if defval:
keyword_args_in_funcdef.append(name)
num_positional_args_unused = (
self._check_non_explicit_keyword_args(
node, name, callable_name, keyword_args,
num_positional_args_unused, num_mandatory_parameters))
self._check_argname_for_nonkeyword_arg(
node, called, callable_name, keyword_args, keyword_args_in_funcdef)
class HangingIndentChecker(checkers.BaseChecker):
"""Custom pylint checker which checks for break after parenthesis in case
of hanging indentation.
"""
__implements__ = interfaces.ITokenChecker
name = 'hanging-indent'
priority = -1
msgs = {
'C0002': (
(
'There should be a break after parenthesis when content within '
'parenthesis spans multiple lines.'),
'no-break-after-hanging-indent',
(
'If something within parenthesis extends along multiple lines, '
'break after opening parenthesis.')
),
}
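# Editorial sketch (hypothetical call, not part of the original checker):
# content spanning multiple lines inside parentheses must break right after
# the opening parenthesis.
#     result = compute_total(first_value,      # flagged: no break after '('
#         second_value)
#     result = compute_total(                  # accepted: break after '('
#         first_value, second_value)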
def process_tokens(self, tokens):
"""Process tokens to check if there is a line break after the bracket.
Args:
tokens: astroid.Tokens. Object to process tokens.
"""
escape_character_indicator = b'\\'
string_indicator = b'\''
excluded = False
for (token_type, token, (line_num, _), _, line) in tokens:
# Check if token type is an operator and is either a
# left parenthesis '(' or a right parenthesis ')'.
if token_type == tokenize.OP and (
token == b'(' or token == b')'):
line = line.strip()
# Exclude 'if', 'elif', 'while' statements.
if line.startswith((b'if ', b'while ', b'elif ')):
excluded = True
# Skip check if there is a comment at the end of line.
if excluded:
split_line = line.split()
if '#' in split_line:
comment_index = split_line.index('#')
if split_line[comment_index - 1].endswith(b'):'):
excluded = False
elif line.endswith(b'):'):
excluded = False
if excluded:
continue
bracket_count = 0
line_length = len(line)
escape_character_found = False
in_string = False
for char_num in python_utils.RANGE(line_length):
char = line[char_num]
if in_string and (
char == escape_character_indicator or
escape_character_found):
escape_character_found = not escape_character_found
continue
# Check if we found the string indicator and flip the
# in_string boolean.
if char == string_indicator:
in_string = not in_string
# Ignore anything inside a string.
if in_string:
continue
if char == b'(':
if bracket_count == 0:
position = char_num
bracket_count += 1
elif char == b')' and bracket_count > 0:
bracket_count -= 1
if bracket_count > 0 and position + 1 < line_length:
# Allow the use of '[', ']', '{', '}' after the parenthesis.
separators = set('[{( ')
if line[line_length - 1] in separators:
continue
content = line[position + 1:]
# Skip check if there is nothing after the bracket.
split_content = content.split()
# Skip check if there is a comment at the end of line.
if '#' in split_content:
comment_index = split_content.index('#')
if comment_index == 0:
continue
else:
if split_content[comment_index - 1].endswith(b'('):
continue
self.add_message(
'no-break-after-hanging-indent', line=line_num)
# The following class was derived from
# https://github.com/PyCQA/pylint/blob/377cc42f9e3116ff97cddd4567d53e9a3e24ebf9/pylint/extensions/docparams.py#L26
class DocstringParameterChecker(checkers.BaseChecker):
"""Checker for Sphinx, Google, or Numpy style docstrings
* Check that all function, method and constructor parameters are mentioned
in the params and types part of the docstring. Constructor parameters
can be documented in either the class docstring or ``__init__`` docstring,
but not both.
* Check that there are no naming inconsistencies between the signature and
the documentation, i.e. also report documented parameters that are missing
in the signature. This is important to find cases where parameters are
renamed only in the code, not in the documentation.
* Check that all explicitly raised exceptions in a function are documented
in the function docstring. Caught exceptions are ignored.
Args:
linter: Pylinter. The linter object.
"""
__implements__ = interfaces.IAstroidChecker
name = 'parameter_documentation'
msgs = {
'W9005': (
'"%s" has constructor parameters '
'documented in class and __init__',
'multiple-constructor-doc',
'Please remove parameter declarations '
'in the class or constructor.'),
'W9006': (
'"%s" not documented as being raised',
'missing-raises-doc',
'Please document exceptions for '
'all raised exception types.'),
'W9008': (
'Redundant returns documentation',
'redundant-returns-doc',
'Please remove the return/rtype '
'documentation from this method.'),
'W9010': (
'Redundant yields documentation',
'redundant-yields-doc',
'Please remove the yields documentation from this method.'),
'W9011': (
'Missing return documentation',
'missing-return-doc',
'Please add documentation about what this method returns.',
{'old_names': [('W9007', 'missing-returns-doc')]}),
'W9012': (
'Missing return type documentation',
'missing-return-type-doc',
'Please document the type returned by this method.',
# We can't use the same old_name for two different warnings
# {'old_names': [('W9007', 'missing-returns-doc')]}.
),
'W9013': (
'Missing yield documentation',
'missing-yield-doc',
'Please add documentation about what this generator yields.',
{'old_names': [('W9009', 'missing-yields-doc')]}),
'W9014': (
'Missing yield type documentation',
'missing-yield-type-doc',
'Please document the type yielded by this method.',
# We can't use the same old_name for two different warnings
# {'old_names': [('W9009', 'missing-yields-doc')]}.
),
'W9015': (
'"%s" missing in parameter documentation',
'missing-param-doc',
'Please add parameter declarations for all parameters.',
{'old_names': [('W9003', 'missing-param-doc')]}),
'W9016': (
'"%s" missing in parameter type documentation',
'missing-type-doc',
'Please add parameter type declarations for all parameters.',
{'old_names': [('W9004', 'missing-type-doc')]}),
'W9017': (
'"%s" differing in parameter documentation',
'differing-param-doc',
'Please check parameter names in declarations.',
),
'W9018': (
'"%s" differing in parameter type documentation',
'differing-type-doc',
'Please check parameter names in type declarations.',
),
'W9019': (
'Line starting with "%s" requires 4 space indentation relative to'
' args line indentation',
'4-space-indentation-for-arg-parameters-doc',
'Please use 4 space indentation in parameter definitions relative'
' to the args line indentation.'
),
'W9020': (
'Line starting with "%s" requires 8 space indentation relative to'
' args line indentation',
'8-space-indentation-for-arg-in-descriptions-doc',
'Please indent wrap-around descriptions by 8 relative to the args'
' line indentation.'
),
'W9021': (
'Args: indentation is incorrect, must be at the outermost'
' indentation level.',
'incorrect-indentation-for-arg-header-doc',
'Please indent args line to the outermost indentation level.'
),
'W9022': (
'4 space indentation in docstring.',
'4-space-indentation-in-docstring',
'Please use 4 space indentation for parameters relative to section'
' headers.'
),
'W9023': (
'8 space indentation in docstring.',
'8-space-indentation-in-docstring',
'Please use 8 space indentation in wrap around messages'
' relative to section headers.'
),
'W9024': (
'Raises section should be in the following form: Exception_name. '
'Description.',
'malformed-raises-section',
'The parameter is incorrectly formatted.'
),
'W9025': (
'Period is not used at the end of the docstring.',
'no-period-used',
'Please use a period at the end of the docstring.'
),
'W9026': (
'Multiline docstring should end with a new line.',
'no-newline-used-at-end',
'Please end multiline docstring with a new line.'
),
'W9027': (
'Single line docstring should not span two lines.',
'single-line-docstring-span-two-lines',
'Please do not use two lines for a single line docstring. '
'If line length exceeds 80 characters, '
'convert the single line docstring to a multiline docstring.'
),
'W9028': (
'Empty line before the end of multi-line docstring.',
'empty-line-before-end',
'Please do not use empty line before '
'the end of the multi-line docstring.'
),
'W9029': (
'Space after """ in docstring.',
'space-after-triple-quote',
'Please do not use space after """ in docstring.'
),
'W9030': (
'Missing single newline below class docstring.',
'newline-below-class-docstring',
'Please add a single newline below class docstring.'
),
'W9031': (
'Files must have a single newline above args in doc string.',
'single-space-above-args',
'Please enter a single newline above args in doc string.'
),
'W9032': (
'Files must have a single newline above returns in doc string.',
'single-space-above-returns',
'Please enter a single newline above returns in doc string.'
),
'W9033': (
'Files must have a single newline above raises in doc string.',
'single-space-above-raises',
'Please enter a single newline above raises in doc string.'
),
'W9034': (
'Files must have a single newline above yield in doc string.',
'single-space-above-yield',
'Please enter a single newline above yield in doc string.'
),
'W9035': (
'Arguments should be in following form: variable_name: typeinfo. '
'Description.',
'malformed-args-section',
'The parameter is incorrectly formatted.'
),
'W9036': (
'Returns should be in the following form: typeinfo. Description.',
'malformed-returns-section',
'The parameter is incorrectly formatted.'
),
'W9037': (
'Yields should be in the following form: typeinfo. Description.',
'malformed-yields-section',
'The parameter is incorrectly formatted.'
),
'W9038': (
'Arguments starting with *args should be formatted in the following'
' form: *args: list(*). Description.',
'malformed-args-argument',
'The parameter is incorrectly formatted.'
)
}
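# Editorial sketch of a docstring that satisfies the conventions enforced by
# this checker (hypothetical function, not taken from the codebase):
#     def add_numbers(first, second):
#         """Adds two numbers.
#
#         Args:
#             first: int. The first addend.
#             second: int. The second addend.
#
#         Returns:
#             int. The sum of the two addends.
#         """
#         return first + second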
options = (
(
'accept-no-param-doc',
{'default': True, 'type': 'yn', 'metavar': '<y or n>',
'help': 'Whether to accept totally missing parameter '
'documentation in the docstring of a '
'function that has parameters.'
}),
(
'accept-no-raise-doc',
{'default': True, 'type': 'yn', 'metavar': '<y or n>',
'help': 'Whether to accept totally missing raises '
'documentation in the docstring of a function that '
'raises an exception.'
}),
(
'accept-no-return-doc',
{'default': True, 'type': 'yn', 'metavar': '<y or n>',
'help': 'Whether to accept totally missing return '
'documentation in the docstring of a function that '
'returns a statement.'
}),
(
'accept-no-yields-doc',
{'default': True, 'type': 'yn', 'metavar': '<y or n>',
'help': 'Whether to accept totally missing yields '
'documentation in the docstring of a generator.'
}),
)
priority = -2
constructor_names = {'__init__', '__new__'}
not_needed_param_in_docstring = {'self', 'cls'}
docstring_sections = {'Raises:', 'Returns:', 'Yields:'}
# Docstring section headers split up into arguments, returns, yields
# and raises sections signifying that we are currently parsing the
# corresponding section of that docstring.
DOCSTRING_SECTION_RETURNS = 'returns'
DOCSTRING_SECTION_YIELDS = 'yields'
DOCSTRING_SECTION_RAISES = 'raises'
def visit_classdef(self, node):
"""Visit each class definition in a module and check if there is a
single new line below each class docstring.
Args:
node: astroid.nodes.ClassDef. Node for a class definition
in the AST.
"""
# Check if the given node has docstring.
if node.doc is None:
return
line_number = node.fromlineno
# Iterate till the start of docstring.
while True:
line = linecache.getline(node.root().file, line_number).strip()
if line.startswith((b'"""', b'\'\'\'', b'\'', b'"')):
break
else:
line_number += 1
doc_length = len(node.doc.split(b'\n'))
line_number += doc_length
first_line_after_doc = linecache.getline(
node.root().file, line_number).strip()
second_line_after_doc = linecache.getline(
node.root().file, line_number + 1).strip()
if first_line_after_doc != b'':
self.add_message('newline-below-class-docstring', node=node)
elif second_line_after_doc == b'':
self.add_message('newline-below-class-docstring', node=node)
def visit_functiondef(self, node):
"""Called for function and method definitions (def).
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
"""
node_doc = docstrings_checker.docstringify(node.doc)
self.check_functiondef_params(node, node_doc)
self.check_functiondef_returns(node, node_doc)
self.check_functiondef_yields(node, node_doc)
self.check_docstring_style(node)
self.check_docstring_section_indentation(node)
self.check_typeinfo(node, node_doc)
def check_typeinfo(self, node, node_doc):
"""Checks whether all parameters in a function definition are
properly formatted.
Args:
node: astroid.node.Function. Node for a function or
method definition in the AST.
node_doc: Docstring. Pylint Docstring class instance representing
a node's docstring.
"""
# The regexes are taken from the pylint codebase and are modified
# according to our needs. Link: https://github.com/PyCQA/pylint/blob/
# e89c361668aeead9fd192d5289c186611ef779ca/pylint/extensions/
# _check_docs_utils.py#L428.
re_param_line = re.compile(
r"""
\s* \*{{0,2}}(\w+) # identifier potentially with asterisks
\s* ( [:]
\s*
({type}|\S*|[\s\S]*)
(?:,\s+optional)?
[.]+\s )+ \s*
\s* [A-Z0-9](.*)[.\]}}\)]+$ # beginning of optional description
""".format(
type=_check_docs_utils.GoogleDocstring.re_multiple_type,
), flags=re.X | re.S | re.M)
re_returns_line = re.compile(
r"""
\s* (({type}|\S*|[\s\S]*).[.]+\s)+ # identifier
\s* [A-Z0-9](.*)[.\]}}\)]+$ # beginning of description
""".format(
type=_check_docs_utils.GoogleDocstring.re_multiple_type,
), flags=re.X | re.S | re.M)
re_yields_line = re_returns_line
re_raise_line = re.compile(
r"""
\s* ({type}[.])+ # identifier
\s* [A-Z0-9](.*)[.\]}}\)]+$ # beginning of description
""".format(
type=_check_docs_utils.GoogleDocstring.re_multiple_type,
), flags=re.X | re.S | re.M)
# To get the argument entries from the Args section we need to use
# _parse_section, which extracts them for us. Since it is a private
# method, a pylint pragma is used to suppress the warning.
if node_doc.has_params():
entries = node_doc._parse_section( # pylint: disable=protected-access
_check_docs_utils.GoogleDocstring.re_param_section)
for entry in entries:
if entry.lstrip().startswith('*args') and not (
entry.lstrip().startswith('*args: list(*)')):
self.add_message('malformed-args-argument', node=node)
match = re_param_line.match(entry)
if not match:
self.add_message('malformed-args-section', node=node)
# To get the entries from the Returns section we need to use
# _parse_section, which extracts them for us. Since it is a private
# method, a pylint pragma is used to suppress the warning.
if node_doc.has_returns():
entries = node_doc._parse_section( # pylint: disable=protected-access
_check_docs_utils.GoogleDocstring.re_returns_section)
entries = [''.join(entries)]
for entry in entries:
match = re_returns_line.match(entry)
if not match:
self.add_message('malformed-returns-section', node=node)
# To get the entries from the Yields section we need to use
# _parse_section, which extracts them for us. Since it is a private
# method, a pylint pragma is used to suppress the warning.
if node_doc.has_yields():
entries = node_doc._parse_section( # pylint: disable=protected-access
_check_docs_utils.GoogleDocstring.re_yields_section)
entries = [''.join(entries)]
for entry in entries:
match = re_yields_line.match(entry)
if not match:
self.add_message('malformed-yields-section', node=node)
# To get the exception entries from the Raises section we need to use
# _parse_section, which extracts them for us. Since it is a private
# method, a pylint pragma is used to suppress the warning.
if node_doc.exceptions():
entries = node_doc._parse_section( # pylint: disable=protected-access
_check_docs_utils.GoogleDocstring.re_raise_section)
for entry in entries:
match = re_raise_line.match(entry)
if not match:
self.add_message('malformed-raises-section', node=node)
def check_functiondef_params(self, node, node_doc):
"""Checks whether all parameters in a function definition are
documented.
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
node_doc: Docstring. Pylint Docstring class instance representing
a node's docstring.
"""
node_allow_no_param = None
if node.name in self.constructor_names:
class_node = checker_utils.node_frame_class(node)
if class_node is not None:
class_doc = docstrings_checker.docstringify(class_node.doc)
self.check_single_constructor_params(
class_doc, node_doc, class_node)
# __init__ or class docstrings can have no parameters documented
# as long as the other documents them.
node_allow_no_param = (
class_doc.has_params() or
class_doc.params_documented_elsewhere() or
None
)
class_allow_no_param = (
node_doc.has_params() or
node_doc.params_documented_elsewhere() or
None
)
self.check_arguments_in_docstring(
class_doc, node.args, class_node,
accept_no_param_doc=class_allow_no_param)
self.check_arguments_in_docstring(
node_doc, node.args, node,
accept_no_param_doc=node_allow_no_param)
def check_docstring_style(self, node):
"""It fetches a function node and extract the class node from function
node if it is inside a class body and passes it to
check_docstring_structure which checks whether the docstring has a
space at the beginning and a period at the end.
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
"""
if node.name in self.constructor_names:
class_node = checker_utils.node_frame_class(node)
if class_node is not None:
self.check_docstring_structure(class_node)
self.check_docstring_structure(node)
def check_newline_above_args(self, node, docstring):
"""Checks to ensure that there is a single space above the
argument parameters in the docstring.
Args:
node: astroid.node.Function. Node for a function or method
definition in the AST.
docstring: list(str). Function docstring split by newlines.
"""
blank_line_counter = 0
for line in docstring:
line = line.strip()
if line == b'':
blank_line_counter += 1
if blank_line_counter == 0 or blank_line_counter > 1:
if line == b'Args:':
self.add_message(
'single-space-above-args', node=node)
elif line == b'Returns:':
self.add_message(
'single-space-above-returns', node=node)
elif line == b'Raises:':
self.add_message(
'single-space-above-raises', node=node)
elif line == b'Yields:':
self.add_message(
'single-space-above-yield', node=node)
if line != b'':
blank_line_counter = 0
def check_docstring_structure(self, node):
"""Checks whether the docstring has the correct structure i.e.
do not have space at the beginning and have a period at the end of
docstring.
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
"""
if node.doc:
docstring = node.doc.splitlines()
# Check for space after """ in docstring.
if docstring[0][0] == b' ':
self.add_message('space-after-triple-quote', node=node)
# Check if single line docstring span two lines.
if len(docstring) == 2 and docstring[-1].strip() == b'':
self.add_message(
'single-line-docstring-span-two-lines', node=node)
# Check for punctuation at end of a single line docstring.
elif (len(docstring) == 1 and docstring[-1][-1] not in
ALLOWED_TERMINATING_PUNCTUATIONS):
self.add_message('no-period-used', node=node)
# Check for punctuation at the end of a multiline docstring.
elif len(docstring) > 1:
if docstring[-2].strip() == b'':
self.add_message('empty-line-before-end', node=node)
elif docstring[-1].strip() != b'':
self.add_message(
'no-newline-used-at-end', node=node)
elif (docstring[-2][-1] not in
ALLOWED_TERMINATING_PUNCTUATIONS and not
any(word in docstring[-2] for word in EXCLUDED_PHRASES)):
self.add_message('no-period-used', node=node)
def check_docstring_section_indentation(self, node):
"""Checks whether the function argument definitions ("Args": section,
"Returns": section, "Yield": section, "Raises: section) are indented
properly. Parameters should be indented by 4 relative to the 'Args:'
'Return:', 'Raises:', 'Yield:' line and any wrap-around descriptions
should be indented by 8.
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
"""
arguments_node = node.args
expected_argument_names = set(
None if (arg.name in self.not_needed_param_in_docstring)
else (arg.name + ':') for arg in arguments_node.args)
currently_in_args_section = False
# When we are in the args section and a line ends in a colon,
# we can ignore the indentation styling in the next section of
# description, hence a freeform section.
currently_in_freeform_section = False
args_indentation = 0
if node.doc:
current_docstring_section = None
in_description = False
args_indentation_in_spaces = 0
docstring = node.doc.splitlines()
self.check_newline_above_args(node, docstring)
for line in docstring:
stripped_line = line.lstrip()
current_line_indentation = (
len(line) - len(stripped_line))
parameter = re.search(
'^[^:]+:',
stripped_line)
# Check for empty lines and ignore them.
if len(line.strip()) == 0:
continue
# If line starts with Returns: , it is the header of a Returns
# subsection.
if stripped_line.startswith('Returns:'):
current_docstring_section = (
self.DOCSTRING_SECTION_RETURNS)
in_freeform_section = False
in_description = False
args_indentation_in_spaces = current_line_indentation
# If line starts with Raises: , it is the header of a Raises
# subsection.
elif stripped_line.startswith('Raises:'):
current_docstring_section = (
self.DOCSTRING_SECTION_RAISES)
in_freeform_section = False
in_description = False
args_indentation_in_spaces = current_line_indentation
# If line starts with Yields: , it is the header of a Yields
# subsection.
elif stripped_line.startswith('Yields:'):
current_docstring_section = (
self.DOCSTRING_SECTION_YIELDS)
in_freeform_section = False
in_description = False
args_indentation_in_spaces = current_line_indentation
# Check if we are in a docstring raises section.
elif (current_docstring_section and
(current_docstring_section ==
self.DOCSTRING_SECTION_RAISES)):
# In the raises section, if we see this regex expression, we
# can assume it's the start of a new parameter definition.
# We check the indentation of the parameter definition.
if re.search(br'^[a-zA-Z0-9_\.\*]+[.] ',
stripped_line):
if current_line_indentation != (
args_indentation_in_spaces + 4):
self.add_message(
'4-space-indentation-in-docstring',
node=node)
in_description = True
# In a description line that is wrapped around (doesn't
# start off with the parameter name), we need to make sure
# the indentation is 8.
elif in_description:
if current_line_indentation != (
args_indentation_in_spaces + 8):
self.add_message(
'8-space-indentation-in-docstring',
node=node)
# Check if we are in a docstring returns or yields section.
# NOTE: Each function should only have one yield or return
# object. If a tuple is returned, wrap both in a tuple parameter
# section.
elif (current_docstring_section and
(current_docstring_section ==
self.DOCSTRING_SECTION_RETURNS)
or (current_docstring_section ==
self.DOCSTRING_SECTION_YIELDS)):
# Check for the start of a new parameter definition in the
# format "type (elaboration)." and check the indentation.
if (re.search(br'^[a-zA-Z_() -:,\*]+\.',
stripped_line) and not in_description):
if current_line_indentation != (
args_indentation_in_spaces + 4):
self.add_message(
'4-space-indentation-in-docstring',
node=node)
# If the line ends with a colon, we can assume the rest
# of the section is free form.
if re.search(br':$', stripped_line):
in_freeform_section = True
in_description = True
# In a description line of a returns or yields, we keep the
# indentation the same as the definition line.
elif in_description:
if (current_line_indentation != (
args_indentation_in_spaces + 4)
and not in_freeform_section):
self.add_message(
'4-space-indentation-in-docstring',
node=node)
# If the description line ends with a colon, we can
# assume the rest of the section is free form.
if re.search(br':$', stripped_line):
in_freeform_section = True
# Check for the start of an Args: section and check the correct
# indentation.
elif stripped_line.startswith('Args:'):
args_indentation = current_line_indentation
# The current args indentation is incorrect.
if current_line_indentation % 4 != 0:
self.add_message(
'incorrect-indentation-for-arg-header-doc',
node=node)
# Since other checks are based on relative indentation,
# we need to fix this indentation first.
break
currently_in_args_section = True
# Check for a parameter section header by checking that the
# parameter is in the function arguments set. Arguments that start
# with * (varargs/kwargs) do not appear in the node args list, so
# those are handled separately as well.
elif (currently_in_args_section and parameter
and ((
parameter.group(0).strip('*')
in expected_argument_names) or
re.search(
br'\*[^ ]+: ',
stripped_line))):
words_in_line = stripped_line.split(' ')
currently_in_freeform_section = False
# Check if the current parameter section indentation is
# correct.
if current_line_indentation != (
args_indentation + 4):
# Use the first word in the line to identify the error.
beginning_of_line = (
words_in_line[0]
if words_in_line else None)
self.add_message(
'4-space-indentation-for-arg-parameters-doc',
node=node,
args=(beginning_of_line))
# If the line ends with a colon, that means
# the next subsection of description is free form.
if line.endswith(':'):
currently_in_freeform_section = True
# All other lines can be treated as description.
elif currently_in_args_section:
# If it is not a freeform section, we check the indentation.
words_in_line = stripped_line.split(' ')
if (not currently_in_freeform_section
and current_line_indentation != (
args_indentation + 8)):
# Use the first word in the line to identify the error.
beginning_of_line = (
words_in_line[0]
if words_in_line else None)
self.add_message(
'8-space-indentation-for-arg-in-descriptions-doc',
node=node,
args=(beginning_of_line))
# If the line ends with a colon, that
# means the next subsection of description is free form.
if line.endswith(':'):
currently_in_freeform_section = True
def check_functiondef_returns(self, node, node_doc):
"""Checks whether a function documented with a return value actually has
a return statement in its definition.
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
node_doc: Docstring. Pylint Docstring class instance representing
a node's docstring.
"""
if not node_doc.supports_yields and node.is_generator():
return
return_nodes = node.nodes_of_class(astroid.Return)
if ((
node_doc.has_returns() or node_doc.has_rtype()) and
not any(
docstrings_checker.returns_something(
ret_node) for ret_node in return_nodes)):
self.add_message(
'redundant-returns-doc',
node=node)
def check_functiondef_yields(self, node, node_doc):
"""Checks whether a function documented with a yield value actually has
a yield statement in its definition.
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
node_doc: Docstring. Pylint Docstring class instance representing
a node's docstring.
"""
if not node_doc.supports_yields:
return
if ((node_doc.has_yields() or node_doc.has_yields_type()) and
not node.is_generator()):
self.add_message(
'redundant-yields-doc',
node=node)
def visit_raise(self, node):
"""Visits a function node that raises an exception and verifies that all
exceptions raised in the function definition are documented.
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
"""
func_node = node.frame()
if not isinstance(func_node, astroid.FunctionDef):
return
expected_excs = docstrings_checker.possible_exc_types(node)
if not expected_excs:
return
if not func_node.doc:
# If this is a property setter,
# the property should have the docstring instead.
setters_property = docstrings_checker.get_setters_property(
func_node)
if setters_property:
func_node = setters_property
doc = docstrings_checker.docstringify(func_node.doc)
if not doc.is_valid():
if doc.doc:
self._handle_no_raise_doc(expected_excs, func_node)
return
found_excs = doc.exceptions()
missing_excs = expected_excs - found_excs
self._add_raise_message(missing_excs, func_node)
def visit_return(self, node):
"""Visits a function node that contains a return statement and verifies
that the return value and the return type are documented.
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
"""
if not docstrings_checker.returns_something(node):
return
func_node = node.frame()
doc = docstrings_checker.docstringify(func_node.doc)
if not doc.is_valid() and self.config.accept_no_return_doc:
return
is_property = checker_utils.decorated_with_property(func_node)
if not (doc.has_returns() or
(doc.has_property_returns() and is_property)):
self.add_message(
'missing-return-doc',
node=func_node
)
if not (doc.has_rtype() or
(doc.has_property_type() and is_property)):
self.add_message(
'missing-return-type-doc',
node=func_node
)
def visit_yield(self, node):
"""Visits a function node that contains a yield statement and verifies
that the yield value and the yield type are documented.
Args:
node: astroid.scoped_nodes.Function. Node for a function or
method definition in the AST.
"""
func_node = node.frame()
doc = docstrings_checker.docstringify(func_node.doc)
if not doc.is_valid() and self.config.accept_no_yields_doc:
return
doc_has_yields = doc.has_yields()
doc_has_yields_type = doc.has_yields_type()
if not doc_has_yields:
self.add_message(
'missing-yield-doc',
node=func_node
)
if not doc_has_yields_type:
self.add_message(
'missing-yield-type-doc',
node=func_node
)
def visit_yieldfrom(self, node):
"""Visits a function node that contains a yield from statement and
verifies that the yield from value and the yield from type are
documented.
Args:
node: astroid.scoped_nodes.Function. Node to access module content.
"""
self.visit_yield(node)
def check_arguments_in_docstring(
self, doc, arguments_node, warning_node, accept_no_param_doc=None):
"""Check that all parameters in a function, method or class constructor
on the one hand and the parameters mentioned in the parameter
documentation (e.g. the Sphinx tags 'param' and 'type') on the other
hand are consistent with each other.
* Undocumented parameters except 'self' are noticed.
* Undocumented parameter types except for 'self' and the ``*<args>``
and ``**<kwargs>`` parameters are noticed.
* Parameters mentioned in the parameter documentation that don't or no
longer exist in the function parameter list are noticed.
* If the text "For the parameters, see" or "For the other parameters,
see" (ignoring additional whitespace) is mentioned in the docstring,
missing parameter documentation is tolerated.
* If there's no Sphinx style, Google style or NumPy style parameter
documentation at all, i.e. ``:param`` is never mentioned etc., the
checker assumes that the parameters are documented in another format
and the absence is tolerated.
Args:
doc: str. Docstring for the function, method or class.
arguments_node: astroid.scoped_nodes.Arguments. Arguments node
for the function, method or class constructor.
warning_node: astroid.scoped_nodes.Node. The node to assign
the warnings to.
accept_no_param_doc: bool|None. Whether or not to allow
no parameters to be documented. If None then
this value is read from the configuration.
"""
# Tolerate missing param or type declarations if there is a link to
# another method carrying the same name.
if not doc.doc:
return
if accept_no_param_doc is None:
accept_no_param_doc = self.config.accept_no_param_doc
tolerate_missing_params = doc.params_documented_elsewhere()
# Collect the function arguments.
expected_argument_names = set(
arg.name for arg in arguments_node.args)
expected_argument_names.update(
arg.name for arg in arguments_node.kwonlyargs)
not_needed_type_in_docstring = (
self.not_needed_param_in_docstring.copy())
if arguments_node.vararg is not None:
expected_argument_names.add(arguments_node.vararg)
not_needed_type_in_docstring.add(arguments_node.vararg)
if arguments_node.kwarg is not None:
expected_argument_names.add(arguments_node.kwarg)
not_needed_type_in_docstring.add(arguments_node.kwarg)
params_with_doc, params_with_type = doc.match_param_docs()
# Tolerate no parameter documentation at all.
if (not params_with_doc and not params_with_type
and accept_no_param_doc):
tolerate_missing_params = True
def _compare_missing_args(
found_argument_names, message_id, not_needed_names):
"""Compare the found argument names with the expected ones and
generate a message if there are arguments missing.
Args:
found_argument_names: set. Argument names found in the
docstring.
message_id: str. Pylint message id.
not_needed_names: set(str). Names that may be omitted.
"""
if not tolerate_missing_params:
missing_argument_names = (
(expected_argument_names - found_argument_names)
- not_needed_names)
if missing_argument_names:
self.add_message(
message_id,
args=(', '.join(
sorted(missing_argument_names)),),
node=warning_node)
def _compare_different_args(
found_argument_names, message_id, not_needed_names):
"""Compare the found argument names with the expected ones and
generate a message if there are extra arguments found.
Args:
found_argument_names: set. Argument names found in the
docstring.
message_id: str. Pylint message id.
not_needed_names: set(str). Names that may be omitted.
"""
differing_argument_names = (
(expected_argument_names ^ found_argument_names)
- not_needed_names - expected_argument_names)
if differing_argument_names:
self.add_message(
message_id,
args=(', '.join(
sorted(differing_argument_names)),),
node=warning_node)
_compare_missing_args(
params_with_doc, 'missing-param-doc',
self.not_needed_param_in_docstring)
_compare_missing_args(
params_with_type, 'missing-type-doc', not_needed_type_in_docstring)
_compare_different_args(
params_with_doc, 'differing-param-doc',
self.not_needed_param_in_docstring)
_compare_different_args(
params_with_type, 'differing-type-doc',
not_needed_type_in_docstring)
def check_single_constructor_params(self, class_doc, init_doc, class_node):
"""Checks whether a class and corresponding init() method are
documented. If both of them are documented, it adds an error message.
Args:
class_doc: Docstring. Pylint docstring class instance representing
a class's docstring.
init_doc: Docstring. Pylint docstring class instance representing
a method's docstring, the method here is the constructor method
for the above class.
class_node: astroid.scoped_nodes.Function. Node for class definition
in AST.
"""
if class_doc.has_params() and init_doc.has_params():
self.add_message(
'multiple-constructor-doc',
args=(class_node.name,),
node=class_node)
def _handle_no_raise_doc(self, excs, node):
"""Checks whether the raised exception in a function has been
documented, add a message otherwise.
Args:
excs: list(str). A list of exception types.
node: astroid.scoped_nodes.Function. Node to access module content.
"""
if self.config.accept_no_raise_doc:
return
self._add_raise_message(excs, node)
def _add_raise_message(self, missing_excs, node):
"""Adds a message on :param:`node` for the missing exception type.
Args:
missing_excs: list(Exception). A list of missing exception types.
node: astroid.node_classes.NodeNG. The node show the message on.
"""
if not missing_excs:
return
self.add_message(
'missing-raises-doc',
args=(', '.join(sorted(missing_excs)),),
node=node)
class ImportOnlyModulesChecker(checkers.BaseChecker):
"""Checker for import-from statements. It checks that
modules are only imported.
"""
__implements__ = interfaces.IAstroidChecker
name = 'import-only-modules'
priority = -1
msgs = {
'C0003': (
'Import \"%s\" from \"%s\" is not a module.',
'import-only-modules',
'Modules should only be imported.',
),
}
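# Editorial sketch (hypothetical module and symbol names): only modules may be
# imported with import-from statements.
#     from core.domain import exp_services            # accepted: a module
#     from core.domain.exp_services import save_new   # flagged: not a module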
@checker_utils.check_messages('import-only-modules')
def visit_importfrom(self, node):
"""Visits all import-from statements in a python file and checks that
modules are imported. It then adds a message accordingly.
Args:
node: astroid.node_classes.ImportFrom. Node for a import-from
statement in the AST.
"""
try:
imported_module = node.do_import_module(node.modname)
except astroid.AstroidBuildingException:
return
if node.level is None:
modname = node.modname
else:
modname = '.' * node.level + node.modname
for (name, _) in node.names:
if name == 'constants':
continue
try:
imported_module.import_module(name, True)
except astroid.AstroidImportError:
self.add_message(
'import-only-modules',
node=node,
args=(name, modname),
)
class BackslashContinuationChecker(checkers.BaseChecker):
"""Custom pylint checker which checks that backslash is not used
for continuation.
"""
__implements__ = interfaces.IRawChecker
name = 'backslash-continuation'
priority = -1
msgs = {
'C0004': (
(
'Backslash should not be used to break continuation lines. '
'Use braces to break long lines.'),
'backslash-continuation',
'Use braces to break long lines instead of backslash.'
),
}
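# Editorial sketch (hypothetical expression): backslash continuations are
# flagged; braces or parentheses should be used to break long lines instead.
#     total = first_value + \        # flagged
#         second_value
#     total = (                      # accepted
#         first_value + second_value)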
def process_module(self, node):
"""Process a module.
Args:
node: astroid.scoped_nodes.Function. Node to access module content.
"""
file_content = read_from_node(node)
for (line_num, line) in enumerate(file_content):
if line.rstrip(b'\r\n').endswith(b'\\'):
self.add_message(
'backslash-continuation', line=line_num + 1)
class FunctionArgsOrderChecker(checkers.BaseChecker):
"""Custom pylint checker which checks the order of arguments in function
definition.
"""
__implements__ = interfaces.IAstroidChecker
name = 'function-args-order'
priority = -1
msgs = {
'C0005': (
'Wrong order of arguments in function definition '
'\'self\' should come first.',
'function-args-order-self',
'\'self\' should come first',),
'C0006': (
'Wrong order of arguments in function definition '
'\'cls\' should come first.',
'function-args-order-cls',
'\'cls\' should come first'),
}
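# Editorial sketch (hypothetical methods): 'self' and 'cls' must come first.
#     def compute(self, value): ...    # accepted
#     def compute(value, self): ...    # flagged: function-args-order-self
#     def build(value, cls): ...       # flagged: function-args-order-cls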
def visit_functiondef(self, node):
"""Visits every function definition in the python file and check the
function arguments order. It then adds a message accordingly.
Args:
node: astroid.scoped_nodes.Function. Node for a function or method
definition in the AST.
"""
args_list = [args.name for args in node.args.args]
if 'self' in args_list and args_list[0] != 'self':
self.add_message('function-args-order-self', node=node)
elif 'cls' in args_list and args_list[0] != 'cls':
self.add_message('function-args-order-cls', node=node)
class RestrictedImportChecker(checkers.BaseChecker):
"""Custom pylint checker which checks layers importing modules
from their respective restricted layers.
"""
__implements__ = interfaces.IAstroidChecker
name = 'invalid-import'
priority = -1
msgs = {
'C0009': (
'Importing %s layer in %s layer is prohibited.',
'invalid-import',
'Storage layer and domain layer must not import '
'domain layer and controller layer respectively.'),
}
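# Editorial sketch (hypothetical file locations): imports crossing the
# restricted layers that this checker targets.
#     # Inside a core/storage/* module:
#     from core.domain import user_services    # flagged: domain in storage
#     # Inside a core/domain/* module:
#     from core.controllers import base        # flagged: controller in domain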
def visit_import(self, node):
"""Visits every import statement in the file.
Args:
node: astroid.node_classes.Import. Node for a import statement
in the AST.
"""
modnode = node.root()
names = [name for name, _ in node.names]
# Checks import of domain layer in storage layer.
if 'oppia.core.storage' in modnode.name and not '_test' in modnode.name:
if any('core.domain' in name for name in names):
self.add_message(
'invalid-import',
node=node,
args=('domain', 'storage'),
)
# Checks import of controller layer in domain layer.
if 'oppia.core.domain' in modnode.name and not '_test' in modnode.name:
if any('core.controllers' in name for name in names):
self.add_message(
'invalid-import',
node=node,
args=('controller', 'domain'),
)
def visit_importfrom(self, node):
"""Visits all import-from statements in a python file and checks that
modules are imported. It then adds a message accordingly.
Args:
node: astroid.node_classes.ImportFrom. Node for a import-from
statement in the AST.
"""
modnode = node.root()
if 'oppia.core.storage' in modnode.name and not '_test' in modnode.name:
if 'core.domain' in node.modname:
self.add_message(
'invalid-import',
node=node,
args=('domain', 'storage'),
)
if 'oppia.core.domain' in modnode.name and not '_test' in modnode.name:
if 'core.controllers' in node.modname:
self.add_message(
'invalid-import',
node=node,
args=('controller', 'domain'),
)
class SingleCharAndNewlineAtEOFChecker(checkers.BaseChecker):
"""Checker for single character files and newline at EOF."""
__implements__ = interfaces.IRawChecker
name = 'newline-at-eof'
priority = -1
msgs = {
'C0007': (
'Files should end in a single newline character.',
'newline-at-eof',
'Please enter a single newline at the end of the file.'),
'C0008': (
'Only one character in file',
'only-one-character',
'Files with only one character are not allowed.'),
}
def process_module(self, node):
"""Process a module.
Args:
node: astroid.scoped_nodes.Function. Node to access module content.
"""
file_content = read_from_node(node)
file_length = len(file_content)
if file_length == 1 and len(file_content[0]) == 1:
self.add_message('only-one-character', line=file_length)
if file_length >= 2 and not re.search(r'[^\n]\n', file_content[-1]):
self.add_message('newline-at-eof', line=file_length)
class SingleSpaceAfterYieldChecker(checkers.BaseChecker):
"""Checks if only one space is used after a yield statement
when applicable ('yield' is acceptable).
"""
__implements__ = interfaces.IAstroidChecker
name = 'single-space-after-yield'
priority = -1
msgs = {
'C0010': (
'Not using \'yield\' or a single space after yield statement.',
'single-space-after-yield',
'Ensure a single space is used after yield statement.',
),
}
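# Editorial sketch (hypothetical generator body): at most one space may follow
# the yield keyword, and a bare 'yield' is also acceptable.
#     yield value      # accepted
#     yield            # accepted
#     yield  value     # flagged: two spaces after yield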
def visit_yield(self, node):
"""Visit every yield statement to ensure that yield keywords are
followed by exactly one space, so matching 'yield *' where * is not a
whitespace character. Note that 'yield' is also acceptable in
cases where the user wants to yield nothing.
Args:
node: astroid.nodes.Yield. Nodes to access yield statements.
content.
"""
line_number = node.fromlineno
line = linecache.getline(node.root().file, line_number).lstrip()
if (line.startswith(b'yield') and
not re.search(br'^(yield)( \S|$|\w)', line)):
self.add_message('single-space-after-yield', node=node)
class DivisionOperatorChecker(checkers.BaseChecker):
"""Checks if division operator is used."""
__implements__ = interfaces.IAstroidChecker
name = 'division-operator-used'
priority = -1
msgs = {
'C0015': (
'Division Operator is used.',
'division-operator-used',
'Please use python_utils.divide() instead of the "/" operator'
)
}
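# Editorial sketch (hypothetical expression): division should go through
# python_utils.divide() rather than the '/' operator.
#     average = total / count                       # flagged
#     average = python_utils.divide(total, count)   # accepted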
def visit_binop(self, node):
"""Visit assign statements to ensure that the division operator('/')
is not used and python_utils.divide() is used instead.
Args:
node: astroid.node.BinOp. Node to access module content.
"""
if node.op == b'/':
self.add_message(
'division-operator-used', node=node)
class SingleLineCommentChecker(checkers.BaseChecker):
"""Checks if comments follow correct style."""
__implements__ = interfaces.ITokenChecker
name = 'incorrectly_styled_comment'
priority = -1
msgs = {
'C0016': (
'Invalid punctuation is used.',
'invalid-punctuation-used',
'Please use valid punctuation.'
),
'C0017': (
'No space is used at beginning of comment.',
'no-space-at-beginning',
'Please use single space at the beginning of comment.'
),
'C0018': (
'No capital letter is used at the beginning of comment.',
'no-capital-letter-at-beginning',
'Please use capital letter to begin the content of comment.'
)
}
options = ((
'allowed-comment-prefixes',
{
'default': ('int', 'str', 'float', 'bool', 'v'),
'type': 'csv', 'metavar': '<comma separated list>',
'help': 'List of allowed prefixes in a comment.'
}
),)
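# Editorial sketch of comments targeted by the three messages above
# (hypothetical examples):
#     #Bad comment.      -> no-space-at-beginning
#     # bad comment.     -> no-capital-letter-at-beginning
#     # Bad comment      -> invalid-punctuation-used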
def _check_space_at_beginning_of_comments(self, line, line_num):
"""Checks if the comment starts with a space.
Args:
line: str. The current line of comment.
line_num: int. Line number of the current comment.
"""
if re.search(br'^#[^\s].*$', line) and not line.startswith(b'#!'):
self.add_message(
'no-space-at-beginning', line=line_num)
def _check_comment_starts_with_capital_letter(self, line, line_num):
"""Checks if the comment starts with a capital letter.
Comments may include a lowercase character at the beginning only if they
start with version info or a data type or a variable name e.g.
"# next_line is of string type." or "# v2 version does not have
ExplorationStats Model." or "# int. The file size, in bytes.".
Args:
line: str. The current line of comment.
line_num: int. Line number of the current comment.
"""
# Check if variable name is used.
if line[1:].startswith(b' '):
starts_with_underscore = '_' in line.split()[1]
else:
starts_with_underscore = '_' in line.split()[0]
# Check if allowed prefix is used.
allowed_prefix_is_present = any(
line[2:].startswith(word) for word in
self.config.allowed_comment_prefixes)
# Check if comment contains any excluded phrase.
excluded_phrase_is_present = any(
line[1:].strip().startswith(word) for word in EXCLUDED_PHRASES)
if (re.search(br'^# [a-z].*', line) and not (
excluded_phrase_is_present or
starts_with_underscore or allowed_prefix_is_present)):
self.add_message(
'no-capital-letter-at-beginning', line=line_num)
def _check_punctuation(self, line, line_num):
"""Checks if the comment starts with a correct punctuation.
Args:
line: str. The current line of comment.
line_num: int. Line number of the current comment.
"""
excluded_phrase_is_present_at_end = any(
word in line for word in EXCLUDED_PHRASES)
# Comments must end with the proper punctuation.
last_char_is_invalid = line[-1] not in (
ALLOWED_TERMINATING_PUNCTUATIONS)
excluded_phrase_at_beginning_of_line = any(
line[1:].startswith(word) for word in EXCLUDED_PHRASES)
if (last_char_is_invalid and not (
excluded_phrase_is_present_at_end or
excluded_phrase_at_beginning_of_line)):
self.add_message('invalid-punctuation-used', line=line_num)
def process_tokens(self, tokens):
"""Custom pylint checker to ensure that comments follow correct style.
Args:
tokens: list(Token). Object to access all tokens of a module.
"""
prev_line_num = -1
comments_group_list = []
comments_index = -1
for (token_type, _, (line_num, _), _, line) in tokens:
if token_type == tokenize.COMMENT and line.strip().startswith('#'):
line = line.strip()
self._check_space_at_beginning_of_comments(line, line_num)
if prev_line_num + 1 == line_num:
comments_group_list[comments_index].append((line, line_num))
else:
comments_group_list.append([(line, line_num)])
comments_index += 1
prev_line_num = line_num
for comments in comments_group_list:
# Checks first line of comment.
self._check_comment_starts_with_capital_letter(*comments[0])
# Checks last line of comment.
self._check_punctuation(*comments[-1])
class BlankLineBelowFileOverviewChecker(checkers.BaseChecker):
"""Checks if there is a single empty line below the fileoverview docstring.
Note: The check assumes that all files have a file overview. This
assumption is justified because Pylint has an inbuilt check
(missing-docstring) for missing file overviews.
"""
__implements__ = interfaces.IAstroidChecker
name = 'space_between_imports_and_file-overview'
priority = -1
msgs = {
'C0024': (
'No empty line used below the fileoverview docstring.',
'no-empty-line-provided-below-fileoverview',
'Please provide an empty line below the fileoverview.'
),
'C0025': (
'Single empty line should be provided below the fileoverview.',
'only-a-single-empty-line-should-be-provided',
'Please provide only a single empty line below the fileoverview.'
)
}
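# Editorial sketch (hypothetical module head): exactly one blank line must
# separate the fileoverview docstring from the code below it.
#     """Overview of what this module does."""
#
#     import os        # accepted: a single blank line above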
def visit_module(self, node):
"""Visit a module to ensure that there is a blank line below
file overview docstring.
Args:
node: astroid.scoped_nodes.Function. Node to access module content.
"""
# Check if the given node has docstring.
if node.doc is None:
return
line_number = node.fromlineno
# Iterate till the start of docstring.
while True:
line = linecache.getline(node.root().file, line_number).strip()
if line.startswith((b'\'', b'"')):
break
else:
line_number += 1
doc_length = len(node.doc.split(b'\n'))
line_number += doc_length
first_line_after_doc = linecache.getline(
node.root().file, line_number).strip()
second_line_after_doc = linecache.getline(
node.root().file, line_number + 1).strip()
if first_line_after_doc != b'':
self.add_message(
'no-empty-line-provided-below-fileoverview', node=node)
elif second_line_after_doc == b'':
self.add_message(
'only-a-single-empty-line-should-be-provided', node=node)
class SingleLinePragmaChecker(checkers.BaseChecker):
"""Custom pylint checker which checks if pylint pragma is used to disable
a rule for a single line only.
"""
__implements__ = interfaces.ITokenChecker
name = 'single-line-pragma'
priority = -1
msgs = {
'C0028': (
'Pylint pragmas should be used to disable a rule '
'for a single line only',
'single-line-pragma',
'Please use pylint pragmas to disable a rule for a single line only'
)
}
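# Editorial sketch (hypothetical usage): block-level disables are flagged,
# trailing single-line pragmas are accepted.
#     # pylint: disable=invalid-name        # flagged: disables a whole region
#     def f(x): ...
#     def f(x): ...  # pylint: disable=invalid-name    # accepted: single line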
def process_tokens(self, tokens):
"""Custom pylint checker which allows paramas to disable a rule for a
single line only.
Args:
tokens: Token. Object to access all tokens of a module.
"""
for (token_type, _, (line_num, _), _, line) in tokens:
if token_type == tokenize.COMMENT:
line = line.lstrip()
# Ignore line that is enabling this check.
# Example:
# # pylint: disable=import-only-modules, single-line-pragma
# def func(a, b):
# # pylint: enable=import-only-modules, single-line-pragma
                # Now if we do not ignore the line with the 'enable'
                # statement, pylint will raise the single-line-pragma error,
                # because from here on this lint check is enabled again. So
                # we need to ignore this line.
if re.search(br'^(#\s*pylint:)', line):
if 'enable' in line and 'single-line-pragma' in line:
continue
self.add_message(
'single-line-pragma', line=line_num)
def register(linter):
"""Registers the checker with pylint.
Args:
linter: Pylinter. The Pylinter object.
"""
linter.register_checker(ExplicitKeywordArgsChecker(linter))
linter.register_checker(HangingIndentChecker(linter))
linter.register_checker(DocstringParameterChecker(linter))
linter.register_checker(ImportOnlyModulesChecker(linter))
linter.register_checker(BackslashContinuationChecker(linter))
linter.register_checker(FunctionArgsOrderChecker(linter))
linter.register_checker(RestrictedImportChecker(linter))
linter.register_checker(SingleCharAndNewlineAtEOFChecker(linter))
linter.register_checker(SingleSpaceAfterYieldChecker(linter))
linter.register_checker(DivisionOperatorChecker(linter))
linter.register_checker(SingleLineCommentChecker(linter))
linter.register_checker(BlankLineBelowFileOverviewChecker(linter))
linter.register_checker(SingleLinePragmaChecker(linter))<|fim▁end|> | called.args.defaults) |
<|file_name|>double-anchor.rs<|end_file_name|><|fim▁begin|>// check-pass
<|fim▁hole|>/// docs [label][with#anchor#error]
//~^ WARNING multiple anchors
pub struct S;<|fim▁end|> | // regression test for #73264
// should only give one error |
<|file_name|>artifacts_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package spyglass
import (
"context"
"reflect"
"testing"
"k8s.io/test-infra/prow/io"
)
func TestSpyglass_ListArtifacts(t *testing.T) {
type args struct {
src string
}
tests := []struct {
name string
args args
want []string
wantErr bool
}{
{
name: "list artifacts (old format)",
args: args{
src: "gcs/test-bucket/logs/example-ci-run/403",
},
want: []string{
"build-log.txt",
"finished.json",
"junit_01.xml",<|fim▁hole|> },
},
{
name: "list artifacts (new format)",
args: args{
src: "gs/test-bucket/logs/example-ci-run/403",
},
want: []string{
"build-log.txt",
"finished.json",
"junit_01.xml",
"long-log.txt",
"started.json",
},
},
{
name: "list artifacts without results in gs (new format)",
args: args{
src: "gs/test-bucket/logs/example-ci-run/404",
},
want: []string{
"build-log.txt",
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
fakeGCSClient := fakeGCSServer.Client()
sg := New(context.Background(), fakeJa, nil, io.NewGCSOpener(fakeGCSClient), false)
got, err := sg.ListArtifacts(context.Background(), tt.args.src)
if (err != nil) != tt.wantErr {
t.Errorf("ListArtifacts() error = %v, wantErr %v", err, tt.wantErr)
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("ListArtifacts() got = %v, want %v", got, tt.want)
}
})
}
}<|fim▁end|> | "long-log.txt",
"started.json", |
<|file_name|>23-inheritance.py<|end_file_name|><|fim▁begin|>class A:
def foo(self):
print('A.foo()')
class B(A):
def foo(self):
print('B.foo()')
class C(A):
def foo(self):
print('C.foo()')<|fim▁hole|> def foo(self):
print('D.foo()')
x = D()
print(D.__mro__) # (D, B, C, A, object)
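# With C3 linearization the MRO lists D first, then its direct bases B and C
# in the order they were declared, then the shared base A, and finally object.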
x.foo() # D.foo()<|fim▁end|> |
class D(B, C): |
<|file_name|>build.js<|end_file_name|><|fim▁begin|>(function() {
'use strict';
var getModulesList = function(modules) {
return modules.map(function(moduleName) {
return {name: moduleName};
});
};
var jsOptimize = process.env.REQUIRE_BUILD_PROFILE_OPTIMIZE !== undefined ?
process.env.REQUIRE_BUILD_PROFILE_OPTIMIZE : 'uglify2';
return {
namespace: 'RequireJS',
/**
* List the modules that will be optimized. All their immediate and deep
* dependencies will be included in the module's file when the build is
* done.
*/
modules: getModulesList([
'course_bookmarks/js/course_bookmarks_factory',
'course_search/js/course_search_factory',
'course_search/js/dashboard_search_factory',
'discussion/js/discussion_board_factory',
'discussion/js/discussion_profile_page_factory',
'js/api_admin/catalog_preview_factory',
'js/courseware/courseware_factory',
'js/discovery/discovery_factory',
'js/edxnotes/views/notes_visibility_factory',
'js/edxnotes/views/page_factory',
'js/financial-assistance/financial_assistance_form_factory',
'js/groups/views/cohorts_dashboard_factory',
'js/discussions_management/views/discussions_dashboard_factory',
'js/header_factory',
'js/learner_dashboard/course_entitlement_factory',
'js/learner_dashboard/unenrollment_factory',
'js/learner_dashboard/entitlement_unenrollment_factory',
'js/learner_dashboard/program_details_factory',
'js/learner_dashboard/program_list_factory',
'js/student_account/logistration_factory',
'js/student_account/views/account_settings_factory',
'js/student_account/views/finish_auth_factory',
'js/views/message_banner',
'learner_profile/js/learner_profile_factory',
'lms/js/preview/preview_factory',
'support/js/certificates_factory',
'support/js/enrollment_factory',
'support/js/manage_user_factory',
'teams/js/teams_tab_factory',
'js/dateutil_factory'
]),
/**
* By default all the configuration for optimization happens from the command
* line or by properties in the config file, and configuration that was
* passed to requirejs as part of the app's runtime "main" JS file is *not*
* considered. However, if you prefer the "main" JS file configuration
* to be read for the build so that you do not have to duplicate the values
* in a separate configuration, set this property to the location of that
* main JS file. The first requirejs({}), require({}), requirejs.config({}),
* or require.config({}) call found in that file will be used.
* As of 2.1.10, mainConfigFile can be an array of values, with the last
* value's config take precedence over previous values in the array.
*/
mainConfigFile: 'require-config.js',
/**
* Set paths for modules. If relative paths, set relative to baseUrl above.
* If a special value of "empty:" is used for the path value, then that
* acts like mapping the path to an empty file. It allows the optimizer to
* resolve the dependency to path, but then does not include it in the output.
* Useful to map module names that are to resources on a CDN or other
* http: URL when running in the browser and during an optimization that
* file should be skipped because it has no dependencies.
*/
paths: {
gettext: 'empty:',
'coffee/src/ajax_prefix': 'empty:',
jquery: 'empty:',
'jquery-migrate': 'empty:',
'jquery.cookie': 'empty:',
'jquery.url': 'empty:',
backbone: 'empty:',
underscore: 'empty:',
'underscore.string': 'empty:',
logger: 'empty:',
utility: 'empty:',
URI: 'empty:',
'common/js/discussion/views/discussion_inline_view': 'empty:',
modernizr: 'empty',
'which-country': 'empty',
// Don't bundle UI Toolkit helpers as they are loaded into the "edx" namespace
'edx-ui-toolkit/js/utils/html-utils': 'empty:',
'edx-ui-toolkit/js/utils/string-utils': 'empty:'
},
/**
* Inline requireJS text templates.
*/
inlineText: true,
/**
* Stub out requireJS text in the optimized file, but leave available for non-optimized development use.
*/
stubModules: ['text'],
/**
* If shim config is used in the app during runtime, duplicate the config
* here. Necessary if shim config is used, so that the shim's dependencies
* are included in the build. Using "mainConfigFile" is a better way to
* pass this information though, so that it is only listed in one place.
* However, if mainConfigFile is not an option, the shim config can be
* inlined in the build config.
*/
shim: {},
/**
* Introduced in 2.1.2: If using "dir" for an output directory, normally the
* optimize setting is used to optimize the build bundles (the "modules"
* section of the config) and any other JS file in the directory. However, if
* the non-build bundle JS files will not be loaded after a build, you can
* skip the optimization of those files, to speed up builds. Set this value
* to true if you want to skip optimizing those other non-build bundle JS<|fim▁hole|> * files.
*/
skipDirOptimize: true,
/**
* When the optimizer copies files from the source location to the
* destination directory, it will skip directories and files that start
* with a ".". If you want to copy .directories or certain .files, for
* instance if you keep some packages in a .packages directory, or copy
* over .htaccess files, you can set this to null. If you want to change
* the exclusion rules, change it to a different regexp. If the regexp
* matches, it means the directory will be excluded. This used to be
* called dirExclusionRegExp before the 1.0.2 release.
* As of 1.0.3, this value can also be a string that is converted to a
* RegExp via new RegExp().
*/
fileExclusionRegExp: /^\.|spec|spec_helpers/,
/**
* Allow CSS optimizations. Allowed values:
* - "standard": @import inlining and removal of comments, unnecessary
* whitespace and line returns.
* Removing line returns may have problems in IE, depending on the type
* of CSS.
* - "standard.keepLines": like "standard" but keeps line returns.
* - "none": skip CSS optimizations.
* - "standard.keepComments": keeps the file comments, but removes line
* returns. (r.js 1.0.8+)
* - "standard.keepComments.keepLines": keeps the file comments and line
* returns. (r.js 1.0.8+)
* - "standard.keepWhitespace": like "standard" but keeps unnecessary whitespace.
*/
optimizeCss: 'none',
/**
* How to optimize all the JS files in the build output directory.
* Right now only the following values are supported:
* - "uglify": Uses UglifyJS to minify the code.
* - "uglify2": Uses UglifyJS2.
* - "closure": Uses Google's Closure Compiler in simple optimization
* mode to minify the code. Only available if REQUIRE_ENVIRONMENT is "rhino" (the default).
* - "none": No minification will be done.
*/
optimize: jsOptimize,
/**
* Sets the logging level. It is a number:
* TRACE: 0,
* INFO: 1,
* WARN: 2,
* ERROR: 3,
* SILENT: 4
* Default is 0.
*/
logLevel: 1
};
}());<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/*
* Copyright 2015-2017 Nathan Fiedler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#[macro_use]
extern crate erlang_nif_sys;
extern crate magick_rust;
extern crate libc;
use erlang_nif_sys::*;
use magick_rust::{MagickWand, magick_wand_genesis, magick_wand_terminus};
use std::ffi::CString;
use std::mem::uninitialized;
/// Create NIF module data and init function.
nif_init!(b"emagick_rs\0", Some(load), None, None, Some(unload),
//
// Ideally these would all have the ERL_NIF_DIRTY_JOB_CPU_BOUND flag
// but support for dirty schedulers is still experimental and not
// available in all distributions (especially FreeBSD).
//
nif!(b"image_fit\0", 3, image_fit, 0),
nif!(b"image_get_property\0", 2, image_get_property, 0),
nif!(b"image_get_format\0", 1, image_get_format, 0),
nif!(b"requires_orientation\0", 1, requires_orientation, 0),
nif!(b"auto_orient\0", 1, auto_orient, 0)
);
/// Resize the image to fit the given dimensions, writing the result in the
/// image's original format. Arguments are the binary, desired width, and
/// desired height. The aspect ratio will be maintained.
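/// Called from Erlang as, for example, emagick_rs:image_fit(Bin, Width, Height)
/// (illustrative call; the NIF is registered with arity 3 in nif_init! above).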
extern "C" fn image_fit(env: *mut ErlNifEnv,
argc: c_int,
args: *const ERL_NIF_TERM) -> ERL_NIF_TERM {
let mut width:c_int = unsafe { uninitialized() };
let mut height:c_int = unsafe { uninitialized() };
let mut bin:ErlNifBinary = unsafe { uninitialized() };
if argc == 3 &&
0 != unsafe { enif_get_int(env, *args.offset(1), &mut width) } &&
0 != unsafe { enif_get_int(env, *args.offset(2), &mut height) } &&
0 != unsafe { enif_inspect_binary(env, *args, &mut bin) } {
let wand = MagickWand::new();
let slice = unsafe { std::slice::from_raw_parts(bin.data, bin.size as usize) };
let data = Vec::from(slice);
if wand.read_image_blob(&data).is_err() {
return make_err_result(env, "unable to read blob");
}
let image_format = wand.get_image_format();
if image_format.is_err() {
return make_err_result(env, "unable to read image format");
}
wand.fit(width as usize, height as usize);
let blob_result = wand.write_image_blob(image_format.unwrap().as_str());
if blob_result.is_err() {
return make_err_result(env, "unable to write blob");
}
let blob = blob_result.unwrap();
let mut bout:ERL_NIF_TERM = unsafe { uninitialized() };
let buf = unsafe { enif_make_new_binary(env, blob.len() as usize, &mut bout) };
unsafe { std::ptr::copy(blob.as_ptr(), buf, blob.len()) };
make_ok_result(env, &bout)
} else {
unsafe { enif_make_badarg(env) }
}
}
/// Retrieve the named property from the given image data, such as EXIF
/// tagged data. Returns {ok, Data}, or {error, Reason} if an error occurs.
extern "C" fn image_get_property(env: *mut ErlNifEnv,
argc: c_int,
args: *const ERL_NIF_TERM) -> ERL_NIF_TERM {
let mut bin:ErlNifBinary = unsafe { uninitialized() };
if argc == 2 && 0 != unsafe { enif_inspect_binary(env, *args, &mut bin) } {
let wand = MagickWand::new();
let slice = unsafe { std::slice::from_raw_parts(bin.data, bin.size as usize) };
let data = Vec::from(slice);
if wand.read_image_blob(&data).is_err() {
return make_err_result(env, "unable to read blob");
}
// need to allocate the space for the incoming string
// (1024 should be enough for the names of most properties)
let mut name:Vec<c_uchar> = Vec::with_capacity(1024);
let name_len = unsafe { enif_get_string(env, *args.offset(1), name.as_mut_ptr(), 1024,
ErlNifCharEncoding::ERL_NIF_LATIN1) };
if name_len == 0 {
return make_err_result(env, "invalid name argument");
}
unsafe { name.set_len((name_len - 1) as usize) };
let rname = std::str::from_utf8(&name);
if rname.is_err() {
return make_err_result(env, "invalid name");
}
let value = wand.get_image_property(rname.unwrap());
if value.is_err() {
return make_err_result(env, value.unwrap_err());
}
let rvalue = value.unwrap();
let value_str = unsafe { enif_make_string_len(env, rvalue.as_ptr(), rvalue.len(),
ErlNifCharEncoding::ERL_NIF_LATIN1) };
make_ok_result(env, &value_str)
} else {
unsafe { enif_make_badarg(env) }
}
}
/// Retrieve the format from the given image data, such as 'JPEG' or 'PNG'.<|fim▁hole|> argc: c_int,
args: *const ERL_NIF_TERM) -> ERL_NIF_TERM {
let mut bin:ErlNifBinary = unsafe { uninitialized() };
if argc == 1 && 0 != unsafe { enif_inspect_binary(env, *args, &mut bin) } {
let wand = MagickWand::new();
let slice = unsafe { std::slice::from_raw_parts(bin.data, bin.size as usize) };
let data = Vec::from(slice);
if wand.read_image_blob(&data).is_err() {
return make_err_result(env, "unable to read blob");
}
let value = wand.get_image_format();
if value.is_err() {
return make_err_result(env, value.unwrap_err());
}
let rvalue = value.unwrap();
let value_str = unsafe { enif_make_string_len(env, rvalue.as_ptr(), rvalue.len(),
ErlNifCharEncoding::ERL_NIF_LATIN1) };
make_ok_result(env, &value_str)
} else {
unsafe { enif_make_badarg(env) }
}
}
/// Returns true if the image requires auto-orientation, false otherwise.
/// The one argument is the binary image data.
extern "C" fn requires_orientation(env: *mut ErlNifEnv,
argc: c_int,
args: *const ERL_NIF_TERM) -> ERL_NIF_TERM {
let mut bin:ErlNifBinary = unsafe { uninitialized() };
if argc == 1 &&
0 != unsafe { enif_inspect_binary(env, *args, &mut bin) } {
let wand = MagickWand::new();
let slice = unsafe { std::slice::from_raw_parts(bin.data, bin.size as usize) };
let data = Vec::from(slice);
if wand.read_image_blob(&data).is_err() {
return make_err_result(env, "unable to read blob");
}
let result = wand.requires_orientation();
make_boolean(env, result)
} else {
unsafe { enif_make_badarg(env) }
}
}
/// Automatically orient the image so it is suitable for viewing, keeping the
/// image's original format. The one argument is the binary image data.
extern "C" fn auto_orient(env: *mut ErlNifEnv,
argc: c_int,
args: *const ERL_NIF_TERM) -> ERL_NIF_TERM {
let mut bin:ErlNifBinary = unsafe { uninitialized() };
if argc == 1 &&
0 != unsafe { enif_inspect_binary(env, *args, &mut bin) } {
let wand = MagickWand::new();
let slice = unsafe { std::slice::from_raw_parts(bin.data, bin.size as usize) };
let data = Vec::from(slice);
if wand.read_image_blob(&data).is_err() {
return make_err_result(env, "unable to read blob");
}
if !wand.auto_orient() {
return make_err_result(env, "unable to orient image");
}
let image_format = wand.get_image_format();
if image_format.is_err() {
return make_err_result(env, "unable to read image format");
}
let blob_result = wand.write_image_blob(image_format.unwrap().as_str());
if blob_result.is_err() {
return make_err_result(env, "unable to write blob");
}
let blob = blob_result.unwrap();
let mut bout:ERL_NIF_TERM = unsafe { uninitialized() };
let buf = unsafe { enif_make_new_binary(env, blob.len() as usize, &mut bout) };
unsafe { std::ptr::copy(blob.as_ptr(), buf, blob.len()) };
make_ok_result(env, &bout)
} else {
unsafe { enif_make_badarg(env) }
}
}
/// Initialize the ImageMagick library.
extern "C" fn load(_env: *mut ErlNifEnv,
_priv_data: *mut *mut c_void,
_load_info: ERL_NIF_TERM)-> c_int {
magick_wand_genesis();
0
}
/// Prepare the ImageMagick library for shutdown.
extern "C" fn unload(_env: *mut ErlNifEnv,
_priv_data: *mut c_void) {
magick_wand_terminus();
}
/// Produce a 2-tuple consisting of 'ok' and the given result.
fn make_ok_result(env: *mut ErlNifEnv, result: *const ERL_NIF_TERM) -> ERL_NIF_TERM {
make_tuple(env, "ok", result)
}
/// Produce a 2-tuple consisting of 'error' and the given reason.
fn make_err_result(env: *mut ErlNifEnv, reason: &str) -> ERL_NIF_TERM {
let reason_str = unsafe { enif_make_string_len(env, reason.as_ptr(), reason.len(),
ErlNifCharEncoding::ERL_NIF_LATIN1) };
make_tuple(env, "error", &reason_str)
}
/// Produce a 2-tuple consisting of the label and the term.
/// The label is converted to an atom.
fn make_tuple(env: *mut ErlNifEnv, label: &str, result: *const ERL_NIF_TERM) -> ERL_NIF_TERM {
let mut label_atom:ERL_NIF_TERM = unsafe { uninitialized() };
let c_label_str = CString::new(label).unwrap();
let c_label_nul = c_label_str.as_bytes_with_nul().as_ptr();
// Try using an existing atom, but if that fails, create a new one.
let atom_exists = unsafe { enif_make_existing_atom(
env, c_label_nul, &mut label_atom, ErlNifCharEncoding::ERL_NIF_LATIN1) };
if atom_exists == 0 {
label_atom = unsafe { enif_make_atom(env, c_label_nul) };
}
let tuple_args = unsafe { [label_atom, *result] };
unsafe { enif_make_tuple_from_array(env, tuple_args.as_ptr(), 2) }
}
/// Return an atom for either true or false.
fn make_boolean(env: *mut ErlNifEnv, value: bool) -> ERL_NIF_TERM {
let mut label_atom:ERL_NIF_TERM = unsafe { uninitialized() };
let c_label_str = if value {
CString::new("true").unwrap()
} else {
CString::new("false").unwrap()
};
let c_label_nul = c_label_str.as_bytes_with_nul().as_ptr();
// Try using an existing atom, but if that fails, create a new one.
let atom_exists = unsafe { enif_make_existing_atom(
env, c_label_nul, &mut label_atom, ErlNifCharEncoding::ERL_NIF_LATIN1) };
if atom_exists == 0 {
label_atom = unsafe { enif_make_atom(env, c_label_nul) };
}
label_atom
}<|fim▁end|> | /// Returns {ok, Data}, or {error, Reason} if an error occurs.
extern "C" fn image_get_format(env: *mut ErlNifEnv, |
<|file_name|>TransitionQuery.java<|end_file_name|><|fim▁begin|>/**
* This file is part of the CRISTAL-iSE kernel.
* Copyright (c) 2001-2015 The CRISTAL Consortium. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; either version 3 of the License, or (at
* your option) any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library; if not, write to the Free Software Foundation,
* Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
*
* http://www.fsf.org/licensing/licenses/lgpl.html
*/
package org.cristalise.kernel.lifecycle.instance.stateMachine;
import lombok.Getter;
import lombok.Setter;
<|fim▁hole|>public class TransitionQuery {
/**
* Name & version of the query to be run by the agent during this transition
*/
String name, version;
public TransitionQuery() {}
public TransitionQuery(String n, String v) {
name = n;
version = v;
}
}<|fim▁end|> | @Getter @Setter |
<|file_name|>test_sched.py<|end_file_name|><|fim▁begin|># Copyright 2015-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import utils_tests
import trappy
sys.path.append(os.path.join(utils_tests.TESTS_DIRECTORY, "..", "trappy"))
class BaseTestSched(utils_tests.SetupDirectory):
def __init__(self, *args, **kwargs):
super(BaseTestSched, self).__init__(
[("trace_sched.txt", "trace.txt")],
*args,
**kwargs)
class TestSchedLoadAvgSchedGroup(BaseTestSched):
def test_get_dataframe(self):
"""Test that SchedLoadAvgSchedGroup creates a proper data_frame"""
dfr = trappy.Run().sched_load_avg_sched_group.data_frame
self.assertTrue(len(dfr) == 1)
self.assertEquals(dfr["cpus"].iloc[0], "00000002")
self.assertEquals(dfr["load"].iloc[0], 0)
self.assertEquals(dfr["utilization"].iloc[0], 0)
class TestSchedLoadAvgTask(BaseTestSched):
def test_get_dataframe(self):
"""Test that SchedLoadAvgTask creates a proper data_frame"""
dfr = trappy.Run().sched_load_avg_task.data_frame
self.assertTrue(len(dfr) == 1)
self.assertEquals(dfr["comm"].iloc[0], "sshd")
self.assertEquals(dfr["pid"].iloc[0], 2962)
self.assertEquals(dfr["load"].iloc[0], 0)
self.assertEquals(dfr["utilization"].iloc[0], 0)
self.assertEquals(dfr["runnable_avg_sum"].iloc[0], 0)
self.assertEquals(dfr["running_avg_sum"].iloc[0], 0)
self.assertEquals(dfr["avg_period"].iloc[0], 48595)
class TestSchedLoadAvgCpu(BaseTestSched):
def test_get_dataframe(self):
"""Test that SchedLoadAvgCpu creates a proper data_frame"""
dfr = trappy.Run().sched_load_avg_cpu.data_frame
self.assertTrue(len(dfr) == 1)
self.assertEquals(dfr["cpu"].iloc[0], 0)
self.assertEquals(dfr["load"].iloc[0], 13)
self.assertEquals(dfr["utilization"].iloc[0], 18)
class TestSchedContribScaleFactor(BaseTestSched):<|fim▁hole|> def test_get_dataframe(self):
"""Test that SchedContribScaleFactor creates a proper data_frame"""
dfr = trappy.Run().sched_contrib_scale_factor.data_frame
self.assertTrue(len(dfr) == 1)
self.assertEquals(dfr["cpu"].iloc[0], 0)
self.assertEquals(dfr["freq_scale_factor"].iloc[0], 426)
self.assertEquals(dfr["cpu_scale_factor"].iloc[0], 1024)
class TestSchedCpuCapacity(BaseTestSched):
def test_get_dataframe(self):
"""Test that SchedCpuCapacity creates a proper data_frame"""
dfr = trappy.Run().sched_cpu_capacity.data_frame
self.assertTrue(len(dfr) == 1)
self.assertEquals(dfr["cpu"].iloc[0], 3)
self.assertEquals(dfr["capacity"].iloc[0], 430)
self.assertEquals(dfr["rt_capacity"].iloc[0], 1024)
class TestSchedCpuFrequency(BaseTestSched):
def test_get_dataframe(self):
"""Test that CpuFrequency creates a proper data_frame"""
dfr = trappy.Run().sched_cpu_frequency.data_frame
self.assertTrue(len(dfr) == 1)
self.assertEquals(dfr["cpu"].iloc[0], 0)
self.assertEquals(dfr["state"].iloc[0], 600000)
self.assertFalse("cpu_id" in dfr.columns)
class TestGetFilters(BaseTestSched):
def test_get_filters(self):
"""Test that Run::get_filters returns correct list of filters"""
run = trappy.Run()
classes = run.class_definitions
filters = run.get_filters()
self.assertTrue(len(classes) == len(filters))
self.assertTrue(sorted(classes) == sorted(filters))
sched_classes = run.sched_classes
sched_filters = run.get_filters("sched")
self.assertTrue(len(sched_classes) == len(sched_filters))
self.assertTrue(sorted(sched_classes) == sorted(sched_filters))
class TestSpacedValueAttributes(BaseTestSched):
def test_spaced_value_attr(self):
"""Test that Run object parses spaced value attributes correctly"""
with open("trace.txt", "a") as fout:
fout.write(" <...>-2971 [004] 6550.056871: sched_load_avg_task: comm=AsyncTask #2 pid=6163 ")
dfr = trappy.Run().sched_load_avg_task.data_frame
self.assertTrue(len(dfr) == 2)
self.assertEquals(dfr["comm"].iloc[1], "AsyncTask #2")
self.assertEquals(dfr["pid"].iloc[1], 6163)
class TestNoSchedTraces(utils_tests.SetupDirectory):
def __init__(self, *args, **kwargs):
super(TestNoSchedTraces, self).__init__(
[("trace_empty.txt", "trace.txt")],
*args,
**kwargs)
def test_empty_trace_txt(self):
"""Test that empty objects are created with empty trace file"""
run = trappy.Run()
for attr in run.sched_classes.iterkeys():
self.assertTrue(len(getattr(run, attr).data_frame) == 0)<|fim▁end|> | |
<|file_name|>index.go<|end_file_name|><|fim▁begin|><|fim▁hole|>
import (
"github.com/labstack/echo"
)
// MakeControllers for main
func MakeControllers(e *echo.Echo) {
e.GET("/api", homeController)
}<|fim▁end|> | package controllers |
<|file_name|>OSC_it_IT.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="it_IT">
<context>
<name>ConfigureOSC</name>
<message>
<location filename="configureosc.ui" line="14"/>
<source>Configure OSC Plugin</source>
<translation>Configurazione plugin OSC</translation>
</message>
<message>
<location filename="configureosc.ui" line="20"/>
<location filename="configureosc.ui" line="84"/>
<location filename="configureosc.ui" line="155"/>
<location filename="configureosc.ui" line="162"/>
<source>Output address:</source>
<translation>Indirizzo di uscita:</translation>
</message>
<message>
<location filename="configureosc.ui" line="37"/>
<location filename="configureosc.ui" line="60"/>
<location filename="configureosc.ui" line="108"/>
<location filename="configureosc.ui" line="135"/>
<source>Input port:</source>
<oldsource>Port:</oldsource>
<translation>Porta di ingresso:</translation>
</message>
<message>
<location filename="configureosc.ui" line="70"/>
<source>OSC Network 2</source>
<translation>Rete OSC 2</translation>
</message>
<message>
<location filename="configureosc.ui" line="101"/>
<source>OSC Network 1</source>
<translation>Rete OSC 1</translation>
</message>
<message>
<location filename="configureosc.ui" line="186"/>
<source>OSC Network 3</source>
<translation>Rete OSC 3</translation>
</message>
<message>
<location filename="configureosc.ui" line="193"/>
<source>OSC Network 4</source>
<translation>Rete OSC 4</translation>
</message>
</context>
<context>
<name>OSCPlugin</name>
<message>
<location filename="oscplugin.cpp" line="102"/>
<source>This plugin provides input for devices supporting the OSC transmission protocol.</source>
<translation>Questa plugin permette la ricezione di segnale da dispositivi che supportano il protocollo OSC.</translation>
</message>
<message>
<location filename="oscplugin.cpp" line="188"/>
<location filename="oscplugin.cpp" line="288"/>
<source>OSC Network</source>
<translation>Rete OSC</translation>
</message>
<message>
<location filename="oscplugin.cpp" line="199"/>
<source>Output</source>
<translation>Uscita</translation>
</message>
<message>
<location filename="oscplugin.cpp" line="202"/>
<location filename="oscplugin.cpp" line="302"/>
<source>Status: Not ready</source>
<oldsource>Status: Not open</oldsource>
<translation>Stato: Non pronto</translation>
</message>
<message>
<location filename="oscplugin.cpp" line="205"/>
<source>Address: </source>
<translation>Indirizzo: </translation>
</message>
<message>
<location filename="oscplugin.cpp" line="208"/>
<location filename="oscplugin.cpp" line="305"/>
<source>Status: Ready</source>
<translation>Stato: Pronto</translation>
</message><|fim▁hole|> <translation>Ingresso</translation>
</message>
</context>
</TS><|fim▁end|> | <message>
<location filename="oscplugin.cpp" line="299"/>
<source>Input</source> |
<|file_name|>EXACT_SIZE_UNIQUE_TUPLE.py<|end_file_name|><|fim▁begin|>from rambutan3.check_args.seq.RRangeSizeUniqueSequenceMatcher import RRangeSizeUniqueSequenceMatcher
from rambutan3.check_args.seq.RSequenceEnum import RSequenceEnum
<|fim▁hole|>def EXACT_SIZE_UNIQUE_TUPLE(*, exact_size: int) -> RRangeSizeUniqueSequenceMatcher:
x = RRangeSizeUniqueSequenceMatcher(RSequenceEnum.TUPLE, min_size=exact_size, max_size=exact_size)
return x<|fim▁end|> |
# noinspection PyPep8Naming |
<|file_name|>NURBSPrimitive.cpp<|end_file_name|><|fim▁begin|>//////////////////////////////////////////////////////////////////////////
//
// Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// * Neither the name of Image Engine Design nor the names of any
// other contributors to this software may be used to endorse or
// promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//////////////////////////////////////////////////////////////////////////
#include "IECore/NURBSPrimitive.h"
#include "IECore/Renderer.h"
#include "IECore/MurmurHash.h"
using namespace std;
using namespace IECore;
using namespace Imath;
using namespace boost;
static IndexedIO::EntryID g_uOrderEntry("uOrder");
static IndexedIO::EntryID g_uKnotEntry("uKnot");
static IndexedIO::EntryID g_uMinEntry("uMin");
static IndexedIO::EntryID g_uMaxEntry("uMax");
static IndexedIO::EntryID g_vOrderEntry("vOrder");
static IndexedIO::EntryID g_vKnotEntry("vKnot");
static IndexedIO::EntryID g_vMinEntry("vMin");
static IndexedIO::EntryID g_vMaxEntry("vMax");
const unsigned int NURBSPrimitive::m_ioVersion = 0;
IE_CORE_DEFINEOBJECTTYPEDESCRIPTION(NURBSPrimitive);
NURBSPrimitive::NURBSPrimitive()
{
vector<float> knot;
knot.push_back( 0 );
knot.push_back( 0 );
knot.push_back( 0 );
knot.push_back( 0.333 );
knot.push_back( 0.666 );
knot.push_back( 1 );
knot.push_back( 1 );
knot.push_back( 1 );
setTopology( 4, new FloatVectorData( knot ), 0, 1, 4, new FloatVectorData( knot ), 0, 1 );
}
NURBSPrimitive::NURBSPrimitive( int uOrder, ConstFloatVectorDataPtr uKnot, float uMin, float uMax,
int vOrder, ConstFloatVectorDataPtr vKnot, float vMin, float vMax, ConstV3fVectorDataPtr p )
{
setTopology( uOrder, uKnot, uMin, uMax, vOrder, vKnot, vMin, vMax );
if( p )
{
V3fVectorDataPtr pData = p->copy();
pData->setInterpretation( GeometricData::Point );
variables.insert( PrimitiveVariableMap::value_type( "P", PrimitiveVariable( PrimitiveVariable::Vertex, pData ) ) );
}
}
int NURBSPrimitive::uOrder() const
{
return m_uOrder;
}
const FloatVectorData *NURBSPrimitive::uKnot() const
{
return m_uKnot.get();
}
float NURBSPrimitive::uMin() const
{
return m_uMin;
}
float NURBSPrimitive::uMax() const
{
return m_uMax;
}
int NURBSPrimitive::uVertices() const
{
return m_uKnot->readable().size() - m_uOrder;
}
int NURBSPrimitive::uSegments() const
{
return 1 + uVertices() - m_uOrder;
}
int NURBSPrimitive::vOrder() const
{
return m_vOrder;
}
const FloatVectorData *NURBSPrimitive::vKnot() const
{
return m_vKnot.get();
}
float NURBSPrimitive::vMin() const
{
return m_vMin;
}
float NURBSPrimitive::vMax() const
{
return m_vMax;
}
int NURBSPrimitive::vVertices() const
{
return m_vKnot->readable().size() - m_vOrder;
}
int NURBSPrimitive::vSegments() const
{
return 1 + vVertices() - m_vOrder;
}
void NURBSPrimitive::setTopology( int uOrder, ConstFloatVectorDataPtr uKnot, float uMin, float uMax,
int vOrder, ConstFloatVectorDataPtr vKnot, float vMin, float vMax )
{
// check order isn't too small
if( uOrder<2 )
{
throw Exception( "Order in u direction too small." );<|fim▁hole|> {
throw Exception( "Order in v direction too small." );
}
// check knots have enough entries for the order.
// an order of N demands at least N control points
// and numKnots==numControlPoints + order
// so we need numKnots>=2*order
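	// e.g. an order-4 (cubic) direction needs at least 4 control points,
	// and therefore at least 8 knot values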
if( (int)uKnot->readable().size() < uOrder * 2 )
{
throw Exception( "Not enough knot values in u direction." );
}
if( (int)vKnot->readable().size() < vOrder * 2 )
{
throw Exception( "Not enough knot values in v direction." );
}
// check knots are monotonically increasing
const vector<float> &u = uKnot->readable();
float previous = u[0];
for( unsigned int i=0; i<u.size(); i++ )
{
if( u[i]<previous )
{
throw Exception( "Knots not monotonically increasing in u direction." );
}
previous = u[i];
}
const vector<float> &v = vKnot->readable();
previous = v[0];
for( unsigned int i=0; i<v.size(); i++ )
{
if( v[i]<previous )
{
throw Exception( "Knots not monotonically increasing in v direction." );
}
previous = v[i];
}
// check min and max parametric values are in range
if( uMin > uMax )
{
throw Exception( "uMin greater than uMax." );
}
if( vMin > vMax )
{
throw Exception( "vMin greater than vMax." );
}
if( uMin < u[uOrder-2] )
{
throw Exception( "uMin too small." );
}
if( uMax > u[u.size()-uOrder+1] )
{
throw Exception( "uMax too great." );
}
if( vMin < v[vOrder-2] )
{
throw Exception( "vMin too small." );
}
if( vMax > v[v.size()-vOrder+1] )
{
throw Exception( "vMax too great." );
}
// set everything (taking copies of the data)
m_uOrder = uOrder;
m_uKnot = uKnot->copy();
m_uMin = uMin;
m_uMax = uMax;
m_vOrder = vOrder;
m_vKnot = vKnot->copy();
m_vMin = vMin;
m_vMax = vMax;
}
size_t NURBSPrimitive::variableSize( PrimitiveVariable::Interpolation interpolation ) const
{
switch( interpolation )
{
case PrimitiveVariable::Constant :
return 1;
case PrimitiveVariable::Uniform :
return uSegments() * vSegments();
case PrimitiveVariable::Vertex :
return uVertices() * vVertices();
case PrimitiveVariable::Varying:
case PrimitiveVariable::FaceVarying:
return (uSegments()+1) * (vSegments()+1);
default :
return 0;
}
}
void NURBSPrimitive::render( Renderer *renderer ) const
{
renderer->nurbs( m_uOrder, m_uKnot, m_uMin, m_uMax, m_vOrder, m_vKnot, m_vMin, m_vMax, variables );
}
void NURBSPrimitive::copyFrom( const Object *other, IECore::Object::CopyContext *context )
{
Primitive::copyFrom( other, context );
const NURBSPrimitive *tOther = static_cast<const NURBSPrimitive *>( other );
m_uOrder = tOther->m_uOrder;
m_uKnot = context->copy<FloatVectorData>( tOther->m_uKnot );
m_uMin = tOther->m_uMin;
m_uMax = tOther->m_uMax;
m_vOrder = tOther->m_vOrder;
m_vKnot = context->copy<FloatVectorData>( tOther->m_vKnot );
m_vMin = tOther->m_vMin;
m_vMax = tOther->m_vMax;
}
void NURBSPrimitive::save( IECore::Object::SaveContext *context ) const
{
Primitive::save(context);
IndexedIOPtr container = context->container( staticTypeName(), m_ioVersion );
container->write( g_uOrderEntry, m_uOrder );
context->save( m_uKnot, container, g_uKnotEntry );
container->write( g_uMinEntry, m_uMin );
container->write( g_uMaxEntry, m_uMax );
container->write( g_vOrderEntry, m_vOrder );
context->save( m_vKnot, container, g_vKnotEntry );
container->write( g_vMinEntry, m_vMin );
container->write( g_vMaxEntry, m_vMax );
}
void NURBSPrimitive::load( IECore::Object::LoadContextPtr context )
{
Primitive::load(context);
unsigned int v = m_ioVersion;
ConstIndexedIOPtr container = context->container( staticTypeName(), v );
container->read( g_uOrderEntry, m_uOrder );
m_uKnot = context->load<FloatVectorData>( container, g_uKnotEntry );
container->read( g_uMinEntry, m_uMin );
container->read( g_uMaxEntry, m_uMax );
container->read( g_vOrderEntry, m_vOrder );
m_vKnot = context->load<FloatVectorData>( container, g_vKnotEntry );
container->read( g_vMinEntry, m_vMin );
container->read( g_vMaxEntry, m_vMax );
}
bool NURBSPrimitive::isEqualTo( const Object *other ) const
{
if( !Primitive::isEqualTo( other ) )
{
return false;
}
const NURBSPrimitive *tOther = static_cast<const NURBSPrimitive *>( other );
if( m_uOrder!=tOther->m_uOrder )
{
return false;
}
if( m_vOrder!=tOther->m_vOrder )
{
return false;
}
if( m_uMin!=tOther->m_uMin )
{
return false;
}
if( m_vMin!=tOther->m_vMin )
{
return false;
}
if( m_uMax!=tOther->m_uMax )
{
return false;
}
if( m_vMax!=tOther->m_vMax )
{
return false;
}
if( !m_uKnot->isEqualTo( tOther->m_uKnot ) )
{
return false;
}
if( !m_vKnot->isEqualTo( tOther->m_vKnot ) )
{
return false;
}
return true;
}
void NURBSPrimitive::memoryUsage( Object::MemoryAccumulator &a ) const
{
Primitive::memoryUsage( a );
a.accumulate( sizeof( m_uOrder ) * 2 );
a.accumulate( sizeof( m_uMin ) * 4 );
a.accumulate( m_uKnot );
a.accumulate( m_vKnot );
}
void NURBSPrimitive::hash( MurmurHash &h ) const
{
Primitive::hash( h );
}
void NURBSPrimitive::topologyHash( MurmurHash &h ) const
{
h.append( m_uOrder );
m_uKnot->hash( h );
h.append( m_uMin );
h.append( m_uMax );
h.append( m_vOrder );
m_vKnot->hash( h );
h.append( m_vMin );
h.append( m_vMax );
}<|fim▁end|> | }
if( vOrder<2 ) |
<|file_name|>issue-14589.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// All 3 expressions should work in that the argument gets
// coerced to a trait object
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
fn main() {
send::<Box<Foo>>(Box::new(Output(0)));
Test::<Box<Foo>>::foo(Box::new(Output(0)));
Test::<Box<Foo>>::new().send(Box::new(Output(0)));
}
fn send<T>(_: T) {}
struct Test<T> { marker: std::marker::PhantomData<T> }
impl<T> Test<T> {
fn new() -> Test<T> { Test { marker: ::std::marker::PhantomData } }
fn foo(_: T) {}
fn send(&self, _: T) {}<|fim▁hole|>}
trait Foo { fn dummy(&self) { }}
struct Output(int);
impl Foo for Output {}<|fim▁end|> | |
<|file_name|>maas.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 Adam Stokes <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from sos.plugins import Plugin, UbuntuPlugin
class Maas(Plugin, UbuntuPlugin):
"""Ubuntu Metal-As-A-Service
"""
plugin_name = 'maas'
profiles = ('sysmgmt',)
option_list = [
('profile-name',
'The name with which you will later refer to this remote', '', False),
('url', 'The URL of the remote API', '', False),
('credentials',
'The credentials, also known as the API key', '', False)
]
def _has_login_options(self):
return self.get_option("url") and self.get_option("credentials") \
and self.get_option("profile-name")
def _remote_api_login(self):
ret = self.call_ext_prog("maas login %s %s %s" % (
self.get_option("profile-name"),
self.get_option("url"),
self.get_option("credentials")))
return ret['status'] == 0
def setup(self):
self.add_copy_spec([
"/etc/squid-deb-proxy",
"/etc/maas",
"/var/lib/maas/dhcp*",<|fim▁hole|> "/var/log/apache2*",
"/var/log/maas*",
"/var/log/upstart/maas-*",
])
self.add_cmd_output([
"apt-cache policy maas-*",
"apt-cache policy python-django-*",
])
if self.is_installed("maas-region-controller"):
self.add_cmd_output([
"maas-region-admin dumpdata",
])
if self._has_login_options():
if self._remote_api_login():
self.add_cmd_output("maas %s commissioning-results list" %
self.get_option("profile-name"))
else:
self._log_error(
"Cannot login into MAAS remote API with provided creds.")
# vim: set et ts=4 sw=4 :<|fim▁end|> | |
<|file_name|>quote.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use ast;
use codemap::Span;
use ext::base::ExtCtxt;
use ext::base;
use ext::build::AstBuilder;
use parse::token::*;
use parse::token;
use parse;
/**
*
* Quasiquoting works via token trees.
*
 * This is registered as a set of expression syntax extensions called quote!
* that lifts its argument token-tree to an AST representing the
* construction of the same token tree, with ast::TTNonterminal nodes
* interpreted as antiquotes (splices).
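 * For example (illustrative), quote_expr!(cx, $lhs + $rhs) antiquotes the
 * token trees bound to `lhs` and `rhs` into the quoted expression.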
*
*/
pub mod rt {
use ast;
use ext::base::ExtCtxt;
use parse;
use print::pprust;
pub use ast::*;
pub use parse::token::*;
pub use parse::new_parser_from_tts;
pub use codemap::{BytePos, Span, dummy_spanned};
pub trait ToTokens {
fn to_tokens(&self, _cx: &ExtCtxt) -> ~[TokenTree];
}
impl ToTokens for ~[TokenTree] {
fn to_tokens(&self, _cx: &ExtCtxt) -> ~[TokenTree] {
(*self).clone()
}
}
/* Should be (when bugs in default methods are fixed):
trait ToSource : ToTokens {
// Takes a thing and generates a string containing rust code for it.
pub fn to_source() -> ~str;
// If you can make source, you can definitely make tokens.
pub fn to_tokens(cx: &ExtCtxt) -> ~[TokenTree] {
cx.parse_tts(self.to_source())
}
}
*/
pub trait ToSource {
// Takes a thing and generates a string containing rust code for it.
fn to_source(&self) -> @str;
}
impl ToSource for ast::Ident {
fn to_source(&self) -> @str {
ident_to_str(self)
}
}
impl ToSource for @ast::Item {
fn to_source(&self) -> @str {
pprust::item_to_str(*self, get_ident_interner()).to_managed()
}
}
impl<'a> ToSource for &'a [@ast::Item] {
fn to_source(&self) -> @str {
self.map(|i| i.to_source()).connect("\n\n").to_managed()
}
}
impl ToSource for ast::Ty {
fn to_source(&self) -> @str {
pprust::ty_to_str(self, get_ident_interner()).to_managed()
}
}
impl<'a> ToSource for &'a [ast::Ty] {
fn to_source(&self) -> @str {
self.map(|i| i.to_source()).connect(", ").to_managed()
}
}
impl ToSource for Generics {
fn to_source(&self) -> @str {
pprust::generics_to_str(self, get_ident_interner()).to_managed()
}
}
impl ToSource for @ast::Expr {
fn to_source(&self) -> @str {
pprust::expr_to_str(*self, get_ident_interner()).to_managed()
}
}
impl ToSource for ast::Block {
fn to_source(&self) -> @str {
pprust::block_to_str(self, get_ident_interner()).to_managed()
}
}
impl<'a> ToSource for &'a str {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitStr(self.to_managed(), ast::CookedStr));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for int {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for i8 {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI8));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for i16 {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI16));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for i32 {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI32));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for i64 {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitInt(*self as i64, ast::TyI64));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for uint {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for u8 {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU8));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for u16 {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU16));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for u32 {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU32));
pprust::lit_to_str(&lit).to_managed()
}
}
impl ToSource for u64 {
fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitUint(*self as u64, ast::TyU64));
pprust::lit_to_str(&lit).to_managed()
}
}
// Alas ... we write these out instead. All redundant.
macro_rules! impl_to_tokens(
($t:ty) => (
impl ToTokens for $t {
fn to_tokens(&self, cx: &ExtCtxt) -> ~[TokenTree] {
cx.parse_tts(self.to_source())
}
}
)
)
macro_rules! impl_to_tokens_self(
($t:ty) => (
impl<'a> ToTokens for $t {
fn to_tokens(&self, cx: &ExtCtxt) -> ~[TokenTree] {
cx.parse_tts(self.to_source())
}
}
)
)
impl_to_tokens!(ast::Ident)
impl_to_tokens!(@ast::Item)
impl_to_tokens_self!(&'a [@ast::Item])
impl_to_tokens!(ast::Ty)
impl_to_tokens_self!(&'a [ast::Ty])
impl_to_tokens!(Generics)
impl_to_tokens!(@ast::Expr)
impl_to_tokens!(ast::Block)
impl_to_tokens_self!(&'a str)
impl_to_tokens!(int)
impl_to_tokens!(i8)
impl_to_tokens!(i16)
impl_to_tokens!(i32)
impl_to_tokens!(i64)
impl_to_tokens!(uint)
impl_to_tokens!(u8)
impl_to_tokens!(u16)
impl_to_tokens!(u32)
impl_to_tokens!(u64)
pub trait ExtParseUtils {
fn parse_item(&self, s: @str) -> @ast::Item;
fn parse_expr(&self, s: @str) -> @ast::Expr;
fn parse_stmt(&self, s: @str) -> @ast::Stmt;
fn parse_tts(&self, s: @str) -> ~[ast::TokenTree];
}
impl ExtParseUtils for ExtCtxt {
fn parse_item(&self, s: @str) -> @ast::Item {
let res = parse::parse_item_from_source_str(
@"<quote expansion>",
s,
self.cfg(),
~[],
self.parse_sess());
match res {
Some(ast) => ast,
None => {
error!("Parse error with ```\n{}\n```", s);
fail!()
}
}
}
fn parse_stmt(&self, s: @str) -> @ast::Stmt {
parse::parse_stmt_from_source_str(
@"<quote expansion>",
s,
self.cfg(),
~[],
self.parse_sess())
}
fn parse_expr(&self, s: @str) -> @ast::Expr {
parse::parse_expr_from_source_str(
@"<quote expansion>",
s,
self.cfg(),
self.parse_sess())
}
fn parse_tts(&self, s: @str) -> ~[ast::TokenTree] {
parse::parse_tts_from_source_str(
@"<quote expansion>",
s,
self.cfg(),
self.parse_sess())
}
}
}
pub fn expand_quote_tokens(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree]) -> base::MacResult {
let (cx_expr, expr) = expand_tts(cx, sp, tts);
let expanded = expand_wrapper(cx, sp, cx_expr, expr);
base::MRExpr(expanded)
}
pub fn expand_quote_expr(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree]) -> base::MacResult {
let expanded = expand_parse_call(cx, sp, "parse_expr", ~[], tts);
base::MRExpr(expanded)
}
pub fn expand_quote_item(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree]) -> base::MacResult {
let e_attrs = cx.expr_vec_uniq(sp, ~[]);
let expanded = expand_parse_call(cx, sp, "parse_item",
~[e_attrs], tts);
base::MRExpr(expanded)
}
pub fn expand_quote_pat(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree]) -> base::MacResult {
let e_refutable = cx.expr_lit(sp, ast::LitBool(true));
let expanded = expand_parse_call(cx, sp, "parse_pat",
~[e_refutable], tts);
base::MRExpr(expanded)
}
pub fn expand_quote_ty(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree]) -> base::MacResult {
let e_param_colons = cx.expr_lit(sp, ast::LitBool(false));
let expanded = expand_parse_call(cx, sp, "parse_ty",
~[e_param_colons], tts);
base::MRExpr(expanded)
}
pub fn expand_quote_stmt(cx: &mut ExtCtxt,
sp: Span,
tts: &[ast::TokenTree]) -> base::MacResult {
let e_attrs = cx.expr_vec_uniq(sp, ~[]);
let expanded = expand_parse_call(cx, sp, "parse_stmt",
~[e_attrs], tts);
base::MRExpr(expanded)
}
fn ids_ext(strs: ~[~str]) -> ~[ast::Ident] {
strs.map(|str| str_to_ident(*str))
}
fn id_ext(str: &str) -> ast::Ident {
str_to_ident(str)
}
// Lift an ident to the expr that evaluates to that ident.
fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr {
let e_str = cx.expr_str(sp, cx.str_of(ident));
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("ident_of"),
~[e_str])
}
fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> @ast::Expr {
let name = match bop {
PLUS => "PLUS",
MINUS => "MINUS",
STAR => "STAR",
SLASH => "SLASH",
PERCENT => "PERCENT",
CARET => "CARET",
AND => "AND",
OR => "OR",
SHL => "SHL",
SHR => "SHR"
};
cx.expr_ident(sp, id_ext(name))
}
fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr {
match *tok {
BINOP(binop) => {
return cx.expr_call_ident(sp,
id_ext("BINOP"),
~[mk_binop(cx, sp, binop)]);
}
BINOPEQ(binop) => {
return cx.expr_call_ident(sp,
id_ext("BINOPEQ"),
~[mk_binop(cx, sp, binop)]);
}
LIT_CHAR(i) => {
let e_char = cx.expr_lit(sp, ast::LitChar(i));
return cx.expr_call_ident(sp, id_ext("LIT_CHAR"), ~[e_char]);
}
LIT_INT(i, ity) => {
let s_ity = match ity {
ast::TyI => ~"TyI",
ast::TyI8 => ~"TyI8",
ast::TyI16 => ~"TyI16",
ast::TyI32 => ~"TyI32",
ast::TyI64 => ~"TyI64"
};
let e_ity = cx.expr_ident(sp, id_ext(s_ity));
let e_i64 = cx.expr_lit(sp, ast::LitInt(i, ast::TyI64));
return cx.expr_call_ident(sp,
id_ext("LIT_INT"),
~[e_i64, e_ity]);
}
LIT_UINT(u, uty) => {
let s_uty = match uty {
ast::TyU => ~"TyU",
ast::TyU8 => ~"TyU8",
ast::TyU16 => ~"TyU16",
ast::TyU32 => ~"TyU32",
ast::TyU64 => ~"TyU64"
};
let e_uty = cx.expr_ident(sp, id_ext(s_uty));
let e_u64 = cx.expr_lit(sp, ast::LitUint(u, ast::TyU64));
return cx.expr_call_ident(sp,
id_ext("LIT_UINT"),
~[e_u64, e_uty]);
}
LIT_INT_UNSUFFIXED(i) => {
let e_i64 = cx.expr_lit(sp, ast::LitInt(i, ast::TyI64));
return cx.expr_call_ident(sp,
id_ext("LIT_INT_UNSUFFIXED"),
~[e_i64]);
}
LIT_FLOAT(fident, fty) => {
let s_fty = match fty {
ast::TyF32 => ~"TyF32",
ast::TyF64 => ~"TyF64"
};
let e_fty = cx.expr_ident(sp, id_ext(s_fty));
let e_fident = mk_ident(cx, sp, fident);
return cx.expr_call_ident(sp,
id_ext("LIT_FLOAT"),
~[e_fident, e_fty]);
}
LIT_STR(ident) => {
return cx.expr_call_ident(sp,
id_ext("LIT_STR"),
~[mk_ident(cx, sp, ident)]);
}
LIT_STR_RAW(ident, n) => {
return cx.expr_call_ident(sp,
id_ext("LIT_STR_RAW"),
~[mk_ident(cx, sp, ident),
cx.expr_uint(sp, n)]);
}
IDENT(ident, b) => {
return cx.expr_call_ident(sp,
id_ext("IDENT"),
~[mk_ident(cx, sp, ident),
cx.expr_bool(sp, b)]);
}
LIFETIME(ident) => {
return cx.expr_call_ident(sp,
id_ext("LIFETIME"),
~[mk_ident(cx, sp, ident)]);
}
DOC_COMMENT(ident) => {
return cx.expr_call_ident(sp,
id_ext("DOC_COMMENT"),
~[mk_ident(cx, sp, ident)]);
}
INTERPOLATED(_) => fail!("quote! with interpolated token"),
_ => ()
}
let name = match *tok {
EQ => "EQ",
LT => "LT",
LE => "LE",
EQEQ => "EQEQ",
NE => "NE",
GE => "GE",
GT => "GT",
ANDAND => "ANDAND",
OROR => "OROR",
NOT => "NOT",
TILDE => "TILDE",
AT => "AT",
DOT => "DOT",
DOTDOT => "DOTDOT",
COMMA => "COMMA",
SEMI => "SEMI",
COLON => "COLON",
MOD_SEP => "MOD_SEP",
RARROW => "RARROW",
LARROW => "LARROW",
DARROW => "DARROW",
FAT_ARROW => "FAT_ARROW",
LPAREN => "LPAREN",
RPAREN => "RPAREN",
LBRACKET => "LBRACKET",
RBRACKET => "RBRACKET",
LBRACE => "LBRACE",
RBRACE => "RBRACE",
POUND => "POUND",
DOLLAR => "DOLLAR",
UNDERSCORE => "UNDERSCORE",
EOF => "EOF",
_ => fail!()
};
cx.expr_ident(sp, id_ext(name))
}
fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> ~[@ast::Stmt] {
match *tt {
ast::TTTok(sp, ref tok) => {
let e_sp = cx.expr_ident(sp, id_ext("sp"));
let e_tok = cx.expr_call_ident(sp,
id_ext("TTTok"),
~[e_sp, mk_token(cx, sp, tok)]);
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("tt")),
id_ext("push"),
~[e_tok]);
~[cx.stmt_expr(e_push)]
}
ast::TTDelim(ref tts) => mk_tts(cx, sp, **tts),
ast::TTSeq(..) => fail!("TTSeq in quote!"),
ast::TTNonterminal(sp, ident) => {
// tt.push_all_move($ident.to_tokens(ext_cx))
let e_to_toks =
cx.expr_method_call(sp,
cx.expr_ident(sp, ident),
id_ext("to_tokens"),
~[cx.expr_ident(sp, id_ext("ext_cx"))]);
let e_push =
cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("tt")),
id_ext("push_all_move"),
~[e_to_toks]);
~[cx.stmt_expr(e_push)]
}
}
}
fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> ~[@ast::Stmt] {
let mut ss = ~[];
for tt in tts.iter() {
ss.push_all_move(mk_tt(cx, sp, tt));
}
ss
}
fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> (@ast::Expr, @ast::Expr) {
// NB: It appears that the main parser loses its mind if we consider
// $foo as a TTNonterminal during the main parse, so we have to re-parse
// under quote_depth > 0. This is silly and should go away; the _guess_ is
// it has to do with transition away from supporting old-style macros, so
// try removing it when enough of them are gone.
let mut p = parse::new_parser_from_tts(cx.parse_sess(),
cx.cfg(),
tts.to_owned());
p.quote_depth += 1u;
let cx_expr = p.parse_expr();
if !p.eat(&token::COMMA) {
p.fatal("Expected token `,`");
}
let tts = p.parse_all_token_trees();
p.abort_if_errors();
// We also bind a single value, sp, to ext_cx.call_site()
//
// This causes every span in a token-tree quote to be attributed to the
// call site of the extension using the quote. We can't really do much<|fim▁hole|> //
// The old quasiquoter had an elaborate mechanism for denoting input
// file locations from which quotes originated; unfortunately this
// relied on feeding the source string of the quote back into the
// compiler (which we don't really want to do) and, in any case, only
// pushed the problem a very small step further back: an error
// resulting from a parse of the resulting quote is still attributed to
// the site the string literal occurred, which was in a source file
// _other_ than the one the user has control over. For example, an
// error in a quote from the protocol compiler, invoked in user code
// using macro_rules! for example, will be attributed to the macro_rules.rs
// file in libsyntax, which the user might not even have source to (unless
// they happen to have a compiler on hand). Over all, the phase distinction
// just makes quotes "hard to attribute". Possibly this could be fixed
// by recreating some of the original qq machinery in the tt regime
// (pushing fake FileMaps onto the parser to account for original sites
// of quotes, for example) but at this point it seems not likely to be
// worth the hassle.
let e_sp = cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("call_site"),
~[]);
let stmt_let_sp = cx.stmt_let(sp, false,
id_ext("sp"),
e_sp);
let stmt_let_tt = cx.stmt_let(sp, true,
id_ext("tt"),
cx.expr_vec_uniq(sp, ~[]));
let block = cx.expr_block(
cx.block_all(sp,
~[],
~[stmt_let_sp, stmt_let_tt] + mk_tts(cx, sp, tts),
Some(cx.expr_ident(sp, id_ext("tt")))));
(cx_expr, block)
}
fn expand_wrapper(cx: &ExtCtxt,
sp: Span,
cx_expr: @ast::Expr,
expr: @ast::Expr) -> @ast::Expr {
let uses = ~[ cx.view_use_glob(sp, ast::Public,
ids_ext(~[~"syntax",
~"ext",
~"quote",
~"rt"])) ];
let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr);
cx.expr_block(cx.block_all(sp, uses, ~[stmt_let_ext_cx], Some(expr)))
}
fn expand_parse_call(cx: &ExtCtxt,
sp: Span,
parse_method: &str,
arg_exprs: ~[@ast::Expr],
tts: &[ast::TokenTree]) -> @ast::Expr {
let (cx_expr, tts_expr) = expand_tts(cx, sp, tts);
let cfg_call = || cx.expr_method_call(
sp, cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("cfg"), ~[]);
let parse_sess_call = || cx.expr_method_call(
sp, cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("parse_sess"), ~[]);
let new_parser_call =
cx.expr_call(sp,
cx.expr_ident(sp, id_ext("new_parser_from_tts")),
~[parse_sess_call(), cfg_call(), tts_expr]);
let expr = cx.expr_method_call(sp, new_parser_call, id_ext(parse_method),
arg_exprs);
expand_wrapper(cx, sp, cx_expr, expr)
}<|fim▁end|> | // better since the source of the quote may well be in a library that
// was not even parsed by this compilation run, that the user has no
// source code for (eg. in libsyntax, which they're just _using_). |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright (c) 2013-2014 Red Hat
# Author: Cleber Rosa <[email protected]>
# pylint: disable=E0611
from distutils.core import setup
from sphinx.setup_command import BuildDoc
import arc.version
setup(name='arc',
version=arc.version.VERSION,
description='Autotest RPC Client',
author='Cleber Rosa',
author_email='[email protected]',
url='http://autotest.github.com',
requires=['pygments'],
packages=['arc',
'arc.cli',
'arc.cli.args',
'arc.cli.actions',
'arc.shared',
'arc.tko'],<|fim▁hole|> scripts=['scripts/arcli'])<|fim▁end|> | data_files=[('/etc/', ['data/arc.conf'])],
cmdclass={'build_doc': BuildDoc},
command_options={'build_doc': {'source_dir':
('setup.py', 'docs/source')}}, |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# SymPy documentation build configuration file, created by
# sphinx-quickstart.py on Sat Mar 22 19:34:32 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys
# If your extensions are in another directory, add it here.
sys.path.extend(['../sympy', 'ext'])
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.addons.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.mathjax',
'numpydoc', 'sympylive',]
# Use this to use pngmath instead
#extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.pngmath', ]
# MathJax file, which is free to use. See http://www.mathjax.org/docs/2.0/start.html
mathjax_path = 'https://c328740.ssl.cf1.rackcdn.com/mathjax/latest/MathJax.js?config=TeX-AMS_HTML-full'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General substitutions.
project = 'SymPy'
copyright = '2008, 2009, 2010, 2011, 2012 SymPy Development Team'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
version = '0.7.2'
# The full version, including alpha/beta/rc tags.
release = '0.7.2-git'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# Translations:
locale_dirs = ["i18n/"]
# List of documents that shouldn't be included in the build.
#unused_docs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
html_style = 'default.css'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
html_logo = '_static/sympylogo.png'
html_favicon = '../logo/SymPy-Favicon.ico'
html_theme_options = {'collapsiblesidebar': True}
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Content template for the index page.
#html_index = ''
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# Output file base name for HTML help builder.
htmlhelp_basename = 'SymPydoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual], toctree_only).
# toctree_only is set to True so that the start file document itself is not included in the
# output, only the documents referenced by it via TOC trees. The extra stuff in the master
# document is intended to show up in the HTML, but doesn't really belong in the LaTeX output.
latex_documents = [('index', 'sympy-%s.tex' % release, 'SymPy Documentation',
'SymPy Development Team', 'manual', True)]
# Additional stuff for the LaTeX preamble.
# Tweaked to work with XeTeX.
latex_elements = {
'babel': '',
'fontenc': r'''
\usepackage{amssymb}
\usepackage{fontspec}
\defaultfontfeatures{Mapping=tex-text}
\setmainfont{DejaVu Serif}
\setsansfont{DejaVu Sans}
\setmonofont{DejaVu Sans Mono}
''',
'fontpkg': '',
'inputenc': '',<|fim▁hole|> 'utf8extra': '',
'preamble': ''
}
# SymPy logo on title page
latex_logo = '_static/sympylogo.png'
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# Show page numbers next to internal references
latex_show_pagerefs = True
# We use False otherwise the module index gets generated twice.
latex_use_modindex = False
default_role = 'math'
pngmath_divpng_args = ['-gamma 1.5','-D 110']
# Note, this is ignored by the mathjax extension
# Any \newcommand should be defined in the file
pngmath_latex_preamble = '\\usepackage{amsmath}\n'+\
'\\usepackage{bm}\n'+\
'\\usepackage{amsfonts}\n'+\
'\\usepackage{amssymb}\n'+\
'\\setlength{\\parindent}{0pt}\n'
texinfo_documents = [
(master_doc, 'sympy', 'SymPy Documentation',
'SymPy Development Team',
'SymPy', 'Computer algebra system (CAS) in Python', 'Programming',
1),
]<|fim▁end|> | |
<|file_name|>controls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2010 Trever Fischer <[email protected]>
#
# This file is part of modulation.
#
# modulation is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# modulation is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with modulation. If not, see <http://www.gnu.org/licenses/>.
from modulation import Packet, Plugin
import threading
class ControlPacket(Packet):
"""A ControlPacket gives some kind of control signal to a child plugin, such as "STOP", "PLAY", "SEARCH", "VOLUME", etc
If a plugin handles the packet according to its intent (eg stopping playback on a stop packet), then the
original packet must be forwarded with send(). If a plugin then does other actions (such as switching to
a different stream and then playing), a new packet must be sent as well.
If a plugin does something of its own accord (eg nobody told it to stop, but it is out of data so it must
stop anyways), a new packet must be sent.
"""
def __init__(self, origin=None, data=None):
Packet.__init__(self, origin)
self.__data = data
def getData(self):
return self.__data
data = property(getData, None, None, "The data associated with this specific control packet type.")
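# Illustrative sketch (not part of the original module) of the forwarding rule
# described in the ControlPacket docstring; the Player class and its
# stop_playback() helper are hypothetical, while Plugin, Stop, send() and
# handlePacket() are names this module itself defines or documents:
#
#   class Player(Plugin):
#       def handlePacket(self, pkt):
#           if isinstance(pkt, Stop):
#               self.stop_playback()  # act on the packet's intent...
#               self.send(pkt)        # ...then forward the original packet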
class Start(ControlPacket):
"""Start some operation"""
pass
class Stop(ControlPacket):
"""Stop doing some operation"""
pass
class Pause(ControlPacket):<|fim▁hole|> pass
class Next(ControlPacket):
"""Skip the current operation"""
pass
class Prev(ControlPacket):
"""Go back to the previous operation"""
pass
class Enqueue(ControlPacket):
"""Passes along a source to enqueue"""
pass
class Load(ControlPacket):
"""Uses the 'uri' data element to indicate loading of data"""
pass
class Seek(ControlPacket):
"""Uses the 'location' data element"""
pass
class Exit(ControlPacket):
"""Indicates a plugin upstream has exited and is no longer part of the graph"""
pass
class PacketDelay(Plugin):
"""PacketDelays are used to wait until a packet of some type has been recieved"""
def __init__(self, packetType):
Plugin.__init__(self)
self.__type = packetType
self.__lock = threading.Event()
def handlePacket(self, pkt):
super(PacketDelay, self).handlePacket(pkt)
if (isinstance(pkt, self.__type)):
self.__lock.set()
    def wait(self):
self.__lock.wait()<|fim▁end|> | """Pause something that can be continued later""" |
<|file_name|>env.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and<|fim▁hole|>// limitations under the License.
package adapterManager
import (
"istio.io/mixer/pkg/adapter"
"istio.io/mixer/pkg/pool"
)
type env struct {
logger adapter.Logger
gp *pool.GoroutinePool
}
func newEnv(aspect string, gp *pool.GoroutinePool) adapter.Env {
return env{
logger: newLogger(aspect),
gp: gp,
}
}
func (e env) Logger() adapter.Logger {
return e.logger
}
func (e env) ScheduleWork(fn adapter.WorkFunc) {
e.gp.ScheduleWork(func() {
defer func() {
if r := recover(); r != nil {
_ = e.Logger().Errorf("Adapter worker failed: %v", r)
// TODO: Beyond logging, we want to do something proactive here.
// For example, we want to probably terminate the originating
// adapter and record the failure so we can count how often
// it happens, etc.
}
}()
fn()
})
}
func (e env) ScheduleDaemon(fn adapter.DaemonFunc) {
go func() {
defer func() {
if r := recover(); r != nil {
_ = e.Logger().Errorf("Adapter daemon failed: %v", r)
// TODO: Beyond logging, we want to do something proactive here.
// For example, we want to probably terminate the originating
// adapter and record the failure so we can count how often
// it happens, etc.
}
}()
fn()
}()
}<|fim▁end|> | |
<|file_name|>0012_auto_20151230_1853.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2015-12-30 17:53
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion<|fim▁hole|>
class Migration(migrations.Migration):
dependencies = [
('wunderlist', '0011_auto_20151230_1843'),
]
operations = [
migrations.AlterField(
model_name='connection',
name='owner',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='connections', to=settings.AUTH_USER_MODEL),
),
]<|fim▁end|> | |
<|file_name|>packet.py<|end_file_name|><|fim▁begin|>import logging
import rlp
from utils import big_endian_to_int as idec
from utils import int_to_big_endian4 as ienc4
from utils import int_to_big_endian as ienc
from utils import recursive_int_to_big_endian
import dispatch
import sys
import signals
logger = logging.getLogger(__name__)
def lrlp_decode(data):
"always return a list"
d = rlp.decode(data)
if isinstance(d, str):
d = [d]
return d
def load_packet(packet):
return Packeter.load_packet(packet)
class Packeter(object):
"""
Translates between the network and the local data
https://github.com/ethereum/wiki/wiki/%5BEnglish%5D-Wire-Protocol
stateless!
.. note::
#. Can only be used after the `config` method is called
"""
cmd_map = dict(((0x00, 'Hello'),
(0x01, 'Disconnect'),
(0x02, 'Ping'),
(0x03, 'Pong'),
(0x10, 'GetPeers'),
(0x11, 'Peers'),
(0x12, 'Transactions'),
(0x13, 'Blocks'),
(0x14, 'GetChain'),
(0x15, 'NotInChain'),
(0x16, 'GetTransactions')))
cmd_map_by_name = dict((v, k) for k, v in cmd_map.items())
disconnect_reasons_map = dict((
('Disconnect requested', 0x00),
('TCP sub-system error', 0x01),
('Bad protocol', 0x02),
('Useless peer', 0x03),
('Too many peers', 0x04),
('Already connected', 0x05),
('Wrong genesis block', 0x06),
('Incompatible network protocols', 0x07),
('Client quitting', 0x08)))
disconnect_reasons_map_by_id = \
dict((v, k) for k, v in disconnect_reasons_map.items())
SYNCHRONIZATION_TOKEN = 0x22400891
PROTOCOL_VERSION = 19
    # NODE_ID is the node's unique identifier: a 512-bit hash that serves to
# identify the node.
NETWORK_ID = 0
CLIENT_ID = 'Ethereum(py)/0.5.2/%s/Protocol:%d' % (sys.platform,
PROTOCOL_VERSION)
    CAPABILITIES = 0x01 + 0x02 + 0x04  # node discovery + transaction relaying + block-chain querying
def __init__(self):
pass
def configure(self, config):
self.config = config
self.CLIENT_ID = self.config.get('network', 'client_id') \
or self.CLIENT_ID
self.NODE_ID = self.config.get('network', 'node_id')
@classmethod
def load_packet(cls, packet):
'''
Though TCP provides a connection-oriented medium, Ethereum nodes
communicate in terms of packets. These packets are formed as a 4-byte
synchronisation token (0x22400891), a 4-byte "payload size", to be
interpreted as a big-endian integer and finally an N-byte
RLP-serialised data structure, where N is the aforementioned
"payload size". To be clear, the payload size specifies the number of
bytes in the packet ''following'' the first 8.
        :return: (success, result), where result is an error message string
            when success is False, and a (header, payload_len, cmd, data,
            remain) tuple when success is True
'''
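        # Worked example of the framing described above (illustrative only):
        # a Ping packet would be laid out roughly as
        #   22 40 08 91 | 00 00 00 02 | c1 02
        #   sync token  | payload len | RLP-encoded [0x02]
        # i.e. the RLP payload starts at byte 8 and is payload_len bytes long.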
header = idec(packet[:4])
if header != cls.SYNCHRONIZATION_TOKEN:
return False, 'check header failed, skipping message,'\
'sync token was hex: {0:x}'.format(header)
try:
payload_len = idec(packet[4:8])
except Exception as e:
return False, str(e)
if len(packet) < payload_len + 8:
return False, 'Packet is broken'
try:
payload = lrlp_decode(packet[8:8 + payload_len])
except Exception as e:
return False, str(e)
if (not len(payload)) or (idec(payload[0]) not in cls.cmd_map):
return False, 'check cmd failed'
cmd = Packeter.cmd_map.get(idec(payload[0]))
remain = packet[8 + payload_len:]
return True, (header, payload_len, cmd, payload[1:], remain)
def load_cmd(self, packet):
success, res = self.load_packet(packet)
if not success:
raise Exception(res)
_, _, cmd, data, remain = res
return cmd, data, remain
@classmethod
def dump_packet(cls, data):
"""
4-byte synchronisation token, (0x22400891),
a 4-byte "payload size", to be interpreted as a big-endian integer
an N-byte RLP-serialised data structure
"""
payload = rlp.encode(recursive_int_to_big_endian(data))
packet = ienc4(cls.SYNCHRONIZATION_TOKEN)
packet += ienc4(len(payload))
packet += payload
return packet
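    # Round-trip sketch (illustrative): for well-formed data, load_packet()
    # undoes dump_packet(), e.g.
    #   ok, (hdr, plen, cmd, data, remain) = Packeter.load_packet(
    #       Packeter.dump_packet([0x02]))  # -> ok is True, cmd == 'Ping'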
def dump_Hello(self):
# inconsistency here!
# spec says CAPABILITIES, LISTEN_PORT but code reverses
"""
[0x00, PROTOCOL_VERSION, NETWORK_ID, CLIENT_ID, CAPABILITIES,
LISTEN_PORT, NODE_ID]
First packet sent over the connection, and sent once by both sides.
No other messages may be sent until a Hello is received.
PROTOCOL_VERSION is one of:
0x00 for PoC-1;<|fim▁hole|> 0x07 for PoC-3.
0x08 sent by Ethereum(++)/v0.3.11/brew/Darwin/unknown
NETWORK_ID should be 0.
CLIENT_ID Specifies the client software identity, as a human-readable
string (e.g. "Ethereum(++)/1.0.0").
LISTEN_PORT specifies the port that the client is listening on
(on the interface that the present connection traverses).
If 0 it indicates the client is not listening.
CAPABILITIES specifies the capabilities of the client as a set of
flags; presently three bits are used:
0x01 for peers discovery,
0x02 for transaction relaying,
0x04 for block-chain querying.
NODE_ID is optional and specifies a 512-bit hash, (potentially to be
used as public key) that identifies this node.
"""
data = [self.cmd_map_by_name['Hello'],
self.PROTOCOL_VERSION,
self.NETWORK_ID,
self.CLIENT_ID,
self.config.getint('network', 'listen_port'),
self.CAPABILITIES,
self.NODE_ID
]
return self.dump_packet(data)
def dump_Ping(self):
data = [self.cmd_map_by_name['Ping']]
return self.dump_packet(data)
def dump_Pong(self):
data = [self.cmd_map_by_name['Pong']]
return self.dump_packet(data)
def dump_Disconnect(self, reason=None):
data = [self.cmd_map_by_name['Disconnect']]
if reason:
data.append(self.disconnect_reasons_map[reason])
return self.dump_packet(data)
def dump_GetPeers(self):
data = [self.cmd_map_by_name['GetPeers']]
return self.dump_packet(data)
def dump_Peers(self, peers):
'''
:param peers: a sequence of (ip, port, pid)
:return: None if no peers
'''
data = [self.cmd_map_by_name['Peers']]
for ip, port, pid in peers:
assert ip.count('.') == 3
ip = ''.join(chr(int(x)) for x in ip.split('.'))
data.append([ip, port, pid])
return self.dump_packet(data)
def dump_Transactions(self, transactions):
data = [self.cmd_map_by_name['Transactions']] + transactions
return self.dump_packet(data)
def dump_GetTransactions(self):
"""
[0x12, [nonce, receiving_address, value, ... ], ... ]
Specify (a) transaction(s) that the peer should make sure is included
on its transaction queue. The items in the list (following the first
item 0x12) are transactions in the format described in the main
Ethereum specification.
"""
data = [self.cmd_map_by_name['GetTransactions']]
return self.dump_packet(data)
def dump_Blocks(self, blocks):
blocks_as_lists = [rlp.decode(b.serialize()) for b in blocks]
# FIXME, can we have a method to append rlp encoded data
data = [self.cmd_map_by_name['Blocks']] + blocks_as_lists
return self.dump_packet(data)
def dump_GetChain(self, parent_hashes=[], count=1):
"""
[0x14, Parent1, Parent2, ..., ParentN, Count]
Request the peer to send Count (to be interpreted as an integer) blocks
in the current canonical block chain that are children of Parent1
(to be interpreted as a SHA3 block hash). If Parent1 is not present in
the block chain, it should instead act as if the request were for
Parent2 &c. through to ParentN. If the designated parent is the present
block chain head, an empty reply should be sent. If none of the parents
are in the current canonical block chain, then NotInChain should be
sent along with ParentN (i.e. the last Parent in the parents list).
If no parents are passed, then a reply need not be made.
"""
data = [self.cmd_map_by_name['GetChain']] + parent_hashes + [count]
return self.dump_packet(data)
def dump_NotInChain(self, block_hash):
data = [self.cmd_map_by_name['NotInChain'], block_hash]
return self.dump_packet(data)
packeter = Packeter()
@dispatch.receiver(signals.config_ready)
def config_packeter(sender, config, **kwargs):
packeter.configure(config)<|fim▁end|> | 0x01 for PoC-2; |
<|file_name|>gauge.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -<|fim▁hole|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
# Copyright 2011 Cloudant, Inc.
from bucky.metrics.metric import Metric, MetricValue as MV
class Gauge(Metric):
def __init__(self, name):
self.name = name
self.value = 0.0
def update(self, value):
self.value = value
def clear(self):
pass
def metrics(self):
return [MV(self.name, self.value)]<|fim▁end|> | |
<|file_name|>filter_form.js<|end_file_name|><|fim▁begin|>function setupFilterForm() {
// make filter form expandable
$('#filter-panel .card-header').on('click', function() {
$('#filter-panel .card-body').toggle(200);
if($('#filter-panel').hasClass('filter-panel-bottom')) {
$('html,body').animate({<|fim▁hole|> });
$('#filter-panel .help_popover').on('click', function(event) {
event.stopPropagation();
});
$('#filter-form').on('submit', function(event) {
if($('#filter-form').serialize() !== window.location.search.substring(1)) {
// show progress indication
$('#filter-form').hide();
$('#filter-panel .card-body').append('<span id="filter-progress"><i class="fa fa-cog fa-spin fa-2x fa-fw"></i> <span>Applying filter…</span></span>');
}
});
}
function parseFilterArguments(paramHandler) {
var varPairs = window.location.search.substring(1).split('&');
var filterLabels = [];
for (var j = 0; j < varPairs.length; ++j) {
var pair = varPairs[j].split('=');
if(pair.length > 1) {
var key = decodeURIComponent(pair[0].replace(/\+/g, '%20'));
var val = decodeURIComponent(pair[1].replace(/\+/g, '%20'));
if(val.length < 1) {
continue;
}
var filterLabel = paramHandler(key, val);
if(filterLabel) {
filterLabels.push(filterLabel);
} else {
var input = $('<input/>');
input.attr('value', val);
input.attr('name', key);
input.attr('hidden', true);
$('#filter-form').append(input);
}
}
}
if(filterLabels.length > 0) {
$('#filter-panel .card-header').find('span').text('current: ' + filterLabels.join(', '));
}
return filterLabels;
}<|fim▁end|> | scrollTop: $(document).height()
});
} |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
<|fim▁hole|>
from aim import config as aim_cfg
from aim import context
from aim.db import api
from aim.tools.cli.groups import aimcli
@aimcli.aim.group(name='config')
@click.pass_context
def config(ctx):
aim_ctx = context.AimContext(store=api.get_store(expire_on_commit=True))
ctx.obj['manager'] = aim_cfg.ConfigManager(aim_ctx, '')
@config.command(name='update')
@click.argument('host', required=False)
@click.pass_context
def update(ctx, host):
"""Current database version."""
host = host or ''
ctx.obj['manager'].to_db(ctx.obj['conf'], host=host)
@config.command(name='replace')
@click.argument('host', required=False)
@click.pass_context
def replace(ctx, host):
"""Used for upgrading database."""
host = host or ''
ctx.obj['manager'].replace_all(ctx.obj['conf'], host=host)<|fim▁end|> | import click |
<|file_name|>deployer.js<|end_file_name|><|fim▁begin|>var site = function (tenantId, options) {
var tag, tags, rate, asset, assets,
carbon = require('carbon'),
store = require('/modules/store.js'),
path = '/_system/governance/sites/' + options.provider + '/' + options.name + '/' + options.version,
server = require('store').server,
//site = require('/modules/site-browser.js'),
um = server.userManager(tenantId),
registry = server.systemRegistry(tenantId),
am = store.assetManager('site', registry);
asset = {
"name": options.name,
"lifecycle": null,
"lifecycleState": null,
"attributes": {
"overview_status": options.status,
"overview_name": options.name,
"overview_version": options.version,
"overview_description": options.description,
"overview_url": options.url,
"overview_provider": options.provider,<|fim▁hole|> };
assets = am.search({
attributes:{
overview_name: options.name,
overview_provider: options.provider,
overview_version: options.version
}
}, {start: 0, count: 10});
if(assets.length > 0){
asset.id = assets[0].id;
am.update(asset);
} else {
am.add(asset);
}
um.authorizeRole(carbon.user.anonRole, path, carbon.registry.actions.GET);
tags = options.tags;
for (tag in tags) {
if (tags.hasOwnProperty(tag)) {
registry.tag(path, options.tags[tag]);
}
}
rate = options.rate;
if (options.rate != undefined) {
registry.rate(path, rate);
}
};
var ebook = function (tenantId, options) {
var tag, tags, rate, asset, assets,
carbon = require('carbon'),
store = require('/modules/store.js'),
path = '/_system/governance/ebooks/' + options.provider + '/' + options.name + '/' + options.version,
server = require('store').server,
um = server.userManager(tenantId),
registry = server.systemRegistry(tenantId),
am = store.assetManager('ebook', registry);
asset = {
"name": options.name,
"lifecycle": null,
"lifecycleState": null,
"attributes": {
"overview_status": options.status,
"overview_name": options.name,
"overview_version": options.version,
"overview_description": options.description,
"overview_url": options.url,
"overview_category": options.category,
"overview_author": options.author,
"overview_isbn": options.isbn,
"overview_provider": options.provider,
"images_banner": options.banner,
"images_thumbnail": options.thumbnail
}
};
assets = am.search({
attributes: {
overview_name: options.name,
overview_provider: options.provider,
overview_version: options.version
}
}, { start: 0, count: 10 });
if (assets.length > 0) {
asset.id = assets[0].id;
am.update(asset);
} else {
am.add(asset);
}
um.authorizeRole(carbon.user.anonRole, path, carbon.registry.actions.GET);
tags = options.tags;
for (tag in tags) {
if (tags.hasOwnProperty(tag)) {
registry.tag(path, options.tags[tag]);
}
}
rate = options.rate;
if (options.rate != undefined) {
registry.rate(path, rate);
}
}
var gadget = function (tenantId, options) {
var tag, tags, rate, asset, assets,
carbon = require('carbon'),
store = require('/modules/store.js'),
path = '/_system/governance/gadgets/' + options.provider + '/' + options.name + '/' + options.version,
server = require('store').server,
um = server.userManager(tenantId),
registry = server.systemRegistry(tenantId),
am = store.assetManager('gadget', registry);
asset = {
"name": options.name,
"lifecycle":null,
"lifecycleState":null,
"attributes": {
"overview_status": options.status,
"overview_name": options.name,
"overview_version": options.version,
"overview_description": options.description,
"overview_url": options.url,
"overview_provider": options.provider,
"images_banner": options.banner,
"images_thumbnail": options.thumbnail
}
};
assets = am.search({
attributes: {
overview_name: options.name,
overview_provider: options.provider,
overview_version: options.version
}
}, { start: 0, count: 10 });
if (assets.length > 0) {
asset.id = assets[0].id;
am.update(asset);
} else {
am.add(asset);
}
um.authorizeRole(carbon.user.anonRole, path, carbon.registry.actions.GET);
tags = options.tags;
for (tag in tags) {
if (tags.hasOwnProperty(tag)) {
registry.tag(path, options.tags[tag]);
}
}
rate = options.rate;
if (options.rate != undefined) {
registry.rate(path, rate);
}
};
var buildSiteRXT = function (options) {
var rxt = <metadata xmlns="http://www.wso2.org/governance/metadata">
<overview>
<provider>{options.provider}</provider>
<name>{options.name}</name>
<version>{options.version}</version>
<url>{options.url}</url>
<status>{options.status}</status>
<description>{options.description}</description>
</overview>
<images>
<thumbnail>{options.thumbnail}</thumbnail>
<banner>{options.banner}</banner>
</images>
</metadata>;
return rxt;
};
var buildEBookRXT = function (options) {
var rxt = <metadata xmlns="http://www.wso2.org/governance/metadata">
<overview>
<provider>{options.provider}</provider>
<name>{options.name}</name>
<version>{options.version}</version>
<url>{options.url}</url>
<status>{options.status}</status>
<category>{options.category}</category>
<isbn>{options.isbn}</isbn>
<author>{options.author}</author>
<description>{options.description}</description>
</overview>
<images>
<thumbnail>{options.thumbnail}</thumbnail>
<banner>{options.banner}</banner>
</images>
</metadata>;
return rxt;
};
var sso = function (tenantId, options) {
var path = '/_system/config/repository/identity/SAMLSSO/' + options.issuer64,
server = require('store').server,
registry = server.systemRegistry(tenantId);
registry.put(path, {
properties: {
'Issuer': options.issuer,
'SAMLSSOAssertionConsumerURL': options.consumerUrl,
'doSignAssertions': options.doSign,
'doSingleLogout': options.singleLogout,
'useFullyQualifiedUsername': options.useFQUsername
}
});
};<|fim▁end|> | "images_banner": options.banner,
"images_thumbnail": options.thumbnail
} |
<|file_name|>reviewValidation.js<|end_file_name|><|fim▁begin|>import checkEmpty from '../helpers/checkEmpty';
const validateReview = {
validateFields(req, res, next) {
const { content } = req.body;
if (checkEmpty(content)) {
return res.status(400).json({
status: 'fail',<|fim▁hole|> }
};
export default validateReview;<|fim▁end|> | message: 'Review content field cannot be empty'
});
}
next(); |
<|file_name|>disadvantaged_group_views.py<|end_file_name|><|fim▁begin|>from django.views.generic import ListView, DetailView, CreateView, \
DeleteView, UpdateView
from baseapp.models import Disadvantaged_group
from django.contrib import auth, messages
class Disadvantaged_groupView(object):
model = Disadvantaged_group
def get_template_names(self):
"""Nest templates within disadvantaged_group directory."""
tpl = super(Disadvantaged_groupView, self).get_template_names()[0]
app = self.model._meta.app_label
mdl = 'disadvantaged_group'
self.template_name = tpl.replace(app, '{0}/{1}'.format(app, mdl))
return [self.template_name]
class Disadvantaged_groupBaseListView(Disadvantaged_groupView):
paginate_by = 10
class Disadvantaged_groupCreateView(Disadvantaged_groupView, CreateView):
def get_success_url(self):
from django.core.urlresolvers import reverse
messages.add_message(
self.request,
messages.SUCCESS,"Successfully created."
)
return reverse('baseapp_disadvantaged_group_list')
class Disadvantaged_groupDeleteView(Disadvantaged_groupView, DeleteView):
def get_success_url(self):
from django.core.urlresolvers import reverse
return reverse('baseapp_disadvantaged_group_list')
class Disadvantaged_groupDetailView(Disadvantaged_groupView, DetailView):
def get_success_url(self):
from django.core.urlresolvers import reverse
return reverse('baseapp_disadvantaged_group_list')
class Disadvantaged_groupListView(Disadvantaged_groupBaseListView, ListView):
def get_success_url(self):
from django.core.urlresolvers import reverse<|fim▁hole|>
class Disadvantaged_groupUpdateView(Disadvantaged_groupView, UpdateView):
def get_success_url(self):
from django.core.urlresolvers import reverse
return reverse('baseapp_disadvantaged_group_list')<|fim▁end|> | return reverse('baseapp_disadvantaged_group_list') |
<|file_name|>image_alt.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
from holmes.validators.base import Validator
from holmes.utils import _
class ImageAltValidator(Validator):
@classmethod
def get_without_alt_parsed_value(cls, value):
result = []
for src, name in value:
data = '<a href="%s" target="_blank">%s</a>' % (src, name)
result.append(data)
return {'images': ', '.join(result)}
@classmethod
def get_alt_too_big_parsed_value(cls, value):
result = []
for src, name, alt in value['images']:
data = u'<a href="{}" alt="{}" target="_blank">{}</a>'.format(
src, alt, name
)
result.append(data)
return {
'max_size': value['max_size'],
'images': ', '.join(result)
}
@classmethod
def get_violation_definitions(cls):
return {
'invalid.images.alt': {
'title': _('Image(s) without alt attribute'),
'description': _(
'Images without alt text are not good for '
'Search Engines. Images without alt were '
'found for: %(images)s.'),
'value_parser': cls.get_without_alt_parsed_value,
'category': _('SEO'),
'generic_description': _(
'Images without alt attribute are not good for '
'search engines. They are searchable by the content '
                    'of this attribute, so if it\'s empty, it causes bad '
'indexing optimization.'
)
},
'invalid.images.alt_too_big': {
'title': _('Image(s) with alt attribute too big'),
'description': _(
'Images with alt text bigger than %(max_size)d chars are '
'not good for search engines. Images with a too big alt '
'were found for: %(images)s.'),
'value_parser': cls.get_alt_too_big_parsed_value,
'category': _('SEO'),
'generic_description': _(
                    'Images with alt text too long are not good for SEO. '
                    'This maximum value is configurable '
'by Holmes configuration.'
),
'unit': 'number'
}
}<|fim▁hole|>
@classmethod
def get_default_violations_values(cls, config):
return {
'invalid.images.alt_too_big': {
'value': config.MAX_IMAGE_ALT_SIZE,
'description': config.get_description('MAX_IMAGE_ALT_SIZE')
}
}
def validate(self):
max_alt_size = self.get_violation_pref('invalid.images.alt_too_big')
imgs = self.get_imgs()
result_no_alt = []
result_alt_too_big = []
for img in imgs:
src = img.get('src')
if not src:
continue
src = self.normalize_url(src)
img_alt = img.get('alt')
if src:
name = src.rsplit('/', 1)[-1]
if not img_alt:
result_no_alt.append((src, name))
elif len(img_alt) > max_alt_size:
result_alt_too_big.append((src, name, img_alt))
if result_no_alt:
self.add_violation(
key='invalid.images.alt',
value=result_no_alt,
points=20 * len(result_no_alt)
)
if result_alt_too_big:
self.add_violation(
key='invalid.images.alt_too_big',
value={
'images': result_alt_too_big,
'max_size': max_alt_size
},
points=20 * len(result_alt_too_big)
)
def get_imgs(self):
return self.review.data.get('page.all_images', None)<|fim▁end|> | |
<|file_name|>error-dialog.component.ts<|end_file_name|><|fim▁begin|>import { Component, Injectable } from '@angular/core';
import { NgxErrorDialogComponent } from '@renet-consulting/ngx-messenger';
@Injectable({
providedIn: 'root'
})
@Component({
selector: 'app-error-dialog',
templateUrl: './error-dialog.component.html',<|fim▁hole|> styleUrls: ['./error-dialog.component.scss']
})
export class ErrorDialogComponent extends NgxErrorDialogComponent { }<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import collections
<|fim▁hole|><|fim▁end|> | def make_enum(name, fields):
return collections.namedtuple(name, fields)(*range(len(fields))) |
<|file_name|>snmpraw.py<|end_file_name|><|fim▁begin|># coding=utf-8
"""
The SNMPRawCollector is designed for collecting data from SNMP-enabled devices,
using a set of specified OIDs
#### Configuration
Below is an example configuration for the SNMPRawCollector. The collector
can collect data from any number of devices by adding configuration sections
under the *devices* header. By default the collector will collect every 60
seconds. This might be a bit excessive and put unnecessary load on the
devices being polled. You may wish to change this to every 300 seconds. However
you will need to modify your graphite data retentions to handle this properly.
```
# Options for SNMPRawCollector
enabled = True
interval = 60
[devices]
# Start the device configuration
# Note: this name will be used in the metric path.
[[my-identification-for-this-host]]
host = localhost
port = 161
community = public
# Start the OID list for this device
# Note: the value part will be used in the metric path.
[[[oids]]]
1.3.6.1.4.1.2021.10.1.3.1 = cpu.load.1min
1.3.6.1.4.1.2021.10.1.3.2 = cpu.load.5min
1.3.6.1.4.1.2021.10.1.3.3 = cpu.load.15min
# If you want another host, you can. But you probably won't need it.
[[another-identification]]
host = router1.example.com
port = 161
community = public
[[[oids]]]
oid = metric.path
oid = metric.path
```
Note: If you modify the SNMPRawCollector configuration, you will need to
restart diamond.
#### Dependencies
 * pysnmp (which depends on pyasn1 0.1.7 and pycrypto)
"""<|fim▁hole|>
import os
import sys
import time
sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
'snmp'))
from snmp import SNMPCollector as parent_SNMPCollector
from diamond.metric import Metric
class SNMPRawCollector(parent_SNMPCollector):
def __init__(self, *args, **kwargs):
super(SNMPRawCollector, self).__init__(*args, **kwargs)
# list to save non-existing oid's per device, to avoid repetition of
# errors in logging. restart diamond/collector to flush this
self.skip_list = []
def get_default_config(self):
"""
Override SNMPCollector.get_default_config method to provide
        default_config for the SNMPRawCollector
"""
default_config = super(SNMPRawCollector,
self).get_default_config()
default_config.update({
'oids': {},
'path_prefix': 'servers',
'path_suffix': 'snmp',
})
return default_config
def _precision(self, value):
"""
Return the precision of the number
"""
value = str(value)
decimal = value.rfind('.')
if decimal == -1:
return 0
return len(value) - decimal - 1
def _skip(self, device, oid, reason=None):
self.skip_list.append((device, oid))
if reason is not None:
self.log.warn('Muted \'{0}\' on \'{1}\', because: {2}'.format(
oid, device, reason))
def _get_value_walk(self, device, oid, host, port, community):
data = self.walk(oid, host, port, community)
if data is None:
self._skip(device, oid, 'device down (#2)')
return
self.log.debug('Data received from WALK \'{0}\': [{1}]'.format(
device, data))
if len(data) != 1:
self._skip(
device,
oid,
'unexpected response, data has {0} entries'.format(
len(data)))
return
# because we only allow 1-key dicts, we can pick with absolute index
value = data.items()[0][1]
return value
def _get_value(self, device, oid, host, port, community):
data = self.get(oid, host, port, community)
if data is None:
self._skip(device, oid, 'device down (#1)')
return
self.log.debug('Data received from GET \'{0}\': [{1}]'.format(
device, data))
if len(data) == 0:
self._skip(device, oid, 'empty response, device down?')
return
if oid not in data:
# oid is not even in hierarchy, happens when using 9.9.9.9
# but not when using 1.9.9.9
self._skip(device, oid, 'no object at OID (#1)')
return
value = data[oid]
if value == 'No Such Object currently exists at this OID':
self._skip(device, oid, 'no object at OID (#2)')
return
if value == 'No Such Instance currently exists at this OID':
return self._get_value_walk(device, oid, host, port, community)
return value
def collect_snmp(self, device, host, port, community):
"""
Collect SNMP interface data from device
"""
self.log.debug(
'Collecting raw SNMP statistics from device \'{0}\''.format(device))
dev_config = self.config['devices'][device]
if 'oids' in dev_config:
for oid, metricName in dev_config['oids'].items():
if (device, oid) in self.skip_list:
self.log.debug(
'Skipping OID \'{0}\' ({1}) on device \'{2}\''.format(
oid, metricName, device))
continue
timestamp = time.time()
value = self._get_value(device, oid, host, port, community)
if value is None:
continue
self.log.debug(
'\'{0}\' ({1}) on device \'{2}\' - value=[{3}]'.format(
oid, metricName, device, value))
path = '.'.join([self.config['path_prefix'], device,
self.config['path_suffix'], metricName])
metric = Metric(path=path, value=value, timestamp=timestamp,
precision=self._precision(value),
metric_type='GAUGE')
self.publish_metric(metric)<|fim▁end|> | |
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
<|fim▁hole|> """This is a set of autoconf macros used by the configure.ac scripts in
other Xorg modular packages, and is needed to generate new versions
of their configure scripts with autoconf."""
homepage = "http://cgit.freedesktop.org/xorg/util/macros/"
url = "https://www.x.org/archive/individual/util/util-macros-1.19.1.tar.bz2"
version('1.19.1', '6e76e546a4e580f15cebaf8019ef1625')
version('1.19.0', '1cf984125e75f8204938d998a8b6c1e1')<|fim▁end|> |
class UtilMacros(AutotoolsPackage): |
<|file_name|>sync_rando.py<|end_file_name|><|fim▁begin|>from django.conf import settings<|fim▁hole|>from django.utils import translation
from geotrek.tourism import models as tourism_models
from geotrek.tourism.views import TouristicContentViewSet, TouristicEventViewSet
from geotrek.trekking.management.commands.sync_rando import Command as BaseCommand
# Register mapentity models
from geotrek.tourism import urls # NOQA
class Command(BaseCommand):
def sync_content(self, lang, content):
self.sync_pdf(lang, content)
for picture, resized in content.resized_pictures:
self.sync_media_file(lang, resized)
def sync_event(self, lang, event):
self.sync_pdf(lang, event)
for picture, resized in event.resized_pictures:
self.sync_media_file(lang, resized)
def sync_tourism(self, lang):
self.sync_geojson(lang, TouristicContentViewSet, 'touristiccontents')
self.sync_geojson(lang, TouristicEventViewSet, 'touristicevents')
contents = tourism_models.TouristicContent.objects.existing().order_by('pk')
contents = contents.filter(**{'published_{lang}'.format(lang=lang): True})
for content in contents:
self.sync_content(lang, content)
events = tourism_models.TouristicEvent.objects.existing().order_by('pk')
events = events.filter(**{'published_{lang}'.format(lang=lang): True})
for event in events:
self.sync_event(lang, event)
def sync(self):
super(Command, self).sync()
self.sync_static_file('**', 'tourism/touristicevent.svg')
self.sync_pictograms('**', tourism_models.InformationDeskType)
self.sync_pictograms('**', tourism_models.TouristicContentCategory)
self.sync_pictograms('**', tourism_models.TouristicContentType)
self.sync_pictograms('**', tourism_models.TouristicEventType)
for lang in settings.MODELTRANSLATION_LANGUAGES:
translation.activate(lang)
self.sync_tourism(lang)<|fim▁end|> | |
<|file_name|>test_change_project_parent.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Akvo Reporting is covered by the GNU Affero General Public License.
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
from datetime import date
from akvo.rsr.tests.base import BaseTestCase
from akvo.rsr.tests.utils import ProjectFixtureBuilder
from akvo.rsr.usecases import change_project_parent as command
class ChangeProjectParentTestCase(BaseTestCase):
def test_change_parent_to_sibling(self):
# Given
root = ProjectFixtureBuilder()\
.with_title('Parent project')\
.with_disaggregations({'Foo': ['Bar']})\
.with_results([{
'title': 'Result #1',
'indicators': [{
'title': 'Indicator #1',
'periods': [{
'period_start': date(2020, 1, 1),
'period_end': date(2020, 12, 31),
}]
}]
}])\
.with_contributors([
{'title': 'Child project'},
{'title': 'New project'}
])\
.build()<|fim▁hole|> new_project = root.get_contributor(title='New project')
# When
command.change_parent(new_project.object, child_project.object)
# Then
self.assertIsNone(new_project.object.parents_all().filter(id=root.object.id).first())
self.assertIsNotNone(new_project.object.parents_all().filter(id=child_project.object.id).first())
self.assertEqual(
new_project.results.get(title='Result #1').parent_result,
child_project.results.get(title='Result #1')
)
self.assertEqual(
new_project.indicators.get(title='Indicator #1').parent_indicator,
child_project.indicators.get(title='Indicator #1')
)
self.assertEqual(
new_project.periods.get(period_start=date(2020, 1, 1)).parent_period,
child_project.periods.get(period_start=date(2020, 1, 1))
)
self.assertEqual(
new_project.object.dimension_names.get(name='Foo').parent_dimension_name,
child_project.object.dimension_names.get(name='Foo')
)
self.assertEqual(
new_project.get_disaggregation('Foo', 'Bar').parent_dimension_value,
child_project.get_disaggregation('Foo', 'Bar')
)
def test_change_parent_to_parent_sibling(self):
# Given
root = ProjectFixtureBuilder()\
.with_title('Parent project')\
.with_disaggregations({'Foo': ['Bar']})\
.with_results([{
'title': 'Result #1',
'indicators': [{
'title': 'Indicator #1',
'periods': [{
'period_start': date(2020, 1, 1),
'period_end': date(2020, 12, 31),
}]
}]
}])\
.with_contributors([
{'title': 'Child project', 'contributors': [{'title': 'Grand child project'}]},
{'title': 'New project'}
])\
.build()
child_project2 = root.get_contributor(title='New project')
grand_child = root.get_contributor(title='Grand child project')
# When
command.change_parent(grand_child.object, child_project2.object)
# Then
parents = grand_child.object.parents_all()
self.assertEqual(1, len(parents))
self.assertEqual(child_project2.object.id, parents.first().id)
self.assertEqual(
grand_child.results.get(title='Result #1').parent_result,
child_project2.results.get(title='Result #1')
)
self.assertEqual(
grand_child.indicators.get(title='Indicator #1').parent_indicator,
child_project2.indicators.get(title='Indicator #1')
)
self.assertEqual(
grand_child.periods.get(period_start=date(2020, 1, 1)).parent_period,
child_project2.periods.get(period_start=date(2020, 1, 1))
)
self.assertEqual(
grand_child.object.dimension_names.get(name='Foo').parent_dimension_name,
child_project2.object.dimension_names.get(name='Foo')
)
self.assertEqual(
grand_child.get_disaggregation('Foo', 'Bar').parent_dimension_value,
child_project2.get_disaggregation('Foo', 'Bar')
)<|fim▁end|> | child_project = root.get_contributor(title='Child project') |
<|file_name|>AutoValueProcessor.java<|end_file_name|><|fim▁begin|>package org.gsonformat.intellij.process;
import com.intellij.psi.*;
import org.apache.http.util.TextUtils;
import org.gsonformat.intellij.config.Config;
import org.gsonformat.intellij.config.Constant;<|fim▁hole|>import org.gsonformat.intellij.entity.FieldEntity;
import org.gsonformat.intellij.entity.ClassEntity;
import java.util.regex.Pattern;
/**
* Created by dim on 16/11/7.
*/
class AutoValueProcessor extends Processor {
@Override
public void onStarProcess(ClassEntity classEntity, PsiElementFactory factory, PsiClass cls,IProcessor visitor) {
super.onStarProcess(classEntity, factory, cls, visitor);
injectAutoAnnotation(factory, cls);
}
private void injectAutoAnnotation(PsiElementFactory factory, PsiClass cls) {
PsiModifierList modifierList = cls.getModifierList();
PsiElement firstChild = modifierList.getFirstChild();
Pattern pattern = Pattern.compile("@.*?AutoValue");
if (firstChild != null && !pattern.matcher(firstChild.getText()).find()) {
PsiAnnotation annotationFromText = factory.createAnnotationFromText("@com.google.auto.value.AutoValue", cls);
modifierList.addBefore(annotationFromText, firstChild);
}
if (!modifierList.hasModifierProperty(PsiModifier.ABSTRACT)) {
modifierList.setModifierProperty(PsiModifier.ABSTRACT, true);
}
}
@Override
public void generateField(PsiElementFactory factory, FieldEntity fieldEntity, PsiClass cls, ClassEntity classEntity) {
if (fieldEntity.isGenerate()) {
StringBuilder fieldSb = new StringBuilder();
String filedName = fieldEntity.getGenerateFieldName();
if (!TextUtils.isEmpty(classEntity.getExtra())) {
fieldSb.append(classEntity.getExtra()).append("\n");
classEntity.setExtra(null);
}
if (fieldEntity.getTargetClass() != null) {
fieldEntity.getTargetClass().setGenerate(true);
}
fieldSb.append(String.format("public abstract %s %s() ; ", fieldEntity.getFullNameType(), filedName));
cls.add(factory.createMethodFromText(fieldSb.toString(), cls));
}
}
@Override
public void generateGetterAndSetter(PsiElementFactory factory, PsiClass cls, ClassEntity classEntity) {
}
@Override
public void generateConvertMethod(PsiElementFactory factory, PsiClass cls, ClassEntity classEntity) {
super.generateConvertMethod(factory, cls, classEntity);
createMethod(factory, Constant.autoValueMethodTemplate.replace("$className$", cls.getName()).trim(), cls);
}
@Override
protected void onEndGenerateClass(PsiElementFactory factory, ClassEntity classEntity, PsiClass parentClass, PsiClass generateClass, IProcessor visitor) {
super.onEndGenerateClass(factory, classEntity, parentClass, generateClass, visitor);
injectAutoAnnotation(factory, generateClass);
}
}<|fim▁end|> | |
<|file_name|>schema.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | #![allow(dead_code)]
include!(concat!(env!("OUT_DIR"), "/infer_schema.rs")); |
<|file_name|>server.rs<|end_file_name|><|fim▁begin|>extern crate gj;
extern crate gjio;
extern crate slab;
use std::io::{Error, ErrorKind};
use std::rc::{Rc, Weak};
use std::cell::{Cell, RefCell};
use slab::Slab;
use gj::{EventLoop, Promise, TaskReaper, TaskSet};
use gjio::{SocketStream, AsyncRead, AsyncWrite};
struct WriteQueue {
task: Promise<(SocketStream, Bomb), Error>,
len: Rc<Cell<usize>>,
}
impl WriteQueue {
fn new() -> WriteQueue {
WriteQueue {
task: Promise::err(Error::new(ErrorKind::Other, "uninitialized")),
len: Rc::new(Cell::new(0)),
}
}
fn init(&mut self, idx: usize, subscribers: &Rc<RefCell<Slab<WriteQueue>>>,
stream: SocketStream ) {
self.task = Promise::ok((stream, Bomb {
subscribers: Rc::downgrade(subscribers),
idx: idx
}));
}
fn len(&self) -> usize {
self.len.get()
}
fn send(&mut self, message: Vec<u8>) {
let task = ::std::mem::replace(&mut self.task, Promise::err(Error::new(ErrorKind::Other, "uninitialized")));
self.len.set(self.len.get() + 1);
let len = self.len.clone();
self.task = task.then(move |(mut stream, bomb)| {
let header = vec![message.len() as u8];
stream.write(header).then(move |_| {
stream.write(message).then(move |_| {
len.set(len.get() - 1);
Promise::ok((stream, bomb))
})
})
});
}
}
struct Bomb {
subscribers: Weak<RefCell<Slab<WriteQueue>>>,
idx: usize,
}
impl Drop for Bomb {
fn drop(&mut self) {
match self.subscribers.upgrade() {
Some(s) => {
s.borrow_mut().remove(self.idx).unwrap();
}
None => (),
}
}
}
fn handle_publisher(mut stream: SocketStream, messages_received: u64,
subscribers: Rc<RefCell<Slab<WriteQueue>>>) -> Promise<(), Error> {
stream.try_read(vec![0], 1).then(move |(buf, n)| {
if n == 0 {
// EOF
Promise::ok(())
} else {
let len = buf[0] as usize;
let body = vec![0u8; len];
stream.read(body, len).then(move |(buf, _)| {
for ref mut write_queue in subscribers.borrow_mut().iter_mut() {
if write_queue.len() < 5 {
write_queue.send(buf.clone());
}
}
handle_publisher(stream, messages_received + 1, subscribers)
})
}
})
}
fn handle_connection(stream: SocketStream,
subscribers: Rc<RefCell<Slab<WriteQueue>>>)
-> Promise<(), Error> {
let read_stream = stream.clone();
let write_queue = WriteQueue::new();
if !subscribers.borrow().has_available() {
let len = subscribers.borrow().len();
subscribers.borrow_mut().reserve_exact(len);
}
let idx = match subscribers.borrow_mut().insert(write_queue) {
Ok(idx) => idx,
Err(_) => unreachable!(),
};
match subscribers.borrow_mut().get_mut(idx) {
Some(ref mut q) => q.init(idx, &subscribers, stream),
None => unreachable!(),
}
handle_publisher(read_stream, 0, subscribers)
}
fn accept_loop(listener: gjio::SocketListener,
mut task_set: TaskSet<(), ::std::io::Error>,
subscribers: Rc<RefCell<Slab<WriteQueue>>>)
-> Promise<(), ::std::io::Error>
{
listener.accept().then(move |stream| {
task_set.add(handle_connection(stream, subscribers.clone()));
accept_loop(listener, task_set, subscribers)
})
}
struct Reaper;
impl TaskReaper<(), ::std::io::Error> for Reaper {
fn task_failed(&mut self, error: ::std::io::Error) {
println!("Task failed: {}", error);
}
}
pub fn main() {
let args: Vec<String> = ::std::env::args().collect();<|fim▁hole|> }
EventLoop::top_level(move |wait_scope| -> Result<(), Box<::std::error::Error>> {
let mut event_port = try!(gjio::EventPort::new());
let network = event_port.get_network();
let addr_str = &args[1];
let addr = try!(addr_str.parse::<::std::net::SocketAddr>());
let mut address = network.get_tcp_address(addr);
let listener = try!(address.listen());
println!("listening on {}", addr_str);
let reaper = Box::new(Reaper);
let subscribers: Rc<RefCell<Slab<WriteQueue>>> =
Rc::new(RefCell::new(Slab::with_capacity(1024)));
try!(accept_loop(listener, TaskSet::new(reaper), subscribers).wait(wait_scope, &mut event_port));
Ok(())
}).expect("top level");
}<|fim▁end|> | if args.len() != 2 {
println!("usage: {} HOST:PORT", args[0]);
return; |
<|file_name|>test_shipments.py<|end_file_name|><|fim▁begin|># This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import decimal
import pytest
from django.conf import settings
from shuup.core.models import Shipment, ShippingStatus, StockBehavior
from shuup.testing.factories import (
add_product_to_order, create_empty_order, create_product,
get_default_shop, get_default_supplier
)
from shuup.utils.excs import Problem
@pytest.mark.django_db
def test_shipment_identifier():
shop = get_default_shop()
supplier = get_default_supplier()
order = _get_order(shop, supplier)
product_lines = order.lines.exclude(product_id=None)
for line in product_lines:
for i in range(0, int(line.quantity)):
shipment = order.create_shipment({line.product: 1}, supplier=supplier)
expected_key_start = "%s/%s" % (order.pk, i)
assert shipment.identifier.startswith(expected_key_start)
assert order.shipments.count() == int(line.quantity)
assert order.shipping_status == ShippingStatus.FULLY_SHIPPED # Check that order is now fully shipped
assert not order.can_edit()
@pytest.mark.django_db
def test_shipment_creation_from_unsaved_shipment():
shop = get_default_shop()
supplier = get_default_supplier()
order = _get_order(shop, supplier)
product_lines = order.lines.exclude(product_id=None)
for line in product_lines:
for i in range(0, int(line.quantity)):
unsaved_shipment = Shipment(order=order, supplier=supplier)
shipment = order.create_shipment({line.product: 1}, shipment=unsaved_shipment)
expected_key_start = "%s/%s" % (order.pk, i)
assert shipment.identifier.startswith(expected_key_start)
assert order.shipments.count() == int(line.quantity)
@pytest.mark.django_db
def test_shipment_creation_without_supplier_and_shipment():
shop = get_default_shop()
supplier = get_default_supplier()
order = _get_order(shop, supplier)
product_lines = order.lines.exclude(product_id=None)
for line in product_lines:
for i in range(0, int(line.quantity)):
with pytest.raises(AssertionError):
order.create_shipment({line.product: 1})
assert order.shipments.count() == 0<|fim▁hole|> shop = get_default_shop()
supplier = get_default_supplier()
order = _get_order(shop, supplier)
second_order = create_empty_order(shop=shop)
second_order.full_clean()
second_order.save()
product_lines = order.lines.exclude(product_id=None)
for line in product_lines:
for i in range(0, int(line.quantity)):
with pytest.raises(AssertionError):
unsaved_shipment = Shipment(supplier=supplier, order=second_order)
order.create_shipment({line.product: 1}, shipment=unsaved_shipment)
assert order.shipments.count() == 0
@pytest.mark.django_db
def test_partially_shipped_order_status():
shop = get_default_shop()
supplier = get_default_supplier()
order = _get_order(shop, supplier)
assert order.can_edit()
first_product_line = order.lines.exclude(product_id=None).first()
assert first_product_line.quantity > 1
order.create_shipment({first_product_line.product: 1}, supplier=supplier)
assert order.shipping_status == ShippingStatus.PARTIALLY_SHIPPED
assert not order.can_edit()
@pytest.mark.django_db
def test_shipment_delete():
shop = get_default_shop()
supplier = get_default_supplier()
order = _get_order(shop, supplier)
assert order.can_edit()
first_product_line = order.lines.exclude(product_id=None).first()
assert first_product_line.quantity > 1
shipment = order.create_shipment({first_product_line.product: 1}, supplier=supplier)
assert order.shipping_status == ShippingStatus.PARTIALLY_SHIPPED
assert order.shipments.all().count() == 1
# Test shipment delete
shipment.soft_delete()
assert order.shipments.all().count() == 1
assert order.shipments.all_except_deleted().count() == 0
# Check the shipping status update
assert order.shipping_status == ShippingStatus.NOT_SHIPPED
@pytest.mark.django_db
def test_shipment_with_insufficient_stock():
if "shuup.simple_supplier" not in settings.INSTALLED_APPS:
pytest.skip("Need shuup.simple_supplier in INSTALLED_APPS")
from shuup_tests.simple_supplier.utils import get_simple_supplier
shop = get_default_shop()
supplier = get_simple_supplier()
order = _get_order(shop, supplier, stocked=True)
product_line = order.lines.products().first()
product = product_line.product
assert product_line.quantity == 15
supplier.adjust_stock(product.pk, delta=10)
stock_status = supplier.get_stock_status(product.pk)
assert stock_status.physical_count == 10
order.create_shipment({product: 5}, supplier=supplier)
assert order.shipping_status == ShippingStatus.PARTIALLY_SHIPPED
assert order.shipments.all().count() == 1
with pytest.raises(Problem):
order.create_shipment({product: 10}, supplier=supplier)
# Should be fine after adding more stock
supplier.adjust_stock(product.pk, delta=5)
order.create_shipment({product: 10}, supplier=supplier)
def _get_order(shop, supplier, stocked=False):
order = create_empty_order(shop=shop)
order.full_clean()
order.save()
for product_data in _get_product_data(stocked):
quantity = product_data.pop("quantity")
product = create_product(
sku=product_data.pop("sku"),
shop=shop,
supplier=supplier,
default_price=3.33,
**product_data)
add_product_to_order(order, supplier, product, quantity=quantity, taxless_base_unit_price=1)
order.cache_prices()
order.check_all_verified()
order.save()
return order
def _get_product_data(stocked=False):
return [
{
"sku": "sku1234",
"net_weight": decimal.Decimal("1"),
"gross_weight": decimal.Decimal("43.34257"),
"quantity": decimal.Decimal("15"),
"stock_behavior": StockBehavior.STOCKED if stocked else StockBehavior.UNSTOCKED
}
]<|fim▁end|> |
@pytest.mark.django_db
def test_shipment_creation_with_invalid_unsaved_shipment(): |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
#![doc(
html_logo_url = "https://raw.githubusercontent.com/rusoto/rusoto/master/assets/logo-square.png"
)]
//! <p>Amazon EMR is a web service that makes it easier to process large amounts of data efficiently. Amazon EMR uses Hadoop processing combined with several AWS services to do tasks such as web indexing, data mining, log file analysis, machine learning, scientific simulation, and data warehouse management.</p>
//!<|fim▁hole|>mod custom;
mod generated;
pub use custom::*;
pub use generated::*;<|fim▁end|> | //! If you're using the service, you're probably looking for [EmrClient](struct.EmrClient.html) and [Emr](trait.Emr.html).
|
<|file_name|>0006_experience_temp_subtypes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2016-10-21 13:23
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('exdb', '0005_swap_type_subtype'),
]
operations = [
migrations.AddField(
model_name='experience',
name='temp_subtypes',
field=models.ManyToManyField(blank=True, related_name='experience_set', to='exdb.Subtype'),
),<|fim▁hole|><|fim▁end|> | ] |
<|file_name|>ijrois.py<|end_file_name|><|fim▁begin|># Copyright: Luis Pedro Coelho <[email protected]>, 2012-2018
# License: MIT
import numpy as np
def read_roi(fileobj):
'''
points = read_roi(fileobj)
Read ImageJ's ROI format
Parameters
----------
fileobj: should be a file-like object
Returns
-------
points: a list of points
'''
# This is based on:
# http://rsbweb.nih.gov/ij/developer/source/ij/io/RoiDecoder.java.html
# http://rsbweb.nih.gov/ij/developer/source/ij/io/RoiEncoder.java.html
SPLINE_FIT = 1
DOUBLE_HEADED = 2
OUTLINE = 4
OVERLAY_LABELS = 8
OVERLAY_NAMES = 16
OVERLAY_BACKGROUNDS = 32
OVERLAY_BOLD = 64
SUB_PIXEL_RESOLUTION = 128
DRAW_OFFSET = 256
pos = [4]
def get8():
pos[0] += 1
s = fileobj.read(1)
if not s:
raise IOError('readroi: Unexpected EOF')
return ord(s)
def get16():
b0 = get8()
b1 = get8()
return (b0 << 8) | b1
def get32():
s0 = get16()
s1 = get16()
return (s0 << 16) | s1
def getfloat():
v = np.int32(get32())
return v.view(np.float32)
magic = fileobj.read(4)
if magic != b'Iout':
raise IOError('Magic number not found')
version = get16()
<|fim▁hole|> roi_type = get8()
# Discard second Byte:
get8()
if not (0 <= roi_type < 11):
raise ValueError('roireader: ROI type %s not supported' % roi_type)
if roi_type != 7:
raise ValueError('roireader: ROI type %s not supported (!= 7)' % roi_type)
top = get16()
left = get16()
bottom = get16()
right = get16()
n_coordinates = get16()
x1 = getfloat()
y1 = getfloat()
x2 = getfloat()
y2 = getfloat()
stroke_width = get16()
shape_roi_size = get32()
stroke_color = get32()
fill_color = get32()
subtype = get16()
if subtype != 0:
raise ValueError('roireader: ROI subtype {} not supported (!= 0)'.format(subtype))
options = get16()
arrow_style = get8()
arrow_head_size = get8()
rect_arc_size = get16()
position = get32()
header2offset = get32()
if options & SUB_PIXEL_RESOLUTION:
getc = getfloat
points = np.empty((n_coordinates, 2), dtype=np.float32)
else:
getc = get16
points = np.empty((n_coordinates, 2), dtype=np.int16)
points[:,1] = [getc() for i in range(n_coordinates)]
points[:,0] = [getc() for i in range(n_coordinates)]
points[:,1] += left
points[:,0] += top
points -= 1
return points
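# Usage sketch (illustrative only, not part of the original module): read_roi
# expects an already-open binary file-like object; the '.roi' file name below
# is a hypothetical placeholder.
def _example_read_single_roi(roi_path='outline.roi'):
    with open(roi_path, 'rb') as f:
        points = read_roi(f)
    # `points` is an (n_coordinates, 2) array whose rows are (y, x) vertices
    return points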
def read_roi_zip(fname):
'''
Reads all ROIs in a ZIP file
Parameters
----------
fname : str
Input filename
Returns
-------
rois: list of ROIs
Each ROI is a vector of 2D points
See Also
--------
read_roi: function, reads a single ROI
'''
import zipfile
with zipfile.ZipFile(fname) as zf:
return [read_roi(zf.open(n))
for n in zf.namelist()]<|fim▁end|> | # It seems that the roi type field occupies 2 Bytes, but only one is used |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from __future__ import division, print_function
from os.path import join, split, dirname
import os
import sys
from distutils.dep_util import newer
from distutils.msvccompiler import get_build_version as get_msvc_build_version
def needs_mingw_ftime_workaround():
# We need the mingw workaround for _ftime if the msvc runtime version is
# 7.1 or above and we build with mingw ...
# ... but we can't easily detect compiler version outside distutils command
# context, so we will need to detect in randomkit whether we build with gcc
msver = get_msvc_build_version()
if msver and msver >= 8:
return True
return False
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration, get_mathlibs
config = Configuration('random', parent_package, top_path)
def generate_libraries(ext, build_dir):
config_cmd = config.get_config_cmd()
libs = get_mathlibs()
tc = testcode_wincrypt()
if config_cmd.try_run(tc):
libs.append('Advapi32')
ext.libraries.extend(libs)
return None
# enable unix large file support on 32 bit systems
# (64 bit off_t, lseek -> lseek64 etc.)
defs = [('_FILE_OFFSET_BITS', '64'),
('_LARGEFILE_SOURCE', '1'),
('_LARGEFILE64_SOURCE', '1'),
]
if needs_mingw_ftime_workaround():
defs.append(("NPY_NEEDS_MINGW_TIME_WORKAROUND", None))
libs = []
# Configure mtrand
try:
import cffi
have_cffi = True
except ImportError:
have_cffi = False
if have_cffi:
#create the dll/so for the cffi version
if sys.platform == 'win32':
libs.append('Advapi32')
defs.append(('_MTRAND_DLL',None))
config.add_shared_library('_mtrand',
sources=[join('mtrand', x) for x in
['randomkit.c', 'distributions.c', 'initarray.c']],
build_info = {
'libraries': libs,
'depends': [join('mtrand', '*.h'),
],
'macros': defs,
}
)
else:<|fim▁hole|> 'distributions.c']]+[generate_libraries],
libraries=libs,
depends=[join('mtrand', '*.h'),
join('mtrand', '*.pyx'),
join('mtrand', '*.pxi'),],
define_macros=defs,
)
config.add_data_files(('.', join('mtrand', 'randomkit.h')))
config.add_data_dir('tests')
return config
def testcode_wincrypt():
return """\
/* check to see if _WIN32 is defined */
int main(int argc, char *argv[])
{
#ifdef _WIN32
return 0;
#else
return 1;
#endif
}
"""
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(configuration=configuration)<|fim▁end|> | config.add_extension('mtrand',
sources=[join('mtrand', x) for x in
['mtrand.c', 'randomkit.c', 'initarray.c', |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#![feature(box_syntax)]
#![feature(fnbox)]
#![feature(mpsc_select)]
#![feature(plugin)]
#![plugin(plugins)]
extern crate brotli;
extern crate cookie as cookie_rs;
extern crate devtools_traits;
extern crate flate2;
extern crate hyper;
extern crate immeta;
extern crate ipc_channel;
#[macro_use]
extern crate log;
extern crate mime_guess;
extern crate msg;
extern crate net_traits;
extern crate openssl;
extern crate rustc_serialize;
extern crate time;
extern crate url;
extern crate util;
extern crate uuid;
extern crate websocket;
pub mod about_loader;
pub mod cookie;
pub mod cookie_storage;
pub mod data_loader;
pub mod file_loader;
pub mod hsts;
pub mod http_loader;
pub mod image_cache_thread;
pub mod mime_classifier;
pub mod pub_domains;
pub mod resource_thread;
pub mod storage_thread;
pub mod websocket_loader;
/// An implementation of the [Fetch spec](https://fetch.spec.whatwg.org/)
pub mod fetch {<|fim▁hole|> #![allow(dead_code, unused)] // XXXManishearth this is only temporary until the Fetch mod starts being used
pub mod cors_cache;
pub mod methods;
pub mod response;
}<|fim▁end|> | |
<|file_name|>infrastructure_stack.py<|end_file_name|><|fim▁begin|># Copyright 2014 - Numergy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#<|fim▁hole|># WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy as sa
from solum.objects import infrastructure_stack as abstract
from solum.objects.sqlalchemy import models as sql
class InfrastructureStack(sql.Base, abstract.InfrastructureStack):
"""Represent an infrastructure_stack in sqlalchemy."""
__tablename__ = 'infrastructure_stack'
__resource__ = 'infrastructure/stacks'
__table_args__ = sql.table_args()
id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)
uuid = sa.Column(sa.String(36), nullable=False)
project_id = sa.Column(sa.String(36))
user_id = sa.Column(sa.String(36))
image_id = sa.Column(sa.String(36))
heat_stack_id = sa.Column(sa.String(36))
name = sa.Column(sa.String(100))
description = sa.Column(sa.String(255))
tags = sa.Column(sa.Text)
class InfrastructureStackList(abstract.InfrastructureStackList):
"""Represent a list of infrastructure_stacks in sqlalchemy."""
@classmethod
def get_all(cls, context):
return InfrastructureStackList(sql.model_query(context,
InfrastructureStack))<|fim▁end|> | # Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT |
<|file_name|>rn.js<|end_file_name|><|fim▁begin|>define(function () {'use strict';
function rn (n) {
return n.toLowerCase().replace(/a$/, '');
}
return rn;
<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>el.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2007-2009,2011 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""This module represents the Greek language.
.. seealso:: http://en.wikipedia.org/wiki/Greek_language
"""
from __future__ import unicode_literals
import re
from translate.lang import common
from translate.misc.dictutils import ordereddict
class el(common.Common):
"""This class represents Greek."""
# Greek uses ; as question mark and the middot instead
sentenceend = ".!;…"
sentencere = re.compile(r"""
(?s) # make . also match newlines
.*? # anything, but match non-greedy
        [%s] # the punctuation for sentence ending
        \s+ # the spacing after the punctuation
(?=[^a-zά-ώ\d]) # lookahead that next part starts with caps
""" % sentenceend, re.VERBOSE | re.UNICODE)
puncdict = ordereddict([
(";", "·"),
("?", ";"),
])
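    # For example, mapping punctuation through puncdict during translation turns
    # "Ready?" into "Ready;" and "a; b" into "a· b".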
# Valid latin characters for use as accelerators
valid_latin_accel = ("abcdefghijklmnopqrstuvwxyz"
"ABCDEFGHIJKLMNOPQRSTUVWXYZ"
"1234567890")
# Valid greek characters for use as accelerators (accented characters
# and "ς" omitted)
valid_greek_accel = ("αβγδεζηθικλμνξοπρστυφχψω"<|fim▁hole|> validaccel = "".join([valid_latin_accel, valid_greek_accel])<|fim▁end|> | "ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩ")
# Valid accelerators |
<|file_name|>EmbeddableIDTestServlet.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2021 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.ibm.ws.jpa.fvt.entity.tests.web;
import java.util.HashMap;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceUnit;
import javax.servlet.annotation.WebServlet;
import org.junit.Test;
import com.ibm.ws.jpa.fvt.entity.testlogic.EmbeddableIDTestLogic;
import com.ibm.ws.testtooling.testinfo.JPAPersistenceContext;
import com.ibm.ws.testtooling.testinfo.JPAPersistenceContext.PersistenceContextType;
import com.ibm.ws.testtooling.testinfo.JPAPersistenceContext.PersistenceInjectionType;
import com.ibm.ws.testtooling.vehicle.web.JPATestServlet;
@SuppressWarnings("serial")
@WebServlet(urlPatterns = "/EmbeddableIDTestServlet")
public class EmbeddableIDTestServlet extends JPATestServlet {
// Container Managed Transaction Scope
@PersistenceContext(unitName = "ENTITY_JTA")
private EntityManager cmtsEm;
// Application Managed JTA
@PersistenceUnit(unitName = "ENTITY_JTA")
private EntityManagerFactory amjtaEmf;
// Application Managed Resource-Local
@PersistenceUnit(unitName = "ENTITY_RL")
private EntityManagerFactory amrlEmf;
@PostConstruct
private void initFAT() {
testClassName = EmbeddableIDTestLogic.class.getName();
jpaPctxMap.put("test-jpa-resource-amjta",
new JPAPersistenceContext("test-jpa-resource-amjta", PersistenceContextType.APPLICATION_MANAGED_JTA, PersistenceInjectionType.FIELD, "amjtaEmf"));
jpaPctxMap.put("test-jpa-resource-amrl",
new JPAPersistenceContext("test-jpa-resource-amrl", PersistenceContextType.APPLICATION_MANAGED_RL, PersistenceInjectionType.FIELD, "amrlEmf"));
jpaPctxMap.put("test-jpa-resource-cmts",
new JPAPersistenceContext("test-jpa-resource-cmts", PersistenceContextType.CONTAINER_MANAGED_TS, PersistenceInjectionType.FIELD, "cmtsEm"));
}
@Test
public void jpa10_Entity_EmbeddableID_Ano_AMJTA_Web() throws Exception {
final String testName = "jpa10_Entity_EmbeddableID_Ano_AMJTA_Web";
final String testMethod = "testEmbeddableIDClass001";
final String testResource = "test-jpa-resource-amjta";
HashMap<String, java.io.Serializable> properties = new HashMap<String, java.io.Serializable>();
properties.put("EntityName", "EmbeddableIdEntity");
executeTest(testName, testMethod, testResource, properties);
}
@Test
public void jpa10_Entity_EmbeddableID_XML_AMJTA_Web() throws Exception {
final String testName = "jpa10_Entity_EmbeddableID_XML_AMJTA_Web";
final String testMethod = "testEmbeddableIDClass001";
final String testResource = "test-jpa-resource-amjta";
HashMap<String, java.io.Serializable> properties = new HashMap<String, java.io.Serializable>();
properties.put("EntityName", "XMLEmbeddableIdEntity");
executeTest(testName, testMethod, testResource, properties);
}
@Test
public void jpa10_Entity_EmbeddableID_Ano_AMRL_Web() throws Exception {
final String testName = "jpa10_Entity_EmbeddableID_Ano_AMRL_Web";
final String testMethod = "testEmbeddableIDClass001";
final String testResource = "test-jpa-resource-amrl";
HashMap<String, java.io.Serializable> properties = new HashMap<String, java.io.Serializable>();
properties.put("EntityName", "EmbeddableIdEntity");
executeTest(testName, testMethod, testResource, properties);
}
@Test
public void jpa10_Entity_EmbeddableID_XML_AMRL_Web() throws Exception {
final String testName = "jpa10_Entity_EmbeddableID_XML_AMRL_Web";
final String testMethod = "testEmbeddableIDClass001";
final String testResource = "test-jpa-resource-amrl";
HashMap<String, java.io.Serializable> properties = new HashMap<String, java.io.Serializable>();
properties.put("EntityName", "XMLEmbeddableIdEntity");
executeTest(testName, testMethod, testResource, properties);
}
@Test
public void jpa10_Entity_EmbeddableID_Ano_CMTS_Web() throws Exception {
final String testName = "jpa10_Entity_EmbeddableID_Ano_CMTS_Web";
final String testMethod = "testEmbeddableIDClass001";
final String testResource = "test-jpa-resource-cmts";
HashMap<String, java.io.Serializable> properties = new HashMap<String, java.io.Serializable>();
properties.put("EntityName", "EmbeddableIdEntity");
executeTest(testName, testMethod, testResource, properties);
}
@Test
public void jpa10_Entity_EmbeddableID_XML_CMTS_Web() throws Exception {<|fim▁hole|> final String testName = "jpa10_Entity_EmbeddableID_XML_CMTS_Web";
final String testMethod = "testEmbeddableIDClass001";
final String testResource = "test-jpa-resource-cmts";
HashMap<String, java.io.Serializable> properties = new HashMap<String, java.io.Serializable>();
properties.put("EntityName", "XMLEmbeddableIdEntity");
executeTest(testName, testMethod, testResource, properties);
}
}<|fim▁end|> | |
<|file_name|>main_test.go<|end_file_name|><|fim▁begin|>package main
import (
"testing"<|fim▁hole|>2. identify general edge cases
3. identify edge cases specific to the implementation
*/
func TestSolution(t *testing.T) {
for i, tc := range []struct{
A, B []int
expected int
}{
// their example
{[]int{4, 3, 2, 1, 5}, []int{0, 1, 0, 0, 0}, 2},
// edge case: every fish survives because they swim in the same direction
{[]int{4, 3, 2}, []int{1, 1, 1}, 3},
{[]int{4, 3, 2}, []int{0, 0, 0}, 3},
// edge case: every fish has the same size, so they don't eat each other.
{[]int{4, 4, 4, 4}, []int{1, 0, 1, 0}, 4},
// tests
{[]int{2, 1, 3, 2}, []int{1, 0, 1, 0}, 2},
{[]int{3, 2, 1, 4}, []int{1, 1, 1, 0}, 1},
}{
actual := Solution(tc.A, tc.B)
if actual != tc.expected {
t.Errorf("case %d: (%v, %v) expected %d, got %d", i, tc.A, tc.B, tc.expected, actual)
}
}
}<|fim▁end|> | )
/*
1. imagine the usual scenario |
<|file_name|>htmltitleelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::HTMLTitleElementBinding;
use dom::bindings::codegen::Bindings::HTMLTitleElementBinding::HTMLTitleElementMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::Root;
use dom::bindings::str::DOMString;
use dom::characterdata::CharacterData;
use dom::document::Document;
use dom::htmlelement::HTMLElement;
use dom::node::{ChildrenMutation, Node};
use dom::text::Text;
use dom::virtualmethods::VirtualMethods;
use html5ever_atoms::LocalName;
#[dom_struct]
pub struct HTMLTitleElement {
htmlelement: HTMLElement,
}
impl HTMLTitleElement {
fn new_inherited(local_name: LocalName, prefix: Option<DOMString>, document: &Document) -> HTMLTitleElement {
HTMLTitleElement {
htmlelement: HTMLElement::new_inherited(local_name, prefix, document)
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLTitleElement> {
Node::reflect_node(box HTMLTitleElement::new_inherited(local_name, prefix, document),
document,
HTMLTitleElementBinding::Wrap)
}
}
impl HTMLTitleElementMethods for HTMLTitleElement {
// https://html.spec.whatwg.org/multipage/#dom-title-text
fn Text(&self) -> DOMString {
let mut content = String::new();
for child in self.upcast::<Node>().children() {
if let Some(text) = child.downcast::<Text>() {
content.push_str(&text.upcast::<CharacterData>().data());
}
}
DOMString::from(content)
}
// https://html.spec.whatwg.org/multipage/#dom-title-text
fn SetText(&self, value: DOMString) {
self.upcast::<Node>().SetTextContent(Some(value))<|fim▁hole|>
impl VirtualMethods for HTMLTitleElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn children_changed(&self, mutation: &ChildrenMutation) {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
let node = self.upcast::<Node>();
if node.is_in_doc() {
node.owner_doc().title_changed();
}
}
fn bind_to_tree(&self, is_in_doc: bool) {
let node = self.upcast::<Node>();
if is_in_doc {
node.owner_doc().title_changed();
}
}
}<|fim▁end|> | }
} |
<|file_name|>tokenize.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The html5ever Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate debug;
extern crate html5;
use std::io;
use std::char;
use std::default::Default;
use html5::tokenizer::{TokenSink, Token, TokenizerOpts, ParseError};
use html5::tokenizer::{CharacterTokens, NullCharacterToken, TagToken, StartTag, EndTag};
use html5::driver::{tokenize_to, one_input};
struct TokenPrinter {
in_char_run: bool,
}
impl TokenPrinter {
fn is_char(&mut self, is_char: bool) {
match (self.in_char_run, is_char) {
(false, true ) => print!("CHAR : \""),
(true, false) => println!("\""),
_ => (),
}
self.in_char_run = is_char;
}
fn do_char(&mut self, c: char) {
self.is_char(true);
char::escape_default(c, |d| print!("{:c}", d));
}
}
impl TokenSink for TokenPrinter {
fn process_token(&mut self, token: Token) {
match token {
CharacterTokens(b) => {
for c in b.as_slice().chars() {
self.do_char(c);
}
}
NullCharacterToken => self.do_char('\0'),
TagToken(tag) => {
self.is_char(false);
// This is not proper HTML serialization, of course.
match tag.kind {
StartTag => print!("TAG : <\x1b[32m{:s}\x1b[0m", tag.name),
EndTag => print!("TAG : <\x1b[31m/{:s}\x1b[0m", tag.name),
}
for attr in tag.attrs.iter() {
print!(" \x1b[36m{:s}\x1b[0m='\x1b[34m{:s}\x1b[0m'", attr.name, attr.value);<|fim▁hole|> }
println!(">");
}
ParseError(err) => {
self.is_char(false);
println!("ERROR: {:s}", err);
}
_ => {
self.is_char(false);
println!("OTHER: {}", token);
}
}
}
}
fn main() {
let mut sink = TokenPrinter {
in_char_run: false,
};
let input = io::stdin().read_to_str().unwrap();
tokenize_to(&mut sink, one_input(input), TokenizerOpts {
profile: true,
.. Default::default()
});
sink.is_char(false);
}<|fim▁end|> | }
if tag.self_closing {
print!(" \x1b[31m/\x1b[0m"); |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render, redirect
from django.http import HttpResponseRedirect
from .models import SupportProject
# Create your views here.<|fim▁hole|>def index( request ):
sp = SupportProject.objects.all()
if sp.count() == 1:
return HttpResponseRedirect( sp.first().project.get_absolute_url() )
else:
context_dict = { 'sps' : sp, }
return render( request, 'support/index.html', context_dict )<|fim▁end|> | |
<|file_name|>shim.go<|end_file_name|><|fim▁begin|>package types
import (
"context"
pb "btrfs_to_glacier/messages"
)
// Represents a line in /proc/self/mountinfo
// May be enriched with info from other places ...
type MountEntry struct {
Id int // NOT stable across reads of mountinfo
Device *Device
TreePath string
MountedPath string
FsType string
Options map[string]string // correspond to the per-superblock options
BtrfsVolId uint64
// Bind mounts to the same filesystem/subvolume
Binds []*MountEntry
}
type Device struct {<|fim▁hole|> GptUuid string // Optional
}
type Filesystem struct {
Uuid string
Label string
Devices []*Device
Mounts []*MountEntry
}
type Linuxutil interface {
// Returns true if this process is running with CAP_SYS_ADMIN privileges.
// Many btrfs operations require this.
IsCapSysAdmin() bool
// The same as what you would get with `uname -a`.
LinuxKernelVersion() (uint32, uint32)
// The build version in the btrfs-progs header we are linking against.
BtrfsProgsVersion() (uint32, uint32)
// The git commit hash from which the current binary was built.
ProjectVersion() string
// Drops root privileges or dies if `seteuid` clib call fails.
// Only works if go binary invoked via `sudo`.
// Returns a function that can be called to restore back privileges.
DropRoot() (func(), error)
// Obtains root privileges back or dies if `seteuid` clib call fails.
// Only works if go binary invoked via `sudo`.
// Returns a function that can be called to restore user permissions.
GetRoot() (func(), error)
// Mounts the device and checks it got mounted at desired path.
// If device is already mounted at target, this is a noop.
// The device needs to be mountable by the user in /etc/fstab.
// CAP_SYS_ADMIN will not be acquired.
Mount(context.Context, string, string) (*MountEntry, error)
UMount(context.Context, string) error
// Returns all mounts found on the host that are backed by a block device.
// Caveats:
// * Bind mounts are NOT deduplicated.
  // * Mounts associated to multiple devices (ex: btrfs raid) will only have one device associated
// * Retrieves each mount `FsUUID` and `GptUUID` (if available: /dev/mapper does not have a GptUuid)
ListBlockDevMounts() ([]*MountEntry, error)
// Returns all btrfs filesystems found on the host.
// Bind mounts to the same subvolume are deduplicated.
// For each filesystem list all the mounts it owns.
ListBtrfsFilesystems() ([]*Filesystem, error)
}
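// Illustrative sketch, not part of the original interface file: DropRoot/GetRoot
// above return a restore callback which callers are expected to defer.
// doUnprivilegedWork is a hypothetical helper name.
func doUnprivilegedWork(lu Linuxutil, work func() error) error {
  restore, err := lu.DropRoot()
  if err != nil {
    return err
  }
  defer restore()
  return work()
}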
type Btrfsutil interface {
// Get the `struct btrfs_util_subvolume_info` for a btrfs subvolume.
// If `path` does not point to a snapshot the corresponding fields will be empty.
// @path must be the root of the subvolume.
SubVolumeInfo(path string) (*pb.SubVolume, error)
// Returns the btrfs filesystem ID for the subvolume that owns `path`.
// Works for any path under the volume.
SubVolumeIdForPath(path string) (uint64, error)
// Returns an error unless `path` is the root of a btrfs subvolume.
// It works even on the root subvolume.
IsSubVolumeMountPath(path string) error
// Returns the TreePath of a volume in its btrfs filesystem.
// Requires the argument to have a valid MountedPath (it can work with a path inside the volume).
// Requires CAP_SYS_ADMIN.
GetSubVolumeTreePath(*pb.SubVolume) (string, error)
// Returns a list with all subvolumes in the filesystem that owns `path`.
// If `is_root_fs` then `path` must be the filesystem root and this method can be called without CAP_SYS_ADMIN.
// Otherwise listing on non-root paths can only be done by root.
// If the subvolume is not a snapshot then the corresponding fields will be empty.
ListSubVolumesInFs(path string, is_root_fs bool) ([]*pb.SubVolume, error)
// Reads a stream generated from `btrfs send --no-data` and returns a record of the operations.
// Takes ownership of `read_pipe` and will close it once done.
ReadAndProcessSendStream(dump ReadEndIf) (*SendDumpOperations, error)
// Starts a separate `btrfs send` and returns the read end of the pipe.
// `no_data` is the same option as for `btrfs send`.
// `from` can be null to get the full contents of the subvolume.
// When `ctx` is done/cancelled the write end of the pipe should be closed and the forked process killed.
StartSendStream(ctx context.Context, from string, to string, no_data bool) (ReadEndIf, error)
// Wrapper around `btrfs receive`. `to_dir` must exist and be a directory.
// The mounted path of the received subvol will be `to_dir/<basename_src_subvol>`.
// Takes ownership of `read_pipe` and will close it once done.
ReceiveSendStream(ctx context.Context, to_dir string, read_pipe ReadEndIf) error
// Calls `btrfs_util_create_snapshot()` to create a snapshot of `subvol` in `snap` path.
// Sets the read-only flag.
// Note async subvolume is no longer possible.
CreateSnapshot(subvol string, snap string) error
// Calls `btrfs_util_delete_subvolume` with empty `flags` argument.
DeleteSubVolume(subvol string) error
// Calls `btrfs_util_start_sync()` to wait for a transaction to sync.
WaitForTransactionId(root_fs string, tid uint64) error
}<|fim▁end|> | Name string
MapperGroup string // if device belongs to a virtual block device
Minor, Major int // Unreliable when taken from /proc/self/mountinfo
FsUuid string // Optional |
<|file_name|>eq_report_stock.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Odoo Addon, Open Source Management Solution
# Copyright (C) 2014-now Equitania Software GmbH(<http://www.equitania.de>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the<|fim▁hole|># License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import models, fields, api, _
class report_stock_picking(models.Model):
_inherit = 'stock.picking'
# def get_tax(self, tax_id, language, currency_id):
# amount_net = 0;
# for line in self.order_line:
# if tax_id.id in [x.id for x in line.tax_id] and not line.eq_optional:
# amount_net += line.price_subtotal
#
# tax_amount = 0
# for tex in self.env['account.tax']._compute([tax_id], amount_net, 1):
# tax_amount += tex['amount']
#
# return self.env["eq_report_helper"].get_price(tax_amount, language, 'Sale Price Report', currency_id)
#
#
# @api.multi
# def get_price(self, value, currency_id, language):
# """
    #         Formats a price, taking the 'Sale Price Report' decimal places setting into account
# :param value:
# :param currency_id:
# :param language:
# :return:
# """
# return self.env["eq_report_helper"].get_price(value, language, 'Sale Price Report', currency_id)
#
# @api.multi
# def get_qty(self, value, language):
# """
    #         Formats a quantity, taking the 'Sale Quantity Report' decimal places setting into account
# :param value:
# :param language:
# :return:
# """
# return self.env["eq_report_helper"].get_qty(value, language, 'Sale Quantity Report')
@api.multi
def html_text_is_set(self, value):
"""
        Workaround for HTML texts: after saving with no real content they still contain <p><br></p>.
        Strips line breaks and paragraph markup to check whether any actual content was entered.
:param value:
:return:
"""
if not value:
return False
value = value.replace('<br>', '')
value = value.replace('<p>', '')
value = value.replace('</p>', '')
value = value.replace('<', '')
value = value.replace('>', '')
value = value.replace('/', '')
value = value.strip()
return value != ''<|fim▁end|> | |
<|file_name|>test_pipeline.py<|end_file_name|><|fim▁begin|>"""
Test the pipeline module.
"""
import numpy as np
from scipy import sparse
from sklearn.externals.six.moves import zip
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raises_regex
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import assert_dict_equal
from sklearn.base import clone, BaseEstimator
from sklearn.pipeline import Pipeline, FeatureUnion, make_pipeline, make_union
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import LinearRegression
from sklearn.cluster import KMeans
from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.decomposition import PCA, TruncatedSVD
from sklearn.datasets import load_iris
from sklearn.preprocessing import StandardScaler
from sklearn.feature_extraction.text import CountVectorizer
JUNK_FOOD_DOCS = (
"the pizza pizza beer copyright",
"the pizza burger beer copyright",
"the the pizza beer beer copyright",
"the burger beer beer copyright",
"the coke burger coke copyright",
"the coke burger burger",
)
class NoFit(object):
"""Small class to test parameter dispatching.
"""
def __init__(self, a=None, b=None):
self.a = a
self.b = b
class NoTrans(NoFit):
def fit(self, X, y):
return self
def get_params(self, deep=False):
return {'a': self.a, 'b': self.b}
def set_params(self, **params):
self.a = params['a']
return self
class NoInvTransf(NoTrans):
def transform(self, X, y=None):
return X
class Transf(NoInvTransf):
def transform(self, X, y=None):
return X
def inverse_transform(self, X):
return X
class Mult(BaseEstimator):
def __init__(self, mult=1):
self.mult = mult
def fit(self, X, y):
return self
def transform(self, X):
return np.asarray(X) * self.mult
def inverse_transform(self, X):
return np.asarray(X) / self.mult
def predict(self, X):
return (np.asarray(X) * self.mult).sum(axis=1)
predict_proba = predict_log_proba = decision_function = predict
def score(self, X, y=None):
return np.sum(X)
class FitParamT(BaseEstimator):
"""Mock classifier
"""
def __init__(self):
self.successful = False
<|fim▁hole|> def predict(self, X):
return self.successful
def test_pipeline_init():
# Test the various init parameters of the pipeline.
assert_raises(TypeError, Pipeline)
# Check that we can't instantiate pipelines with objects without fit
# method
assert_raises_regex(TypeError,
'Last step of Pipeline should implement fit. '
'.*NoFit.*',
Pipeline, [('clf', NoFit())])
# Smoke test with only an estimator
clf = NoTrans()
pipe = Pipeline([('svc', clf)])
assert_equal(pipe.get_params(deep=True),
dict(svc__a=None, svc__b=None, svc=clf,
**pipe.get_params(deep=False)))
# Check that params are set
pipe.set_params(svc__a=0.1)
assert_equal(clf.a, 0.1)
assert_equal(clf.b, None)
# Smoke test the repr:
repr(pipe)
# Test with two objects
clf = SVC()
filter1 = SelectKBest(f_classif)
pipe = Pipeline([('anova', filter1), ('svc', clf)])
# Check that we can't instantiate with non-transformers on the way
# Note that NoTrans implements fit, but not transform
assert_raises_regex(TypeError,
'All intermediate steps should be transformers'
'.*\\bNoTrans\\b.*',
Pipeline, [('t', NoTrans()), ('svc', clf)])
# Check that params are set
pipe.set_params(svc__C=0.1)
assert_equal(clf.C, 0.1)
# Smoke test the repr:
repr(pipe)
# Check that params are not set when naming them wrong
assert_raises(ValueError, pipe.set_params, anova__C=0.1)
# Test clone
pipe2 = clone(pipe)
assert_false(pipe.named_steps['svc'] is pipe2.named_steps['svc'])
# Check that apart from estimators, the parameters are the same
params = pipe.get_params(deep=True)
params2 = pipe2.get_params(deep=True)
for x in pipe.get_params(deep=False):
params.pop(x)
for x in pipe2.get_params(deep=False):
params2.pop(x)
# Remove estimators that where copied
params.pop('svc')
params.pop('anova')
params2.pop('svc')
params2.pop('anova')
assert_equal(params, params2)
def test_pipeline_methods_anova():
# Test the various methods of the pipeline (anova).
iris = load_iris()
X = iris.data
y = iris.target
# Test with Anova + LogisticRegression
clf = LogisticRegression()
filter1 = SelectKBest(f_classif, k=2)
pipe = Pipeline([('anova', filter1), ('logistic', clf)])
pipe.fit(X, y)
pipe.predict(X)
pipe.predict_proba(X)
pipe.predict_log_proba(X)
pipe.score(X, y)
def test_pipeline_fit_params():
# Test that the pipeline can take fit parameters
pipe = Pipeline([('transf', Transf()), ('clf', FitParamT())])
pipe.fit(X=None, y=None, clf__should_succeed=True)
# classifier should return True
assert_true(pipe.predict(None))
# and transformer params should not be changed
assert_true(pipe.named_steps['transf'].a is None)
assert_true(pipe.named_steps['transf'].b is None)
def test_pipeline_raise_set_params_error():
# Test pipeline raises set params error message for nested models.
pipe = Pipeline([('cls', LinearRegression())])
# expected error message
error_msg = ('Invalid parameter %s for estimator %s. '
'Check the list of available parameters '
'with `estimator.get_params().keys()`.')
assert_raise_message(ValueError,
error_msg % ('fake', 'Pipeline'),
pipe.set_params,
fake='nope')
# nested model check
assert_raise_message(ValueError,
error_msg % ("fake", pipe),
pipe.set_params,
fake__estimator='nope')
def test_pipeline_methods_pca_svm():
# Test the various methods of the pipeline (pca + svm).
iris = load_iris()
X = iris.data
y = iris.target
# Test with PCA + SVC
clf = SVC(probability=True, random_state=0)
pca = PCA(svd_solver='full', n_components='mle', whiten=True)
pipe = Pipeline([('pca', pca), ('svc', clf)])
pipe.fit(X, y)
pipe.predict(X)
pipe.predict_proba(X)
pipe.predict_log_proba(X)
pipe.score(X, y)
def test_pipeline_methods_preprocessing_svm():
# Test the various methods of the pipeline (preprocessing + svm).
iris = load_iris()
X = iris.data
y = iris.target
n_samples = X.shape[0]
n_classes = len(np.unique(y))
scaler = StandardScaler()
pca = PCA(n_components=2, svd_solver='randomized', whiten=True)
clf = SVC(probability=True, random_state=0, decision_function_shape='ovr')
for preprocessing in [scaler, pca]:
pipe = Pipeline([('preprocess', preprocessing), ('svc', clf)])
pipe.fit(X, y)
# check shapes of various prediction functions
predict = pipe.predict(X)
assert_equal(predict.shape, (n_samples,))
proba = pipe.predict_proba(X)
assert_equal(proba.shape, (n_samples, n_classes))
log_proba = pipe.predict_log_proba(X)
assert_equal(log_proba.shape, (n_samples, n_classes))
decision_function = pipe.decision_function(X)
assert_equal(decision_function.shape, (n_samples, n_classes))
pipe.score(X, y)
def test_fit_predict_on_pipeline():
# test that the fit_predict method is implemented on a pipeline
# test that the fit_predict on pipeline yields same results as applying
# transform and clustering steps separately
iris = load_iris()
scaler = StandardScaler()
km = KMeans(random_state=0)
# first compute the transform and clustering step separately
scaled = scaler.fit_transform(iris.data)
separate_pred = km.fit_predict(scaled)
# use a pipeline to do the transform and clustering in one step
pipe = Pipeline([('scaler', scaler), ('Kmeans', km)])
pipeline_pred = pipe.fit_predict(iris.data)
assert_array_almost_equal(pipeline_pred, separate_pred)
def test_fit_predict_on_pipeline_without_fit_predict():
# tests that a pipeline does not have fit_predict method when final
# step of pipeline does not have fit_predict defined
scaler = StandardScaler()
pca = PCA(svd_solver='full')
pipe = Pipeline([('scaler', scaler), ('pca', pca)])
assert_raises_regex(AttributeError,
"'PCA' object has no attribute 'fit_predict'",
getattr, pipe, 'fit_predict')
def test_feature_union():
# basic sanity check for feature union
iris = load_iris()
X = iris.data
X -= X.mean(axis=0)
y = iris.target
svd = TruncatedSVD(n_components=2, random_state=0)
select = SelectKBest(k=1)
fs = FeatureUnion([("svd", svd), ("select", select)])
fs.fit(X, y)
X_transformed = fs.transform(X)
assert_equal(X_transformed.shape, (X.shape[0], 3))
# check if it does the expected thing
assert_array_almost_equal(X_transformed[:, :-1], svd.fit_transform(X))
assert_array_equal(X_transformed[:, -1],
select.fit_transform(X, y).ravel())
# test if it also works for sparse input
# We use a different svd object to control the random_state stream
fs = FeatureUnion([("svd", svd), ("select", select)])
X_sp = sparse.csr_matrix(X)
X_sp_transformed = fs.fit_transform(X_sp, y)
assert_array_almost_equal(X_transformed, X_sp_transformed.toarray())
# test setting parameters
fs.set_params(select__k=2)
assert_equal(fs.fit_transform(X, y).shape, (X.shape[0], 4))
# test it works with transformers missing fit_transform
fs = FeatureUnion([("mock", Transf()), ("svd", svd), ("select", select)])
X_transformed = fs.fit_transform(X, y)
assert_equal(X_transformed.shape, (X.shape[0], 8))
# test error if some elements do not support transform
assert_raises_regex(TypeError,
'All estimators should implement fit and '
'transform.*\\bNoTrans\\b',
FeatureUnion,
[("transform", Transf()), ("no_transform", NoTrans())])
def test_make_union():
pca = PCA(svd_solver='full')
mock = Transf()
fu = make_union(pca, mock)
names, transformers = zip(*fu.transformer_list)
assert_equal(names, ("pca", "transf"))
assert_equal(transformers, (pca, mock))
def test_pipeline_transform():
# Test whether pipeline works with a transformer at the end.
# Also test pipeline.transform and pipeline.inverse_transform
iris = load_iris()
X = iris.data
pca = PCA(n_components=2, svd_solver='full')
pipeline = Pipeline([('pca', pca)])
# test transform and fit_transform:
X_trans = pipeline.fit(X).transform(X)
X_trans2 = pipeline.fit_transform(X)
X_trans3 = pca.fit_transform(X)
assert_array_almost_equal(X_trans, X_trans2)
assert_array_almost_equal(X_trans, X_trans3)
X_back = pipeline.inverse_transform(X_trans)
X_back2 = pca.inverse_transform(X_trans)
assert_array_almost_equal(X_back, X_back2)
def test_pipeline_fit_transform():
# Test whether pipeline works with a transformer missing fit_transform
iris = load_iris()
X = iris.data
y = iris.target
transf = Transf()
pipeline = Pipeline([('mock', transf)])
# test fit_transform:
X_trans = pipeline.fit_transform(X, y)
X_trans2 = transf.fit(X, y).transform(X)
assert_array_almost_equal(X_trans, X_trans2)
def test_set_pipeline_steps():
transf1 = Transf()
transf2 = Transf()
pipeline = Pipeline([('mock', transf1)])
assert_true(pipeline.named_steps['mock'] is transf1)
# Directly setting attr
pipeline.steps = [('mock2', transf2)]
assert_true('mock' not in pipeline.named_steps)
assert_true(pipeline.named_steps['mock2'] is transf2)
assert_equal([('mock2', transf2)], pipeline.steps)
# Using set_params
pipeline.set_params(steps=[('mock', transf1)])
assert_equal([('mock', transf1)], pipeline.steps)
# Using set_params to replace single step
pipeline.set_params(mock=transf2)
assert_equal([('mock', transf2)], pipeline.steps)
# With invalid data
pipeline.set_params(steps=[('junk', ())])
assert_raises(TypeError, pipeline.fit, [[1]], [1])
assert_raises(TypeError, pipeline.fit_transform, [[1]], [1])
def test_set_pipeline_step_none():
# Test setting Pipeline steps to None
X = np.array([[1]])
y = np.array([1])
mult2 = Mult(mult=2)
mult3 = Mult(mult=3)
mult5 = Mult(mult=5)
def make():
return Pipeline([('m2', mult2), ('m3', mult3), ('last', mult5)])
pipeline = make()
exp = 2 * 3 * 5
assert_array_equal([[exp]], pipeline.fit_transform(X, y))
assert_array_equal([exp], pipeline.fit(X).predict(X))
assert_array_equal(X, pipeline.inverse_transform([[exp]]))
pipeline.set_params(m3=None)
exp = 2 * 5
assert_array_equal([[exp]], pipeline.fit_transform(X, y))
assert_array_equal([exp], pipeline.fit(X).predict(X))
assert_array_equal(X, pipeline.inverse_transform([[exp]]))
assert_dict_equal(pipeline.get_params(deep=True),
{'steps': pipeline.steps,
'm2': mult2,
'm3': None,
'last': mult5,
'm2__mult': 2,
'last__mult': 5,
})
pipeline.set_params(m2=None)
exp = 5
assert_array_equal([[exp]], pipeline.fit_transform(X, y))
assert_array_equal([exp], pipeline.fit(X).predict(X))
assert_array_equal(X, pipeline.inverse_transform([[exp]]))
# for other methods, ensure no AttributeErrors on None:
other_methods = ['predict_proba', 'predict_log_proba',
'decision_function', 'transform', 'score']
for method in other_methods:
getattr(pipeline, method)(X)
pipeline.set_params(m2=mult2)
exp = 2 * 5
assert_array_equal([[exp]], pipeline.fit_transform(X, y))
assert_array_equal([exp], pipeline.fit(X).predict(X))
assert_array_equal(X, pipeline.inverse_transform([[exp]]))
pipeline = make()
pipeline.set_params(last=None)
# mult2 and mult3 are active
exp = 6
assert_array_equal([[exp]], pipeline.fit(X, y).transform(X))
assert_array_equal([[exp]], pipeline.fit_transform(X, y))
assert_array_equal(X, pipeline.inverse_transform([[exp]]))
assert_raise_message(AttributeError,
"'NoneType' object has no attribute 'predict'",
getattr, pipeline, 'predict')
# Check None step at construction time
exp = 2 * 5
pipeline = Pipeline([('m2', mult2), ('m3', None), ('last', mult5)])
assert_array_equal([[exp]], pipeline.fit_transform(X, y))
assert_array_equal([exp], pipeline.fit(X).predict(X))
assert_array_equal(X, pipeline.inverse_transform([[exp]]))
def test_pipeline_ducktyping():
pipeline = make_pipeline(Mult(5))
pipeline.predict
pipeline.transform
pipeline.inverse_transform
pipeline = make_pipeline(Transf())
assert_false(hasattr(pipeline, 'predict'))
pipeline.transform
pipeline.inverse_transform
pipeline = make_pipeline(None)
assert_false(hasattr(pipeline, 'predict'))
pipeline.transform
pipeline.inverse_transform
pipeline = make_pipeline(Transf(), NoInvTransf())
assert_false(hasattr(pipeline, 'predict'))
pipeline.transform
assert_false(hasattr(pipeline, 'inverse_transform'))
pipeline = make_pipeline(NoInvTransf(), Transf())
assert_false(hasattr(pipeline, 'predict'))
pipeline.transform
assert_false(hasattr(pipeline, 'inverse_transform'))
def test_make_pipeline():
t1 = Transf()
t2 = Transf()
pipe = make_pipeline(t1, t2)
assert_true(isinstance(pipe, Pipeline))
assert_equal(pipe.steps[0][0], "transf-1")
assert_equal(pipe.steps[1][0], "transf-2")
pipe = make_pipeline(t1, t2, FitParamT())
assert_true(isinstance(pipe, Pipeline))
assert_equal(pipe.steps[0][0], "transf-1")
assert_equal(pipe.steps[1][0], "transf-2")
assert_equal(pipe.steps[2][0], "fitparamt")
def test_feature_union_weights():
# test feature union with transformer weights
iris = load_iris()
X = iris.data
y = iris.target
pca = PCA(n_components=2, svd_solver='randomized', random_state=0)
select = SelectKBest(k=1)
# test using fit followed by transform
fs = FeatureUnion([("pca", pca), ("select", select)],
transformer_weights={"pca": 10})
fs.fit(X, y)
X_transformed = fs.transform(X)
# test using fit_transform
fs = FeatureUnion([("pca", pca), ("select", select)],
transformer_weights={"pca": 10})
X_fit_transformed = fs.fit_transform(X, y)
# test it works with transformers missing fit_transform
fs = FeatureUnion([("mock", Transf()), ("pca", pca), ("select", select)],
transformer_weights={"mock": 10})
X_fit_transformed_wo_method = fs.fit_transform(X, y)
# check against expected result
# We use a different pca object to control the random_state stream
assert_array_almost_equal(X_transformed[:, :-1], 10 * pca.fit_transform(X))
assert_array_equal(X_transformed[:, -1],
select.fit_transform(X, y).ravel())
assert_array_almost_equal(X_fit_transformed[:, :-1],
10 * pca.fit_transform(X))
assert_array_equal(X_fit_transformed[:, -1],
select.fit_transform(X, y).ravel())
assert_equal(X_fit_transformed_wo_method.shape, (X.shape[0], 7))
def test_feature_union_parallel():
# test that n_jobs work for FeatureUnion
X = JUNK_FOOD_DOCS
fs = FeatureUnion([
("words", CountVectorizer(analyzer='word')),
("chars", CountVectorizer(analyzer='char')),
])
fs_parallel = FeatureUnion([
("words", CountVectorizer(analyzer='word')),
("chars", CountVectorizer(analyzer='char')),
], n_jobs=2)
fs_parallel2 = FeatureUnion([
("words", CountVectorizer(analyzer='word')),
("chars", CountVectorizer(analyzer='char')),
], n_jobs=2)
fs.fit(X)
X_transformed = fs.transform(X)
assert_equal(X_transformed.shape[0], len(X))
fs_parallel.fit(X)
X_transformed_parallel = fs_parallel.transform(X)
assert_equal(X_transformed.shape, X_transformed_parallel.shape)
assert_array_equal(
X_transformed.toarray(),
X_transformed_parallel.toarray()
)
# fit_transform should behave the same
X_transformed_parallel2 = fs_parallel2.fit_transform(X)
assert_array_equal(
X_transformed.toarray(),
X_transformed_parallel2.toarray()
)
# transformers should stay fit after fit_transform
X_transformed_parallel2 = fs_parallel2.transform(X)
assert_array_equal(
X_transformed.toarray(),
X_transformed_parallel2.toarray()
)
def test_feature_union_feature_names():
word_vect = CountVectorizer(analyzer="word")
char_vect = CountVectorizer(analyzer="char_wb", ngram_range=(3, 3))
ft = FeatureUnion([("chars", char_vect), ("words", word_vect)])
ft.fit(JUNK_FOOD_DOCS)
feature_names = ft.get_feature_names()
for feat in feature_names:
assert_true("chars__" in feat or "words__" in feat)
assert_equal(len(feature_names), 35)
ft = FeatureUnion([("tr1", Transf())]).fit([[1]])
assert_raise_message(AttributeError,
'Transformer tr1 (type Transf) does not provide '
'get_feature_names', ft.get_feature_names)
def test_classes_property():
iris = load_iris()
X = iris.data
y = iris.target
reg = make_pipeline(SelectKBest(k=1), LinearRegression())
reg.fit(X, y)
assert_raises(AttributeError, getattr, reg, "classes_")
clf = make_pipeline(SelectKBest(k=1), LogisticRegression(random_state=0))
assert_raises(AttributeError, getattr, clf, "classes_")
clf.fit(X, y)
assert_array_equal(clf.classes_, np.unique(y))
def test_X1d_inverse_transform():
transformer = Transf()
pipeline = make_pipeline(transformer)
X = np.ones(10)
msg = "1d X will not be reshaped in pipeline.inverse_transform"
assert_warns_message(FutureWarning, msg, pipeline.inverse_transform, X)
def test_set_feature_union_steps():
mult2 = Mult(2)
mult2.get_feature_names = lambda: ['x2']
mult3 = Mult(3)
mult3.get_feature_names = lambda: ['x3']
mult5 = Mult(5)
mult5.get_feature_names = lambda: ['x5']
ft = FeatureUnion([('m2', mult2), ('m3', mult3)])
assert_array_equal([[2, 3]], ft.transform(np.asarray([[1]])))
assert_equal(['m2__x2', 'm3__x3'], ft.get_feature_names())
# Directly setting attr
ft.transformer_list = [('m5', mult5)]
assert_array_equal([[5]], ft.transform(np.asarray([[1]])))
assert_equal(['m5__x5'], ft.get_feature_names())
# Using set_params
ft.set_params(transformer_list=[('mock', mult3)])
assert_array_equal([[3]], ft.transform(np.asarray([[1]])))
assert_equal(['mock__x3'], ft.get_feature_names())
# Using set_params to replace single step
ft.set_params(mock=mult5)
assert_array_equal([[5]], ft.transform(np.asarray([[1]])))
assert_equal(['mock__x5'], ft.get_feature_names())
def test_set_feature_union_step_none():
mult2 = Mult(2)
mult2.get_feature_names = lambda: ['x2']
mult3 = Mult(3)
mult3.get_feature_names = lambda: ['x3']
X = np.asarray([[1]])
ft = FeatureUnion([('m2', mult2), ('m3', mult3)])
assert_array_equal([[2, 3]], ft.fit(X).transform(X))
assert_array_equal([[2, 3]], ft.fit_transform(X))
assert_equal(['m2__x2', 'm3__x3'], ft.get_feature_names())
ft.set_params(m2=None)
assert_array_equal([[3]], ft.fit(X).transform(X))
assert_array_equal([[3]], ft.fit_transform(X))
assert_equal(['m3__x3'], ft.get_feature_names())
ft.set_params(m3=None)
assert_array_equal([[]], ft.fit(X).transform(X))
assert_array_equal([[]], ft.fit_transform(X))
assert_equal([], ft.get_feature_names())
# check we can change back
ft.set_params(m3=mult3)
assert_array_equal([[3]], ft.fit(X).transform(X))
def test_step_name_validation():
bad_steps1 = [('a__q', Mult(2)), ('b', Mult(3))]
bad_steps2 = [('a', Mult(2)), ('a', Mult(3))]
for cls, param in [(Pipeline, 'steps'),
(FeatureUnion, 'transformer_list')]:
# we validate in construction (despite scikit-learn convention)
bad_steps3 = [('a', Mult(2)), (param, Mult(3))]
for bad_steps, message in [
(bad_steps1, "Step names must not contain __: got ['a__q']"),
(bad_steps2, "Names provided are not unique: ['a', 'a']"),
(bad_steps3, "Step names conflict with constructor "
"arguments: ['%s']" % param),
]:
# three ways to make invalid:
# - construction
assert_raise_message(ValueError, message, cls,
**{param: bad_steps})
# - setattr
est = cls(**{param: [('a', Mult(1))]})
setattr(est, param, bad_steps)
assert_raise_message(ValueError, message, est.fit, [[1]], [1])
assert_raise_message(ValueError, message, est.fit_transform,
[[1]], [1])
# - set_params
est = cls(**{param: [('a', Mult(1))]})
est.set_params(**{param: bad_steps})
assert_raise_message(ValueError, message, est.fit, [[1]], [1])
assert_raise_message(ValueError, message, est.fit_transform,
[[1]], [1])<|fim▁end|> | def fit(self, X, y, should_succeed=False):
self.successful = should_succeed
|
<|file_name|>error.go<|end_file_name|><|fim▁begin|>// Copyright 2014 Canonical Ltd.
// Licensed under the AGPLv3, see LICENCE file for details.<|fim▁hole|>import (
"github.com/juju/juju/agent/tools"
"github.com/juju/juju/version"
)
// UpgradeReadyError is returned by an Upgrader to report that
// an upgrade is ready to be performed and a restart is due.
type UpgradeReadyError struct {
AgentName string
OldTools version.Binary
NewTools version.Binary
DataDir string
}
func (e *UpgradeReadyError) Error() string {
return "must restart: an agent upgrade is available"
}
// ChangeAgentTools does the actual agent upgrade.
// It should be called just before an agent exits, so that
// it will restart running the new tools.
func (e *UpgradeReadyError) ChangeAgentTools() error {
agentTools, err := tools.ChangeAgentTools(e.DataDir, e.AgentName, e.NewTools)
if err != nil {
return err
}
logger.Infof("upgraded from %v to %v (%q)", e.OldTools, agentTools.Version, agentTools.URL)
return nil
}<|fim▁end|> |
package upgrader
|
<|file_name|>gmail_checker.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Sean Vig
# Copyright (c) 2014, 2019 zordsdavini
# Copyright (c) 2014 Alexandr Kriptonov
# Copyright (c) 2014 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import imaplib
import re
from libqtile.log_utils import logger
from libqtile.widget import base
class GmailChecker(base.ThreadPoolText):
"""A simple gmail checker. If 'status_only_unseen' is True - set 'fmt' for one argument, ex. 'unseen: {0}'"""
orientations = base.ORIENTATION_HORIZONTAL
defaults = [
("update_interval", 30, "Update time in seconds."),
("username", None, "username"),
("password", None, "password"),
("email_path", "INBOX", "email_path"),
("display_fmt", "inbox[{0}],unseen[{1}]", "Display format"),
("status_only_unseen", False, "Only show unseen messages"),
]
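    # A minimal configuration sketch (not part of the original file; the address,
    # app password and bar placement below are placeholder assumptions):
    #
    #     widget.GmailChecker(username="[email protected]",
    #                         password="an-app-password",
    #                         status_only_unseen=True,
    #                         display_fmt="unseen: {0}")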
def __init__(self, **config):
base.ThreadPoolText.__init__(self, "", **config)
self.add_defaults(GmailChecker.defaults)
def poll(self):
self.gmail = imaplib.IMAP4_SSL('imap.gmail.com')
self.gmail.login(self.username, self.password)
answer, raw_data = self.gmail.status(self.email_path,
'(MESSAGES UNSEEN)')
if answer == "OK":
dec = raw_data[0].decode()
messages = int(re.search(r'MESSAGES\s+(\d+)', dec).group(1))
unseen = int(re.search(r'UNSEEN\s+(\d+)', dec).group(1))<|fim▁hole|> return self.display_fmt.format(unseen)
else:
return self.display_fmt.format(messages, unseen)
else:
logger.exception(
'GmailChecker UNKNOWN error, answer: %s, raw_data: %s',
answer, raw_data)
return "UNKNOWN ERROR"<|fim▁end|> | if(self.status_only_unseen): |
<|file_name|>DescribeSpotInstanceRequestsResult.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
/**
* <p>
* Contains the output of DescribeSpotInstanceRequests.
* </p>
*/
public class DescribeSpotInstanceRequestsResult implements Serializable, Cloneable {
/**
* One or more Spot Instance requests.
*/
private com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequests;
/**
* One or more Spot Instance requests.
*
* @return One or more Spot Instance requests.
*/
public java.util.List<SpotInstanceRequest> getSpotInstanceRequests() {
if (spotInstanceRequests == null) {
spotInstanceRequests = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>();
spotInstanceRequests.setAutoConstruct(true);
}
return spotInstanceRequests;
}
/**
* One or more Spot Instance requests.
*
* @param spotInstanceRequests One or more Spot Instance requests.
*/
public void setSpotInstanceRequests(java.util.Collection<SpotInstanceRequest> spotInstanceRequests) {
if (spotInstanceRequests == null) {
this.spotInstanceRequests = null;
return;
}
com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequestsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>(spotInstanceRequests.size());
spotInstanceRequestsCopy.addAll(spotInstanceRequests);
this.spotInstanceRequests = spotInstanceRequestsCopy;
}
/**
* One or more Spot Instance requests.
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setSpotInstanceRequests(java.util.Collection)} or
* {@link #withSpotInstanceRequests(java.util.Collection)} if you want to
* override the existing values.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param spotInstanceRequests One or more Spot Instance requests.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public DescribeSpotInstanceRequestsResult withSpotInstanceRequests(SpotInstanceRequest... spotInstanceRequests) {
if (getSpotInstanceRequests() == null) setSpotInstanceRequests(new java.util.ArrayList<SpotInstanceRequest>(spotInstanceRequests.length));
for (SpotInstanceRequest value : spotInstanceRequests) {
getSpotInstanceRequests().add(value);
}
return this;
}
/**
* One or more Spot Instance requests.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param spotInstanceRequests One or more Spot Instance requests.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public DescribeSpotInstanceRequestsResult withSpotInstanceRequests(java.util.Collection<SpotInstanceRequest> spotInstanceRequests) {
if (spotInstanceRequests == null) {
this.spotInstanceRequests = null;
} else {
com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest> spotInstanceRequestsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<SpotInstanceRequest>(spotInstanceRequests.size());
spotInstanceRequestsCopy.addAll(spotInstanceRequests);
this.spotInstanceRequests = spotInstanceRequestsCopy;
}
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");<|fim▁hole|> }
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getSpotInstanceRequests() == null) ? 0 : getSpotInstanceRequests().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof DescribeSpotInstanceRequestsResult == false) return false;
DescribeSpotInstanceRequestsResult other = (DescribeSpotInstanceRequestsResult)obj;
if (other.getSpotInstanceRequests() == null ^ this.getSpotInstanceRequests() == null) return false;
if (other.getSpotInstanceRequests() != null && other.getSpotInstanceRequests().equals(this.getSpotInstanceRequests()) == false) return false;
return true;
}
@Override
public DescribeSpotInstanceRequestsResult clone() {
try {
return (DescribeSpotInstanceRequestsResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}<|fim▁end|> | if (getSpotInstanceRequests() != null) sb.append("SpotInstanceRequests: " + getSpotInstanceRequests() );
sb.append("}");
return sb.toString(); |
<|file_name|>serverstatus.cpp<|end_file_name|><|fim▁begin|>#include "serverstatus.h"
#include <sys/types.h>
#include <sys/stat.h>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <syslog.h>
#include <string.h>
#include <time.h>
#include <iostream>
#include <fstream>
#include <limits>
#include <signal.h>
#include <unistd.h>
#include <vector>
#include <pthread.h>
#include <mutex>
#include "system_stats.h"
#include "unix_functions.h"
#include "config.h"
#include "communication.h"
#include "status_types.h"
using namespace std;
//===================================================================================
// CONFIGURATION SECTION
//===================================================================================
// Version to check with possibly incompatible config files
#define VERSION "v0.6-beta"
// location where the pid file shall be stored
#define PID_FILE "/var/run/serverstatus.pid"
#define MAGIC_NUMBER "-42"
// After one day the counter resets (this is the maximum interval too)
#define MAX_TIME 1440
// Loop-Time: The loop will restart every X seconds
// Note: Some calculations within the loop may take some time.
// There is no guarantee that loop times smaller than a few seconds
// archieved.
// Note: In case of change the interval values need adjustments.
// Default: 60 seconds
#define LOOP_TIME 60
// Per default ServerStatus will look for a configuration file at these positions:
const string PATH[] = {"/usr/local/etc/serverstatus.cfg", "/etc/serverstatus.cfg"};
//===================================================================================
// END OF CONFIGURATION SECTION
//===================================================================================
//===================================================================================
// GLOBAL VARIABLE SECTION
//===================================================================================
// this variable defines when all the loops (main loop, thread loops) shall terminate
volatile sig_atomic_t loop = 1;
pthread_mutex_t thread_Mutex = PTHREAD_MUTEX_INITIALIZER;
// this vector is misused as temporary stack for server-client exchange
vector<thread_value> thread_Val;
//===================================================================================
// END OF GLOBAL VARIABLE SECTION
//===================================================================================
// writes the path to the config file into the submitted parameter:
bool getConfigFilePath(string &output) {
for (int i = 0; i < sizeof(PATH)/sizeof(PATH[0]); i++) {
if (file_exists(PATH[i])) {
output = PATH[i];
return true;
}
}
return false;
}
// reads a pid file without(!) checking its existence
string read_pid_file(const string& name) {
ifstream in(name);
string l;
if (getline(in, l)) {
return l;
} else {
return MAGIC_NUMBER;
}
}
// write pid file. returns false if an error occured
bool write_pid_file(const string& name) {
ofstream of;
of.open(PID_FILE);
if (of.fail()) {
return false;
} else {
of << getpid();
of.close();
return true;
}
}
// checks if a process is running
bool pid_running(int pid) {
return (0 == kill(pid, 0));
}
// used for SIGTERM handling
void terminate(int signum) {
loop = 0;
}
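// Parses one incoming record of the form "section, clientID, value1, value2, ..."
// and stores it in the shared thread_Val buffer (mutex protected), replacing any
// previously stored values for the same section/client pair.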
void storeValueGlobal(vector<string> value) {
  // value must consist of at least a type, an id and one value
if (value.size() <= 3) { return; }
// store value in global variable (enter mutex area)
pthread_mutex_lock(&thread_Mutex);
// iterate through all currently stored values and check if this type already exists
int k = 0;
int id = -1;
while ((k < thread_Val.size()) && (id == -1)) {
if ((value[0] == thread_Val[k].section) && (value[1] == thread_Val[k].clientID)) {
id = k;
}
k++;
}
thread_value t;
if (id == -1) {
// create new entry
t.section = value[0];
t.clientID = trim(value[1]);
for (int i = 2; i < value.size(); i++) {
t.value.push_back(atof(trim(value[i]).c_str()));
}
thread_Val.push_back(t);
} else {
// override existing entry
thread_Val[id].value.clear();
for (int i = 2; i < value.size(); i++) {
thread_Val[id].value.push_back(atof(trim(value[i]).c_str()));
}
}
// leave mutex
pthread_mutex_unlock(&thread_Mutex);
}
thread_value readValueGlobal(string section, string clientID) {
pthread_mutex_lock(&thread_Mutex);
thread_value s;
s.section = section;
// s.value stays empty if non is found
for (int i = 0; i < thread_Val.size(); i++){
if ((section == thread_Val[i].section) && (clientID == thread_Val[i].clientID)) {
// copy values into local variable
for (int j = 0; j < thread_Val[i].value.size(); j++) {
s.value.push_back(thread_Val[i].value[j]);
}
// delete "read" entry from global variable
thread_Val[i].value.clear();
}
}
pthread_mutex_unlock(&thread_Mutex);
// return struct
return s;
}
//===================================================================================
// SERVER THREAD:
// creates a thread that waits and listens on a socket for external input
// which is then stored in a global variable (!MUTEX)
//===================================================================================
void *serverThread(void *arg) {
server_thread *s = (server_thread *)arg;
syslog(LOG_NOTICE, "Server thread started; Listening at port %d", s->port);
connection c = create_socket(SERVER, s->port, "127.0.0.1", s->ssl);
// check if connection was created successfully
if (c.socket == -1) {
syslog(LOG_ERR, "Server Thread: Failed to create socket.");
pthread_exit(0);
}
if ((s->ssl) && (s->cert_file[0] != '-') && (s->key_file[0] != '-')) {
if (!load_local_certificate(c, s->cert_file, s->key_file)) {
syslog(LOG_ERR, "Server Thread: Failed to load certificates.");
pthread_exit(0);
}
}
while (loop) {
// wait for input on the socket
string input;
try {
if (!read_from_socket(c, input)) {
continue;
}
syslog(LOG_NOTICE, "Server Thread: Incoming data: %s", input.c_str());
// string is expected to have form such as "type, id, value1, value2, ..."
vector<string> s = split(input, ',');
storeValueGlobal(s);
} catch (int error) {
syslog(LOG_ERR, "Server Thread: An error [%d] occurred.", error);
}
}
destroy_socket(c);
pthread_exit(0);
}
//===================================================================================
// THE MAIN MODE:
// This creates a daemon process that will keep running in the background
// and create json files as output.
// Logging via syslog service possible.
//===================================================================================
void startDaemon(const string &configFile) {
int userID = getuid();
// check for root privileges
if (userID != 0) {
printf("Error: ServerStatus requires root privileges to work properly.\nTry to run serverstatus as root.\n\n");
exit(EXIT_FAILURE);
}
// check for other instances of serverstatus
if (getDaemonStatusRunning(false)) {
printf("Error: ServerStatus is already running. \n");
exit(EXIT_FAILURE);
}
pid_t pid, sid;
pid = fork();
// could not create child process
if (pid < 0) { printf("Starting ServerStatus: [failed] \n"); exit(EXIT_FAILURE); }
// child process created: terminate parent
if (pid > 0) { printf("Starting ServerStatus: [successful]\n"); exit(EXIT_SUCCESS); }
umask(0);
// using syslog local1 for this daemon
setlogmask(LOG_UPTO (LOG_NOTICE));
openlog("ServerStatus", LOG_CONS | LOG_PID | LOG_NDELAY, LOG_LOCAL1);
syslog(LOG_NOTICE, "Started by User %s", getUsernameFromUID(userID).c_str());
// create pid file
if (!write_pid_file(PID_FILE)) {
syslog (LOG_ERR, "Main Thread: pid file could not be created.");
exit(EXIT_FAILURE);
}
syslog(LOG_DEBUG, "Main Thread: pid file successfully written.");
// set sid
sid = setsid();
if (sid < 0) {
syslog (LOG_ERR, "Main Thread: Could not create new sid for child process.");
exit(EXIT_FAILURE);
}
syslog(LOG_DEBUG, "Main Thread: New SID for child process created.");
// change working directory to root dir
if ((chdir("/")) < 0) {
syslog (LOG_ERR, "Main Thread: Could not change working directory.");
exit(EXIT_FAILURE);
}
syslog(LOG_DEBUG, "Main Thread: Changed working directory to root directory.");
close(STDIN_FILENO);
close(STDOUT_FILENO);
close(STDERR_FILENO);
config *configuration = new config(configFile);
syslog(LOG_DEBUG, "Main Thread: Configuration file loaded.");
// Version check
if (configuration->readVersion() != VERSION) {
syslog (LOG_ERR, "Main Thread: Configuration version does not match.");
exit(EXIT_FAILURE);
}
// SIGTERM handling
struct sigaction term;
memset(&term, 0, sizeof(struct sigaction));
term.sa_handler = terminate;
sigaction(SIGTERM, &term, NULL);
syslog(LOG_DEBUG, "Main Thread: SIGTERM handling added.");
// handle server/client mode
pthread_t thread;
pthread_attr_t thread_attr;
if (configuration->readApplicationType() == "server") {
// server requires an additional thread that handles external input
// string *port = new string(configuration->readServerPort());
server_thread s;
s.port = configuration->readServerPort();
s.ssl = configuration->readSSL();
string cert = configuration->readCertFile().c_str();
string key = configuration->readKeyFile().c_str();
s.cert_file = &cert[0u];
s.key_file = &key[0u];
pthread_attr_init(&thread_attr);
pthread_create(&thread, &thread_attr, serverThread, (void *)&s);
}
// get all the different sys_stat sections and create their classes
vector<string> sys_sections = configuration->readSections();
sys_stat sys;
for (int i = 0; i < sys_sections.size(); i++) {
// read interval time and create class for each at top defined status type
sys.interval.push_back(configuration->readInterval(sys_sections[i]));
sys.stat.push_back(new SystemStats(sys_sections[i], configFile));
// if possible load from file
sys.stat[i]->loadFromFile();
syslog(LOG_DEBUG, "Main Thread: SystemStats class %s iniated.", sys_sections[i].c_str());<|fim▁hole|> }
syslog(LOG_DEBUG, "Main Thread: All sys_stat objects created.");
// the loop fires once every LOOP_TIME seconds
int loopIteration = 0;
while(loop) {
// get the duration of function calling...
time_t startTime = clock();
// now do the actual system stat calculations
for (int i = 0; i < sys_sections.size(); i++) {
if ((sys.interval[i] != 0) && (loopIteration % sys.interval[i] == 0)) {
sys.stat[i]->readStatus();
syslog(LOG_DEBUG, "Main Thread: Triggered \"readStatus()\" in %s.", sys_sections[i].c_str());
}
}
// update counter
if (loopIteration < MAX_TIME) { loopIteration++; } else { loopIteration = 0; }
syslog(LOG_DEBUG, "Main Thread: loop no. %d finished", loopIteration);
// now calculate how long we have to sleep
time_t endTime = clock();
int elapsedTime = (endTime - startTime)/CLOCKS_PER_SEC;
sleep(LOOP_TIME - elapsedTime); // sleep the remaining time
}
remove(PID_FILE);
syslog(LOG_NOTICE, "Process terminated.");
closelog();
}
void stopDaemon() {
// check for root privileges
if (getuid() != 0) {
printf("Error: root privileges are required to stop ServerStatus.\n\n");
exit(EXIT_FAILURE);
}
// kill process if running
if (getDaemonStatusRunning(false)) {
string pid = read_pid_file(PID_FILE);
if (pid != MAGIC_NUMBER) {
if (kill(stoi(pid), SIGTERM) == 0) {
// could be killed -> delete pid file (if not already deleted by terminated process)
//remove(PID_FILE);
printf("ServerStatus [%s] was successfully stopped.\n", pid.c_str());
} else {
printf("Error: ServerStatus could not be stopped.\n");
}
} else {
printf("ServerStatus is currently not running. \n");
}
} else {
printf("ServerStatus is currently not running. \n");
}
}
bool getDaemonStatusRunning(bool output) {
bool result = false;
char msg[100];
// check for pid file
if (file_exists(PID_FILE)){
string pid = read_pid_file(PID_FILE);
if (pid != MAGIC_NUMBER) {
// check if process is still running
if (getuid() == 0) {
if (pid_running(stoi(pid.c_str()))) {
sprintf(msg, "ServerStatus is currently running with pid %s. \n", pid.c_str());
result = true;
}
} else {
sprintf(msg, "ServerStatus might be running with pid %s. \nRun as root to get more precise information. \n", pid.c_str());
result = true;
}
}
}
if (output) {
if (!result) {
sprintf(msg, "ServerStatus is currently not running.\n");
}
printf("%s", msg);
}
return result;
}
//===================================================================================
// INPUT HANDLING:
// Parse command line paramter and execute according functions.
//===================================================================================
int main(int argc, char *argv[]) {
// Load configuration
string _configpath;
if (!getConfigFilePath(_configpath)) {
printf("Could not find a configuration file. \nMake sure your configuration file is \"%s\" or \"%s\". \n", PATH[0].c_str(), PATH[1].c_str());
exit(EXIT_FAILURE);
}
/***************************************************************
*** ServerStatus differentiates between two modes:
*** 1) The main mode is starting without any parameters:
*** It then creates a daemon that keeps running in the
*** background until the OS shuts down or the process
*** is killed
*** 2) The secound mode is starting with paramteres:
*** Right now these are:
*** "serverstatus --help" or "serverstatus -h"
*** "start", "restart" and "stop"
*** "status"
***************************************************************/
if ((argc == 0) || ((argc > 0) && (strcmp(argv[1],"start") == 0))) {
// MODE 1: run the daemon
startDaemon(_configpath);
} else if (argc > 0) {
// MODE 2: parse the options:
if ((strcmp(argv[1], "--help") == 0) || (strcmp(argv[1], "-h") == 0)) {
// Show help:
system("man serverstatus");
exit(EXIT_SUCCESS);
}
else if (strcmp(argv[1], "stop") == 0) {
// stop other running instances:
stopDaemon();
exit(EXIT_SUCCESS);
}
else if (strcmp(argv[1], "restart") == 0) {
// stop and start serverstatus again:
stopDaemon();
startDaemon(_configpath);
}
else if (strcmp(argv[1], "status") == 0) {
// return status
getDaemonStatusRunning(true);
}
else if ((strcmp(argv[1], "--config-check") == 0) || (strcmp(argv[1], "-c") == 0)) {
// Check configuration file
config *configuration = new config(_configpath);
configuration->showErrorLog();
configuration->performSecurityCheck(_configpath);
}
else {
printf("command line parameter not recognised. \nUse serverstatus --help to see all possible commands.\n");
}
}
}<|fim▁end|> | |
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import glob
import os.path
import re
class Picard(Package):
"""Picard is a set of command line tools for manipulating high-throughput
sequencing (HTS) data and formats such as SAM/BAM/CRAM and VCF.
"""
homepage = "http://broadinstitute.github.io/picard/"
url = "https://github.com/broadinstitute/picard/releases/download/2.9.2/picard.jar"
_urlfmt = "https://github.com/broadinstitute/picard/releases/download/{0}/picard.jar"
_oldurlfmt = 'https://github.com/broadinstitute/picard/releases/download/{0}/picard-tools-{0}.zip'
# They started distributing a single jar file at v2.6.0, prior to<|fim▁hole|> version('2.17.0', '72cc527f1e4ca6a799ae0117af60b54e', expand=False)
version('2.16.0', 'fed8928b03bb36e355656f349e579083', expand=False)
version('2.15.0', '3f5751630b1a3449edda47a0712a64e4', expand=False)
version('2.13.2', '3d7b33fd1f43ad2129e6ec7883af56f5', expand=False)
version('2.10.0', '96f3c11b1c9be9fc8088bc1b7b9f7538', expand=False)
version('2.9.4', '5ce72af4d5efd02fba7084dcfbb3c7b3', expand=False)
version('2.9.3', '3a33c231bcf3a61870c3d44b3b183924', expand=False)
version('2.9.2', '0449279a6a89830917e8bcef3a976ef7', expand=False)
version('2.9.0', 'b711d492f16dfe0084d33e684dca2202', expand=False)
version('2.8.3', '4a181f55d378cd61d0b127a40dfd5016', expand=False)
version('2.6.0', '91f35f22977d9692ce2718270077dc50', expand=False)
version('1.140', '308f95516d94c1f3273a4e7e2b315ec2')
depends_on('java@8:', type='run')
def install(self, spec, prefix):
mkdirp(prefix.bin)
# The list of files to install varies with release...
# ... but skip the spack-{build.env}.out files.
files = [x for x in glob.glob("*") if not re.match("^spack-", x)]
for f in files:
install(f, prefix.bin)
# Set up a helper script to call java on the jar file,
# explicitly codes the path for java and the jar file.
script_sh = join_path(os.path.dirname(__file__), "picard.sh")
script = prefix.bin.picard
install(script_sh, script)
set_executable(script)
# Munge the helper script to explicitly point to java and the
# jar file.
java = self.spec['java'].prefix.bin.java
kwargs = {'ignore_absent': False, 'backup': False, 'string': False}
filter_file('^java', java, script, **kwargs)
filter_file('picard.jar', join_path(prefix.bin, 'picard.jar'),
script, **kwargs)
def setup_environment(self, spack_env, run_env):
"""The Picard docs suggest setting this as a convenience."""
run_env.prepend_path('PICARD',
join_path(self.prefix, 'bin', 'picard.jar'))
def url_for_version(self, version):
if version < Version('2.6.0'):
return self._oldurlfmt.format(version)
else:
return self._urlfmt.format(version)<|fim▁end|> | # that it was a .zip file with multiple .jar and .so files
version('2.18.3', '181b1b0731fd35f0d8bd44677d8787e9', expand=False)
version('2.18.0', '20045ff141e4a67512365f0b6bbd8229', expand=False) |
<|file_name|>fieldlinesstate.cpp<|end_file_name|><|fim▁begin|>/*****************************************************************************************
* *
* OpenSpace *
* *
* Copyright (c) 2014-2022 *
* *
* Permission is hereby granted, free of charge, to any person obtaining a copy of this *
* software and associated documentation files (the "Software"), to deal in the Software *
* without restriction, including without limitation the rights to use, copy, modify, *
* merge, publish, distribute, sublicense, and/or sell copies of the Software, and to *
* permit persons to whom the Software is furnished to do so, subject to the following *
* conditions: *
* *
* The above copyright notice and this permission notice shall be included in all copies *
* or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, *
* INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A *
* PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT *
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF *
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE *
* OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. *
****************************************************************************************/
#include <modules/fieldlinessequence/util/fieldlinesstate.h>
#include <openspace/json.h>
#include <openspace/util/time.h>
#include <ghoul/fmt.h>
#include <ghoul/logging/logmanager.h>
#include <fstream>
#include <iomanip>
namespace {
constexpr const char* _loggerCat = "FieldlinesState";
constexpr const int CurrentVersion = 0;
using json = nlohmann::json;
} // namespace
namespace openspace {
/**
* Converts all glm::vec3 in _vertexPositions from spherical (radius, latitude, longitude)
* coordinates into cartesian coordinates. The longitude and latitude coordinates are
* expected to be in degrees. scale is an optional scaling factor.
*/
void FieldlinesState::convertLatLonToCartesian(float scale) {
for (glm::vec3& p : _vertexPositions) {
const float r = p.x * scale;
const float lat = glm::radians(p.y);
const float lon = glm::radians(p.z);
const float rCosLat = r * cos(lat);
p = glm::vec3(rCosLat * cos(lon), rCosLat* sin(lon), r * sin(lat));
}
}
void FieldlinesState::scalePositions(float scale) {
for (glm::vec3& p : _vertexPositions) {
p *= scale;
}
}
bool FieldlinesState::loadStateFromOsfls(const std::string& pathToOsflsFile) {
std::ifstream ifs(pathToOsflsFile, std::ifstream::binary);
if (!ifs.is_open()) {
LERROR("Couldn't open file: " + pathToOsflsFile);
return false;
}
int binFileVersion;
ifs.read(reinterpret_cast<char*>(&binFileVersion), sizeof(int));
switch (binFileVersion) {
case 0:
// No need to put everything in this scope now, as only version 0 exists!
break;
default:
LERROR("VERSION OF BINARY FILE WAS NOT RECOGNIZED!");
return false;
}
// Define tmp variables to store meta data in
size_t nLines;
size_t nPoints;
size_t nExtras;
size_t byteSizeAllNames;
// Read single value variables
ifs.read(reinterpret_cast<char*>(&_triggerTime), sizeof(double));
ifs.read(reinterpret_cast<char*>(&_model), sizeof(int32_t));
ifs.read(reinterpret_cast<char*>(&_isMorphable), sizeof(bool));
ifs.read(reinterpret_cast<char*>(&nLines), sizeof(uint64_t));
ifs.read(reinterpret_cast<char*>(&nPoints), sizeof(uint64_t));
ifs.read(reinterpret_cast<char*>(&nExtras), sizeof(uint64_t));
ifs.read(reinterpret_cast<char*>(&byteSizeAllNames), sizeof(uint64_t));
_lineStart.resize(nLines);
_lineCount.resize(nLines);
_vertexPositions.resize(nPoints);
_extraQuantities.resize(nExtras);
_extraQuantityNames.resize(nExtras);
// Read vertex position data
ifs.read(reinterpret_cast<char*>(_lineStart.data()), sizeof(int32_t) * nLines);
ifs.read(reinterpret_cast<char*>(_lineCount.data()), sizeof(uint32_t) * nLines);
ifs.read(
reinterpret_cast<char*>(_vertexPositions.data()),
3 * sizeof(float) * nPoints
);
// Read all extra quantities
for (std::vector<float>& vec : _extraQuantities) {
vec.resize(nPoints);
ifs.read(reinterpret_cast<char*>(vec.data()), sizeof(float) * nPoints);
}
// Read all extra quantities' names. Stored as multiple c-strings
std::string allNamesInOne;
std::vector<char> buffer(byteSizeAllNames);
ifs.read(buffer.data(), byteSizeAllNames);
allNamesInOne.assign(buffer.data(), byteSizeAllNames);
size_t offset = 0;
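    // Split the concatenated buffer back into the individual names, using the
    // '\0' terminator that separates them.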
for (size_t i = 0; i < nExtras; ++i) {
auto endOfVarName = allNamesInOne.find('\0', offset);
endOfVarName -= offset;
const std::string varName = allNamesInOne.substr(offset, endOfVarName);
offset += varName.size() + 1;
_extraQuantityNames[i] = varName;
}
return true;
}
bool FieldlinesState::loadStateFromJson(const std::string& pathToJsonFile,
fls::Model Model, float coordToMeters)
{
// --------------------- ENSURE FILE IS VALID, THEN PARSE IT --------------------- //
std::ifstream ifs(pathToJsonFile);
if (!ifs.is_open()) {
LERROR(fmt::format("FAILED TO OPEN FILE: {}", pathToJsonFile));
return false;
}
json jFile;
ifs >> jFile;
// -------------------------------------------------------------------------------- //
_model = Model;
const char* sData = "data";
const char* sTrace = "trace";
// ----- EXTRACT THE EXTRA QUANTITY NAMES & TRIGGER TIME (same for all lines) ----- //
{
const char* sTime = "time";
const json& jTmp = *(jFile.begin()); // First field line in the file
_triggerTime = Time::convertTime(jTmp[sTime]);
const char* sColumns = "columns";
const json::value_type& variableNameVec = jTmp[sTrace][sColumns];
const size_t nVariables = variableNameVec.size();
const size_t nPosComponents = 3; // x,y,z
if (nVariables < nPosComponents) {
LERROR(
pathToJsonFile + ": Each field '" + sColumns +
"' must contain the variables: 'x', 'y' and 'z' (order is important)."
);
return false;
}
for (size_t i = nPosComponents ; i < nVariables ; ++i) {
_extraQuantityNames.push_back(variableNameVec[i]);
}
}
const size_t nExtras = _extraQuantityNames.size();
_extraQuantities.resize(nExtras);
size_t lineStartIdx = 0;
// Loop through all fieldlines
for (json::iterator lineIter = jFile.begin(); lineIter != jFile.end(); ++lineIter) {
// The 'data' field in the 'trace' variable contains all vertex positions and the
// extra quantities. Each element is an array related to one vertex point.
const std::vector<std::vector<float>>& jData = (*lineIter)[sTrace][sData];
const size_t nPoints = jData.size();
for (size_t j = 0; j < nPoints; ++j) {
const std::vector<float>& variables = jData[j];
// Expects the x, y and z variables to be stored first!
const size_t xIdx = 0;
const size_t yIdx = 1;
const size_t zIdx = 2;
_vertexPositions.push_back(
coordToMeters * glm::vec3(
variables[xIdx],
variables[yIdx],
variables[zIdx]
)
);
// Add the extra quantites. Stored in the same array as the x,y,z variables.
// Hence index of the first extra quantity = 3
for (size_t xtraIdx = 3, k = 0 ; k < nExtras; ++k, ++xtraIdx) {
_extraQuantities[k].push_back(variables[xtraIdx]);
}
}
_lineCount.push_back(static_cast<GLsizei>(nPoints));
_lineStart.push_back(static_cast<GLsizei>(lineStartIdx));
lineStartIdx += nPoints;
}
return true;
}
/**
* \param absPath must be the path to the file (incl. filename but excl. extension!)
* Directory must exist! File is created (or overwritten if already existing).
* File is structured like this: (for version 0)
* 0. int - version number of binary state file! (in case something
* needs to be altered in the future, then increase
* CurrentVersion)
* 1. double - _triggerTime
* 2. int - _model
* 3. bool - _isMorphable
* 4. size_t - Number of lines in the state == _lineStart.size()
* == _lineCount.size()
* 5. size_t - Total number of vertex points == _vertexPositions.size()
* == _extraQuantities[i].size()
 * 6. size_t - Number of extra quantities == _extraQuantities.size()
* == _extraQuantityNames.size()
 * 7. size_t - Number of total bytes that ALL _extraQuantityNames
* consists of (Each such name is stored as a c_str which
* means it ends with the null char '\0' )
 * 8. std::vector<GLint> - _lineStart
 * 9. std::vector<GLsizei> - _lineCount
 * 10. std::vector<glm::vec3> - _vertexPositions
 * 11. std::vector<float> - _extraQuantities
 * 12. array of c_str - Strings naming the extra quantities (elements of
 * _extraQuantityNames). Each string ends with null char '\0'
*/
void FieldlinesState::saveStateToOsfls(const std::string& absPath) {
// ------------------------------- Create the file ------------------------------- //
std::string pathSafeTimeString = std::string(Time(_triggerTime).ISO8601());
pathSafeTimeString.replace(13, 1, "-");
pathSafeTimeString.replace(16, 1, "-");
pathSafeTimeString.replace(19, 1, "-");
const std::string& fileName = pathSafeTimeString + ".osfls";
std::ofstream ofs(absPath + fileName, std::ofstream::binary | std::ofstream::trunc);
if (!ofs.is_open()) {
LERROR(fmt::format(
"Failed to save state to binary file: {}{}", absPath, fileName
));
return;
}
// --------- Add each string of _extraQuantityNames into one long string --------- //
std::string allExtraQuantityNamesInOne = "";
for (const std::string& str : _extraQuantityNames) {
allExtraQuantityNamesInOne += str + '\0'; // Add null char '\0' for easier reading
}
const size_t nLines = _lineStart.size();
const size_t nPoints = _vertexPositions.size();
const size_t nExtras = _extraQuantities.size();
const size_t nStringBytes = allExtraQuantityNamesInOne.size();
//----------------------------- WRITE EVERYTHING TO FILE -----------------------------
// VERSION OF BINARY FIELDLINES STATE FILE - IN CASE STRUCTURE CHANGES IN THE FUTURE
ofs.write(reinterpret_cast<const char*>(&CurrentVersion), sizeof(int));
//-------------------- WRITE META DATA FOR STATE --------------------------------
ofs.write(reinterpret_cast<const char*>(&_triggerTime), sizeof(_triggerTime));
ofs.write(reinterpret_cast<const char*>(&_model), sizeof(int32_t));
ofs.write(reinterpret_cast<const char*>(&_isMorphable), sizeof(bool));
ofs.write(reinterpret_cast<const char*>(&nLines), sizeof(uint64_t));
ofs.write(reinterpret_cast<const char*>(&nPoints), sizeof(uint64_t));
ofs.write(reinterpret_cast<const char*>(&nExtras), sizeof(uint64_t));
ofs.write(reinterpret_cast<const char*>(&nStringBytes), sizeof(uint64_t));
//---------------------- WRITE ALL ARRAYS OF DATA --------------------------------
ofs.write(reinterpret_cast<char*>(_lineStart.data()), sizeof(int32_t) * nLines);
ofs.write(reinterpret_cast<char*>(_lineCount.data()), sizeof(uint32_t) * nLines);
ofs.write(
reinterpret_cast<char*>(_vertexPositions.data()),
3 * sizeof(float) * nPoints
);
// Write the data for each vector in _extraQuantities
for (std::vector<float>& vec : _extraQuantities) {
ofs.write(reinterpret_cast<char*>(vec.data()), sizeof(float) * nPoints);
}
ofs.write(allExtraQuantityNamesInOne.c_str(), nStringBytes);
}
// TODO: This should probably be rewritten, but this is the way the files were structured
// by CCMC
// Structure of File! NO TRAILING COMMAS ALLOWED!
// Additional info can be stored within each line as the code only extracts the keys it
// needs (time, trace & data)
// The key/name of each line ("0" & "1" in the example below) is arbitrary
// {
// "0":{
// "time": "YYYY-MM-DDTHH:MM:SS.XXX",
// "trace": {
// "columns": ["x","y","z","s","temperature","rho","j_para"],
// "data": [[8.694,127.853,115.304,0.0,0.047,9.249,-5e-10],...,
// [8.698,127.253,114.768,0.800,0.0,9.244,-5e-10]]
// },
// },
// "1":{
// "time": "YYYY-MM-DDTHH:MM:SS.XXX
// "trace": {
// "columns": ["x","y","z","s","temperature","rho","j_para"],
// "data": [[8.694,127.853,115.304,0.0,0.047,9.249,-5e-10],...,
// [8.698,127.253,114.768,0.800,0.0,9.244,-5e-10]]
// },
// }
// }
void FieldlinesState::saveStateToJson(const std::string& absPath) {
// Create the file
const char* ext = ".json";
std::ofstream ofs(absPath + ext, std::ofstream::trunc);
if (!ofs.is_open()) {
LERROR(fmt::format(
"Failed to save state to json file at location: {}{}", absPath, ext
));
return;
}
LINFO(fmt::format("Saving fieldline state to: {}{}", absPath, ext));
json jColumns = { "x", "y", "z" };
for (const std::string& s : _extraQuantityNames) {
jColumns.push_back(s);
}
json jFile;
std::string_view timeStr = Time(_triggerTime).ISO8601();
const size_t nLines = _lineStart.size();
// const size_t nPoints = _vertexPositions.size();
const size_t nExtras = _extraQuantities.size();
size_t pointIndex = 0;
for (size_t lineIndex = 0; lineIndex < nLines; ++lineIndex) {
json jData = json::array();
for (GLsizei i = 0; i < _lineCount[lineIndex]; i++, ++pointIndex) {
const glm::vec3 pos = _vertexPositions[pointIndex];
json jDataElement = { pos.x, pos.y, pos.z };
for (size_t extraIndex = 0; extraIndex < nExtras; ++extraIndex) {
jDataElement.push_back(_extraQuantities[extraIndex][pointIndex]);
}
jData.push_back(jDataElement);
}
jFile[std::to_string(lineIndex)] = {
{ "time", timeStr },
{ "trace", {
{ "columns", jColumns },
{ "data", jData }
}}
};
}
//----------------------------- WRITE EVERYTHING TO FILE -----------------------------
const int indentationSpaces = 2;
ofs << std::setw(indentationSpaces) << jFile << std::endl;
LINFO(fmt::format("Saved fieldline state to: {}{}", absPath, ext));
}
void FieldlinesState::setModel(fls::Model m) {
_model = m;
}
void FieldlinesState::setTriggerTime(double t) {
_triggerTime = t;
}
// Returns one of the extra quantity vectors, _extraQuantities[index].
// If index is out of scope an empty vector is returned and the referenced bool is false.
std::vector<float> FieldlinesState::extraQuantity(size_t index, bool& isSuccessful) const
{
if (index < _extraQuantities.size()) {
isSuccessful = true;
return _extraQuantities[index];<|fim▁hole|> else {
isSuccessful = false;
LERROR("Provided Index was out of scope!");
return {};
}
}
// Moves the points in @param line over to _vertexPositions and updates
// _lineStart & _lineCount accordingly.
void FieldlinesState::addLine(std::vector<glm::vec3>& line) {
const size_t nNewPoints = line.size();
const size_t nOldPoints = _vertexPositions.size();
_lineStart.push_back(static_cast<GLint>(nOldPoints));
_lineCount.push_back(static_cast<GLsizei>(nNewPoints));
_vertexPositions.reserve(nOldPoints + nNewPoints);
_vertexPositions.insert(
_vertexPositions.end(),
std::make_move_iterator(line.begin()),
std::make_move_iterator(line.end())
);
line.clear();
}
void FieldlinesState::appendToExtra(size_t idx, float val) {
_extraQuantities[idx].push_back(val);
}
void FieldlinesState::setExtraQuantityNames(std::vector<std::string> names) {
_extraQuantityNames = std::move(names);
_extraQuantities.resize(_extraQuantityNames.size());
}
const std::vector<std::vector<float>>& FieldlinesState::extraQuantities() const {
return _extraQuantities;
}
const std::vector<std::string>& FieldlinesState::extraQuantityNames() const {
return _extraQuantityNames;
}
const std::vector<GLsizei>& FieldlinesState::lineCount() const {
return _lineCount;
}
const std::vector<GLint>& FieldlinesState::lineStart() const {
return _lineStart;
}
fls::Model FieldlinesState::FieldlinesState::model() const {
return _model;
}
size_t FieldlinesState::nExtraQuantities() const {
return _extraQuantities.size();
}
double FieldlinesState::triggerTime() const {
return _triggerTime;
}
const std::vector<glm::vec3>& FieldlinesState::vertexPositions() const {
return _vertexPositions;
}
} // namespace openspace<|fim▁end|> | } |
<|file_name|>_lib.js<|end_file_name|><|fim▁begin|>function setupTest( target, event, dataToPaste, externalPassCondition ){
var logNode=document.getElementsByTagName('p')[0].firstChild;
logNode.data='';
if( typeof target==='string' ){
if( target.indexOf('.')>-1 ){ // for example "myElementID.firstChild"
var tmp=target.split('.');
target=document.getElementById(tmp[0])[tmp[1]];
}else{
target=document.getElementById(target);
}
}
/* */
if( target.addEventListener ){
target.addEventListener(event, intermediateListener, false);
}else if(target.attachEvent){
target.attachEvent('on'+event, intermediateListener);
}
if( dataToPaste || event==='paste' ){
logNode.data+='Please place the following text on the clipboard before continuing the test: "'+(dataToPaste || 'clipboard text' )+'"\n';
logNode.parentNode.style.whiteSpace='pre';
if(dataToPaste.indexOf('{')==0){ // sorry about the content sniffing, this is likely a JSON string with alternate clipboard formats
if(dataToPaste.indexOf('text/html')>-1){
logNode.parentNode.appendChild(document.createElement('br'));
logNode.parentNode.appendChild(document.createTextNode('Note: copy all body text from '));
var tmp=logNode.parentNode.appendChild(document.createElement('a'));
tmp.href='support/html_file.htm';
tmp.appendChild(document.createTextNode('this support file'));
logNode.parentNode.appendChild(document.createTextNode(' for this test.'));
}
}
}
if(typeof triggerTestManually==='function'){
logNode.parentNode.appendChild(document.createTextNode(' '));
var btn=logNode.parentNode.appendChild(document.createElement('button'))
btn.type='button';
btn.onclick=function(){
triggerTestManually();
btn.parentNode.removeChild(btn);
}
btn.appendChild(document.createTextNode(' Click here to run test: '));
}else{
logNode.data+='Test in progress, waiting for '+event+' event';
}
if(typeof onTestSetupReady==='function'){
onTestSetupReady(event);
}
function intermediateListener(e){
e=e||window.event;
if(!e.target)e.target=e.srcElement;
if(typeof window.clipboardData != 'undefined' && typeof e.clipboardData=='undefined' )e.clipboardData=window.clipboardData;
try{
var testResult=test(e);
result(testResult);
}catch(e){
result('exception: '+e);
}
}
/* if @autofocus isn't supported.. */
if( document.getElementsByTagName('input').length >1 && document.activeElement == document.body ){
for(var inp, i=0, inputs=document.getElementsByTagName('input');inp=inputs[i];i++){
if(inp.hasAttribute('autofocus'))inp.focus();
}
}
<|fim▁hole|>}
function result(testResult, msg){
var logNode=document.getElementsByTagName('p')[0].firstChild;
if( testResult === true || testResult === false ){
logNode.data= testResult ? 'PASSED' : 'FAILED';
}else if( typeof testResult ==='string' ){
logNode.data=testResult;
}else if( typeof externalPassCondition==='string' ){
logNode.data='\nThis test passes if this text is now on the system clipboard: "'+externalPassCondition+'"';
}
if( msg )logNode.data+='\n'+msg;
/* another return value - or no return - from test() indicates that it is asyncronous and will call testResult() from a timeout or something */
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from iRODSLibrary import iRODSLibrary
__version__ = "0.0.4"
class iRODSLibrary(iRODSLibrary):
""" iRODSLibrary is a client keyword library that uses
the python-irodsclient module from iRODS
https://github.com/irods/python-irodsclient
Examples:<|fim▁hole|> """
ROBOT_LIBRARY_SCOPE = 'GLOBAL'<|fim▁end|> | | Connect To Grid | iPlant | data.iplantcollaborative.org | ${1247} | jdoe | jdoePassword | tempZone
|
<|file_name|>collections.backgroundStyles.js<|end_file_name|><|fim▁begin|>import collectionClass from "./collections.class";
import collectionColor from "./collections.color";
function collectionBackgroundStyles(contentItem) {
return `
.${collectionClass(contentItem)} {
background-color: #${collectionColor(contentItem)};<|fim▁hole|> }
`;
}
export default collectionBackgroundStyles;<|fim▁end|> | |
<|file_name|>drcProcess.js<|end_file_name|><|fim▁begin|>/**
* drcProcess
* Created by dcorns on 1/2/15.
*/
'use strict';
var RunApp = require('./runApp');
var Server = require('./server');
var parseInput = require('./parseInput');
var CommandList = require('./commandList');
var runApp = new RunApp();
var cmds = new CommandList();
cmds.add(['ls', 'pwd', 'service', 'ps']);
var firstServer = new Server('firstServer');
firstServer.start(3000, function(err, cnn){
cnn.on('data', function(data){
parseInput(data, function(err, obj){
if(err) cnn.write(err);
else {<|fim▁hole|> cnn.write('Valid Commands: ' + cmds.listCommands() + '\r\n');
cnn.write('Use # to add parameters: example: ls#/\r\n');
cnn.write('Use - to add options: example: ls#/ -al or ls#-al\r\n');
console.log(cnn.loginID + ' connected');
}
else {
if(cmds.validate(obj.cmd) > -1){
runApp.run(obj.params, cnn, obj.cmd);
}
else{
cnn.write('Valid commands: ' + cmds.listCommands());
}
}
}
});
});
});<|fim▁end|> | if(obj.cmd.substr(0, 6) === 'login:'){
cnn.loginID = obj.cmd.substr(6);
cnn.write('Welcome ' + cnn.loginID + '\r\n'); |
<|file_name|>core_commands4.cc<|end_file_name|><|fim▁begin|>/*****************************************************************************
* Free42 -- an HP-42S calculator simulator
* Copyright (C) 2004-2016 Thomas Okken
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License, version 2,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, see http://www.gnu.org/licenses/.
*****************************************************************************/
#include <stdlib.h>
#include "core_commands2.h"
#include "core_commands3.h"
#include "core_commands4.h"
#include "core_display.h"
#include "core_helpers.h"
#include "core_linalg1.h"
#include "core_sto_rcl.h"
#include "core_variables.h"
/********************************************************/
/* Implementations of HP-42S built-in functions, part 4 */
/********************************************************/
int docmd_insr(arg_struct *arg) {
vartype *m, *newx;
vartype_realmatrix *rm;
vartype_complexmatrix *cm;
int4 rows, columns, i;
int err, refcount;
int interactive;
switch (matedit_mode) {
case 0:
return ERR_NONEXISTENT;
case 1:
case 3:
m = recall_var(matedit_name, matedit_length);
break;
case 2:
m = matedit_x;
break;
default:
return ERR_INTERNAL_ERROR;
}
if (m == NULL)
return ERR_NONEXISTENT;
if (m->type != TYPE_REALMATRIX && m->type != TYPE_COMPLEXMATRIX)
return ERR_INVALID_TYPE;
interactive = matedit_mode == 2 || matedit_mode == 3;
if (interactive) {
err = docmd_stoel(NULL);
if (err != ERR_NONE)
return err;
}
if (m->type == TYPE_REALMATRIX) {
rm = (vartype_realmatrix *) m;
rows = rm->rows;
columns = rm->columns;
refcount = rm->array->refcount;
if (interactive) {
newx = new_real(0);
if (newx == NULL)
return ERR_INSUFFICIENT_MEMORY;
}
} else {
cm = (vartype_complexmatrix *) m;
rows = cm->rows;
columns = cm->columns;
refcount = cm->array->refcount;
if (interactive) {
newx = new_complex(0, 0);
if (newx == NULL)
return ERR_INSUFFICIENT_MEMORY;
}
}
if (matedit_i >= rows)
matedit_i = rows - 1;
if (matedit_j >= columns)
matedit_j = columns - 1;
if (refcount == 1) {
/* We have this array to ourselves so we can modify it in place */
err = dimension_array_ref(m, rows + 1, columns);
if (err != ERR_NONE) {
if (interactive)
free_vartype(newx);
return err;
}
rows++;
if (m->type == TYPE_REALMATRIX) {
for (i = rows * columns - 1; i >= (matedit_i + 1) * columns; i--) {
rm->array->is_string[i] = rm->array->is_string[i - columns];
rm->array->data[i] = rm->array->data[i - columns];
}
for (i = matedit_i * columns; i < (matedit_i + 1) * columns; i++) {
rm->array->is_string[i] = 0;
rm->array->data[i] = 0;
}
} else {
for (i = 2 * rows * columns - 1;
i >= 2 * (matedit_i + 1) * columns; i--)
cm->array->data[i] = cm->array->data[i - 2 * columns];
for (i = 2 * matedit_i * columns;
i < 2 * (matedit_i + 1) * columns; i++)
cm->array->data[i] = 0;
}
} else {
/* We're sharing this array. I don't use disentangle() because it
* does not deal with resizing. */
int4 newsize = (rows + 1) * columns;
if (m->type == TYPE_REALMATRIX) {
realmatrix_data *array = (realmatrix_data *)
malloc(sizeof(realmatrix_data));
if (array == NULL) {
if (interactive)
free_vartype(newx);
return ERR_INSUFFICIENT_MEMORY;
}
array->data = (phloat *) malloc(newsize * sizeof(phloat));
if (array->data == NULL) {
if (interactive)
free_vartype(newx);
free(array);
return ERR_INSUFFICIENT_MEMORY;
}
array->is_string = (char *) malloc(newsize);
if (array->is_string == NULL) {
if (interactive)
free_vartype(newx);
free(array->data);
free(array);
return ERR_INSUFFICIENT_MEMORY;
}
for (i = 0; i < matedit_i * columns; i++) {
array->is_string[i] = rm->array->is_string[i];
array->data[i] = rm->array->data[i];
}
for (i = matedit_i * columns; i < (matedit_i + 1) * columns; i++) {
array->is_string[i] = 0;
array->data[i] = 0;
}
for (i = (matedit_i + 1) * columns; i < newsize; i++) {
array->is_string[i] = rm->array->is_string[i - columns];
array->data[i] = rm->array->data[i - columns];
}
array->refcount = 1;
rm->array->refcount--;
rm->array = array;
rm->rows++;
} else {
complexmatrix_data *array = (complexmatrix_data *)
malloc(sizeof(complexmatrix_data));
if (array == NULL) {
if (interactive)
free_vartype(newx);
return ERR_INSUFFICIENT_MEMORY;
}
array->data = (phloat *) malloc(2 * newsize * sizeof(phloat));
if (array->data == NULL) {
if (interactive)
free_vartype(newx);
free(array);
return ERR_INSUFFICIENT_MEMORY;
}
for (i = 0; i < 2 * matedit_i * columns; i++)
array->data[i] = cm->array->data[i];
for (i = 2 * matedit_i * columns;
i < 2 * (matedit_i + 1) * columns; i++)
array->data[i] = 0;
for (i = 2 * (matedit_i + 1) * columns; i < 2 * newsize; i++)
array->data[i] = cm->array->data[i - 2 * columns];
array->refcount = 1;
cm->array->refcount--;
cm->array = array;
cm->rows++;
}
}
if (interactive) {
free_vartype(reg_x);
reg_x = newx;
}
mode_disable_stack_lift = true;
return ERR_NONE;
}
static void invrt_completion(int error, vartype *res) {
if (error == ERR_NONE)
unary_result(res);
}
int docmd_invrt(arg_struct *arg) {
if (reg_x->type == TYPE_REAL || reg_x->type == TYPE_COMPLEX)
return ERR_INVALID_TYPE;
else if (reg_x->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
else
return linalg_inv(reg_x, invrt_completion);
}
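// Matrix editor "J+": advance the column index, wrapping to column 0 of the next
// row at the right edge; at the bottom edge, either grow the matrix by one row
// (when the GROW flag is set) or wrap back to the top.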
int docmd_j_add(arg_struct *arg) {
int4 rows, columns;
int4 oldi = matedit_i;
int4 oldj = matedit_j;
int err = matedit_get_dim(&rows, &columns);
if (err != ERR_NONE)
return err;
if (++matedit_j >= columns) {
flags.f.matrix_edge_wrap = 1;
matedit_j = 0;
if (++matedit_i >= rows) {
flags.f.matrix_end_wrap = 1;
if (flags.f.grow) {
if (matedit_mode == 2)
err = dimension_array_ref(matedit_x, rows + 1, columns);
else
err = dimension_array(matedit_name, matedit_length,
rows + 1, columns);
if (err != ERR_NONE) {
matedit_i = oldi;
matedit_j = oldj;
return err;
}
matedit_i = rows;
} else
matedit_i = 0;
} else
flags.f.matrix_end_wrap = 0;
} else {
flags.f.matrix_edge_wrap = 0;
flags.f.matrix_end_wrap = 0;
}
return ERR_NONE;
}
int docmd_j_sub(arg_struct *arg) {
int4 rows, columns;
int err = matedit_get_dim(&rows, &columns);
if (err != ERR_NONE)
return err;
if (--matedit_j < 0) {
flags.f.matrix_edge_wrap = 1;
matedit_j = columns - 1;
if (--matedit_i < 0) {
flags.f.matrix_end_wrap = 1;
matedit_i = rows - 1;
} else
flags.f.matrix_end_wrap = 0;
} else {
flags.f.matrix_edge_wrap = 0;
flags.f.matrix_end_wrap = 0;
}
return ERR_NONE;
}
static int mappable_ln_1_x(phloat x, phloat *y) {
if (x <= -1)
return ERR_INVALID_DATA;
*y = log1p(x);
return ERR_NONE;
}
int docmd_ln_1_x(arg_struct *arg) {
if (reg_x->type == TYPE_REAL || reg_x->type == TYPE_REALMATRIX) {
vartype *v;
int err = map_unary(reg_x, &v, mappable_ln_1_x, NULL);
if (err == ERR_NONE)
unary_result(v);
return err;
} else if (reg_x->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
else
return ERR_INVALID_TYPE;
}
int docmd_old(arg_struct *arg) {
return docmd_rclel(NULL);
}
int docmd_posa(arg_struct *arg) {
int pos = -1;
vartype *v;
if (reg_x->type == TYPE_REAL) {
phloat x = ((vartype_real *) reg_x)->x;
char c;
int i;
if (x < 0)
x = -x;
if (x >= 256)
return ERR_INVALID_DATA;
c = to_char(x);
for (i = 0; i < reg_alpha_length; i++)
if (reg_alpha[i] == c) {
pos = i;
break;
}
} else if (reg_x->type == TYPE_STRING) {
vartype_string *s = (vartype_string *) reg_x;
if (s->length != 0) {
int i, j;
for (i = 0; i < reg_alpha_length - s->length; i++) {
for (j = 0; j < s->length; j++)
if (reg_alpha[i + j] != s->text[j])
goto notfound;
pos = i;
break;
notfound:;
}
}
} else
return ERR_INVALID_TYPE;
v = new_real(pos);
if (v == NULL)
return ERR_INSUFFICIENT_MEMORY;
unary_result(v);
return ERR_NONE;
}
int docmd_putm(arg_struct *arg) {
vartype *m;
int4 i, j;
switch (matedit_mode) {
case 0:
return ERR_NONEXISTENT;
case 1:
case 3:
m = recall_var(matedit_name, matedit_length);
break;
case 2:
m = matedit_x;
break;
default:
return ERR_INTERNAL_ERROR;
}
if (m == NULL)
return ERR_NONEXISTENT;
if (m->type != TYPE_REALMATRIX && m->type != TYPE_COMPLEXMATRIX)
/* Shouldn't happen, but could, as long as I don't
* implement matrix locking
*/
return ERR_INVALID_TYPE;
if (reg_x->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
else if (reg_x->type == TYPE_REAL || reg_x->type == TYPE_COMPLEX)
return ERR_INVALID_TYPE;
if (m->type == TYPE_REALMATRIX) {
vartype_realmatrix *src, *dst;
if (reg_x->type == TYPE_COMPLEXMATRIX)
return ERR_INVALID_TYPE;
src = (vartype_realmatrix *) reg_x;
dst = (vartype_realmatrix *) m;
if (src->rows + matedit_i > dst->rows
|| src->columns + matedit_j > dst->columns)
return ERR_DIMENSION_ERROR;
if (!disentangle(m))
return ERR_INSUFFICIENT_MEMORY;
for (i = 0; i < src->rows; i++)
for (j = 0; j < src->columns; j++) {
int4 n1 = i * src->columns + j;
int4 n2 = (i + matedit_i) * dst->columns + j + matedit_j;
dst->array->is_string[n2] = src->array->is_string[n1];
dst->array->data[n2] = src->array->data[n1];
}
return ERR_NONE;
} else if (reg_x->type == TYPE_REALMATRIX) {
vartype_realmatrix *src = (vartype_realmatrix *) reg_x;
vartype_complexmatrix *dst = (vartype_complexmatrix *) m;
if (src->rows + matedit_i > dst->rows
|| src->columns + matedit_j > dst->columns)
return ERR_DIMENSION_ERROR;
for (i = 0; i < src->rows * src->columns; i++)
if (src->array->is_string[i])
return ERR_ALPHA_DATA_IS_INVALID;
if (!disentangle(m))
return ERR_INSUFFICIENT_MEMORY;
for (i = 0; i < src->rows; i++)
for (j = 0; j < src->columns; j++) {
int4 n1 = i * src->columns + j;
int4 n2 = (i + matedit_i) * dst->columns + j + matedit_j;
dst->array->data[n2 * 2] = src->array->data[n1];
dst->array->data[n2 * 2 + 1] = 0;
}
return ERR_NONE;
} else {
vartype_complexmatrix *src = (vartype_complexmatrix *) reg_x;
vartype_complexmatrix *dst = (vartype_complexmatrix *) m;
if (src->rows + matedit_i > dst->rows
|| src->columns + matedit_j > dst->columns)
return ERR_DIMENSION_ERROR;
if (!disentangle(m))
return ERR_INSUFFICIENT_MEMORY;
for (i = 0; i < src->rows; i++)
for (j = 0; j < src->columns; j++) {
int4 n1 = i * src->columns + j;
int4 n2 = (i + matedit_i) * dst->columns + j + matedit_j;
dst->array->data[n2 * 2] = src->array->data[n1 * 2];
dst->array->data[n2 * 2 + 1] = src->array->data[n1 * 2 + 1];
}
return ERR_NONE;
}
}
int docmd_rclel(arg_struct *arg) {
vartype *m, *v;
switch (matedit_mode) {
case 0:
return ERR_NONEXISTENT;
case 1:
case 3:
m = recall_var(matedit_name, matedit_length);
break;
case 2:
m = matedit_x;
break;
default:
return ERR_INTERNAL_ERROR;
}
if (m == NULL)
return ERR_NONEXISTENT;
if (m->type == TYPE_REALMATRIX) {
vartype_realmatrix *rm = (vartype_realmatrix *) m;
int4 n = matedit_i * rm->columns + matedit_j;
if (rm->array->is_string[n])
v = new_string(phloat_text(rm->array->data[n]),
phloat_length(rm->array->data[n]));
else
v = new_real(rm->array->data[n]);
} else if (m->type == TYPE_COMPLEXMATRIX) {
vartype_complexmatrix *cm = (vartype_complexmatrix *) m;
int4 n = matedit_i * cm->columns + matedit_j;
v = new_complex(cm->array->data[2 * n],
cm->array->data[2 * n + 1]);
} else
return ERR_INVALID_TYPE;
if (v == NULL)
return ERR_INSUFFICIENT_MEMORY;
recall_result(v);
return ERR_NONE;
}
int docmd_rclij(arg_struct *arg) {
vartype *i, *j;
if (matedit_mode == 0)
return ERR_NONEXISTENT;
i = new_real(matedit_i + 1);
j = new_real(matedit_j + 1);
if (i == NULL || j == NULL) {
free_vartype(i);
free_vartype(j);
return ERR_INSUFFICIENT_MEMORY;
}
recall_two_results(j, i);
return ERR_NONE;
}
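/* RNRM: row norm of the matrix in X, i.e. the maximum over all rows of the
 * sum of the absolute values (real case) or complex magnitudes (complex
 * case) of that row's elements. */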
int docmd_rnrm(arg_struct *arg) {
if (reg_x->type == TYPE_REALMATRIX) {
vartype *v;
vartype_realmatrix *rm = (vartype_realmatrix *) reg_x;
int4 size = rm->rows * rm->columns;
int4 i, j;
phloat max = 0;
for (i = 0; i < size; i++)
if (rm->array->is_string[i])
return ERR_ALPHA_DATA_IS_INVALID;
for (i = 0; i < rm->rows; i++) {
phloat nrm = 0;
for (j = 0; j < rm->columns; j++) {
phloat x = rm->array->data[i * rm->columns + j];
if (x >= 0)
nrm += x;
else
nrm -= x;
}
if (p_isinf(nrm)) {
if (flags.f.range_error_ignore)
max = POS_HUGE_PHLOAT;
else
return ERR_OUT_OF_RANGE;
break;
}
if (nrm > max)
max = nrm;
}
v = new_real(max);
if (v == NULL)
return ERR_INSUFFICIENT_MEMORY;
unary_result(v);
return ERR_NONE;
} else if (reg_x->type == TYPE_COMPLEXMATRIX) {
vartype *v;
vartype_complexmatrix *cm = (vartype_complexmatrix *) reg_x;
int4 i, j;
phloat max = 0;
for (i = 0; i < cm->rows; i++) {
phloat nrm = 0;
for (j = 0; j < cm->columns; j++) {
phloat re = cm->array->data[2 * (i * cm->columns + j)];
phloat im = cm->array->data[2 * (i * cm->columns + j) + 1];
nrm += hypot(re, im);
}
if (p_isinf(nrm)) {
if (flags.f.range_error_ignore)
max = POS_HUGE_PHLOAT;
else
return ERR_OUT_OF_RANGE;
break;
}
if (nrm > max)
max = nrm;
}
v = new_real(max);
if (v == NULL)
return ERR_INSUFFICIENT_MEMORY;
unary_result(v);
return ERR_NONE;
} else if (reg_x->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
else
return ERR_INVALID_TYPE;
}
int docmd_rsum(arg_struct *arg) {
if (reg_x->type == TYPE_REALMATRIX) {
vartype_realmatrix *rm = (vartype_realmatrix *) reg_x;
vartype_realmatrix *res;
int4 size = rm->rows * rm->columns;
int4 i, j;
for (i = 0; i < size; i++)
if (rm->array->is_string[i])
return ERR_ALPHA_DATA_IS_INVALID;
res = (vartype_realmatrix *) new_realmatrix(rm->rows, 1);
if (res == NULL)
return ERR_INSUFFICIENT_MEMORY;
for (i = 0; i < rm->rows; i++) {
phloat sum = 0;
int inf;
for (j = 0; j < rm->columns; j++)
sum += rm->array->data[i * rm->columns + j];
if ((inf = p_isinf(sum)) != 0) {
if (flags.f.range_error_ignore)
sum = inf < 0 ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
else {
free_vartype((vartype *) res);
return ERR_OUT_OF_RANGE;
}
}
res->array->data[i] = sum;
}
unary_result((vartype *) res);
return ERR_NONE;
} else if (reg_x->type == TYPE_COMPLEXMATRIX) {
vartype_complexmatrix *cm = (vartype_complexmatrix *) reg_x;
vartype_complexmatrix *res;
int4 i, j;
res = (vartype_complexmatrix *) new_complexmatrix(cm->rows, 1);
if (res == NULL)
return ERR_INSUFFICIENT_MEMORY;
for (i = 0; i < cm->rows; i++) {
phloat sum_re = 0, sum_im = 0;
int inf;
for (j = 0; j < cm->columns; j++) {
sum_re += cm->array->data[2 * (i * cm->columns + j)];
sum_im += cm->array->data[2 * (i * cm->columns + j) + 1];
}
if ((inf = p_isinf(sum_re)) != 0) {
if (flags.f.range_error_ignore)
sum_re = inf < 0 ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
else {
free_vartype((vartype *) res);
return ERR_OUT_OF_RANGE;
}
}
if ((inf = p_isinf(sum_im)) != 0) {
if (flags.f.range_error_ignore)
sum_im = inf < 0 ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
else {
free_vartype((vartype *) res);
return ERR_OUT_OF_RANGE;
}
}
res->array->data[2 * i] = sum_re;
res->array->data[2 * i + 1] = sum_im;
}
unary_result((vartype *) res);
return ERR_NONE;
} else if (reg_x->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
else
return ERR_INVALID_TYPE;
}
int docmd_swap_r(arg_struct *arg) {
vartype *m;
phloat xx, yy;
int4 x, y, i;
switch (matedit_mode) {
case 0:
return ERR_NONEXISTENT;
case 1:
case 3:
m = recall_var(matedit_name, matedit_length);
break;
case 2:
m = matedit_x;
break;
default:
return ERR_INTERNAL_ERROR;
}
if (m == NULL)
return ERR_NONEXISTENT;
if (m->type != TYPE_REALMATRIX && m->type != TYPE_COMPLEXMATRIX)
/* Should not happen, but could, as long as I don't implement
* matrix locking. */
return ERR_INVALID_TYPE;
if (reg_x->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
if (reg_x->type != TYPE_REAL)
return ERR_INVALID_TYPE;
if (reg_y->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
if (reg_y->type != TYPE_REAL)
return ERR_INVALID_TYPE;
xx = ((vartype_real *) reg_x)->x;
if (xx <= -2147483648.0 || xx >= 2147483648.0)
return ERR_DIMENSION_ERROR;
x = to_int4(xx);
if (x == 0)
return ERR_DIMENSION_ERROR;
if (x < 0)
x = -x;
x--;
yy = ((vartype_real *) reg_y)->x;
if (yy <= -2147483648.0 || yy >= 2147483648.0)
return ERR_DIMENSION_ERROR;
y = to_int4(yy);
if (y == 0)
return ERR_DIMENSION_ERROR;
if (y < 0)
y = -y;
y--;
if (m->type == TYPE_REALMATRIX) {
vartype_realmatrix *rm = (vartype_realmatrix *) m;
if (x > rm->rows || y > rm->rows)
return ERR_DIMENSION_ERROR;
else if (x == y)
return ERR_NONE;
if (!disentangle(m))
return ERR_INSUFFICIENT_MEMORY;
for (i = 0; i < rm->columns; i++) {
int4 n1 = x * rm->columns + i;
int4 n2 = y * rm->columns + i;
char tempc = rm->array->is_string[n1];
phloat tempds = rm->array->data[n1];
rm->array->is_string[n1] = rm->array->is_string[n2];
rm->array->data[n1] = rm->array->data[n2];
rm->array->is_string[n2] = tempc;
rm->array->data[n2] = tempds;
}
return ERR_NONE;
} else /* m->type == TYPE_COMPLEXMATRIX */ {
vartype_complexmatrix *cm = (vartype_complexmatrix *) m;
if (x > cm->rows || y > cm->rows)
return ERR_DIMENSION_ERROR;
else if (x == y)
return ERR_NONE;
if (!disentangle(m))
return ERR_INSUFFICIENT_MEMORY;
for (i = 0; i < 2 * cm->columns; i++) {
int4 n1 = x * 2 * cm->columns + i;
int4 n2 = y * 2 * cm->columns + i;
phloat tempd = cm->array->data[n1];
cm->array->data[n1] = cm->array->data[n2];
cm->array->data[n2] = tempd;
}
return ERR_NONE;
}
}
static int mappable_sinh_r(phloat x, phloat *y) {
int inf;
*y = sinh(x);
if ((inf = p_isinf(*y)) != 0) {
if (flags.f.range_error_ignore)
*y = inf < 0 ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
else
return ERR_OUT_OF_RANGE;
}
return ERR_NONE;
}
static int mappable_sinh_c(phloat xre, phloat xim, phloat *yre, phloat *yim) {
phloat sinhxre, coshxre;
phloat sinxim, cosxim;
int inf;
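    /* sinh(x+iy) = sinh(x)cos(y) + i*cosh(x)sin(y); the real and imaginary
     * parts are range-checked separately below. */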
sinhxre = sinh(xre);
coshxre = cosh(xre);
sincos(xim, &sinxim, &cosxim);
*yre = sinhxre * cosxim;
if ((inf = p_isinf(*yre)) != 0) {
if (flags.f.range_error_ignore)
*yre = inf < 0 ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
else
return ERR_OUT_OF_RANGE;
}
*yim = coshxre * sinxim;
if ((inf = p_isinf(*yim)) != 0) {
if (flags.f.range_error_ignore)
*yim = inf < 0 ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
else
return ERR_OUT_OF_RANGE;
}
return ERR_NONE;
}
int docmd_sinh(arg_struct *arg) {
if (reg_x->type != TYPE_STRING) {
vartype *v;
int err = map_unary(reg_x, &v, mappable_sinh_r, mappable_sinh_c);
if (err == ERR_NONE)
unary_result(v);
return err;
} else
return ERR_ALPHA_DATA_IS_INVALID;
}
int docmd_stoel(arg_struct *arg) {
vartype *m;
switch (matedit_mode) {
case 0:
return ERR_NONEXISTENT;
case 1:
case 3:
m = recall_var(matedit_name, matedit_length);
break;
case 2:
m = matedit_x;
break;
default:
return ERR_INTERNAL_ERROR;
}
if (m == NULL)
return ERR_NONEXISTENT;
if (m->type != TYPE_REALMATRIX && m->type != TYPE_COMPLEXMATRIX)
/* Should not happen, but could, as long as I don't implement
* matrix locking.
*/
return ERR_INVALID_TYPE;
if (!disentangle(m))
return ERR_INSUFFICIENT_MEMORY;
if (m->type == TYPE_REALMATRIX) {
vartype_realmatrix *rm = (vartype_realmatrix *) m;
int4 n = matedit_i * rm->columns + matedit_j;
if (reg_x->type == TYPE_REAL) {
rm->array->is_string[n] = 0;
rm->array->data[n] = ((vartype_real *) reg_x)->x;
return ERR_NONE;
} else if (reg_x->type == TYPE_STRING) {
vartype_string *s = (vartype_string *) reg_x;
int i;
rm->array->is_string[n] = 1;
phloat_length(rm->array->data[n]) = s->length;
for (i = 0; i < s->length; i++)
phloat_text(rm->array->data[n])[i] = s->text[i];
return ERR_NONE;
} else
return ERR_INVALID_TYPE;
} else /* m->type == TYPE_COMPLEXMATRIX */ {
vartype_complexmatrix *cm = (vartype_complexmatrix *) m;
int4 n = matedit_i * cm->columns + matedit_j;
if (reg_x->type == TYPE_REAL) {
cm->array->data[2 * n] = ((vartype_real *) reg_x)->x;
cm->array->data[2 * n + 1] = 0;
return ERR_NONE;
} else if (reg_x->type == TYPE_COMPLEX) {
vartype_complex *c = (vartype_complex *) reg_x;
cm->array->data[2 * n] = c->re;
cm->array->data[2 * n + 1] = c->im;
return ERR_NONE;
} else
return ERR_INVALID_TYPE;
}
}
int docmd_stoij(arg_struct *arg) {
vartype *m;
phloat x, y;
int4 i, j;
switch (matedit_mode) {
case 0:
return ERR_NONEXISTENT;
case 1:
case 3:
m = recall_var(matedit_name, matedit_length);
break;
case 2:
m = matedit_x;
break;
default:
return ERR_INTERNAL_ERROR;
}
if (m == NULL)
return ERR_NONEXISTENT;
if (reg_x->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
if (reg_x->type != TYPE_REAL)
return ERR_INVALID_TYPE;
if (reg_y->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
if (reg_y->type != TYPE_REAL)
return ERR_INVALID_TYPE;
x = ((vartype_real *) reg_x)->x;
if (x <= -2147483648.0 || x >= 2147483648.0)
return ERR_DIMENSION_ERROR;
j = to_int4(x);
if (j < 0)
j = -j;
y = ((vartype_real *) reg_y)->x;
if (y <= -2147483648.0 || y >= 2147483648.0)
return ERR_DIMENSION_ERROR;
i = to_int4(y);
if (i < 0)
i = -i;
if (m->type == TYPE_REALMATRIX) {
vartype_realmatrix *rm = (vartype_realmatrix *) m;
if (i == 0 || i > rm->rows || j == 0 || j > rm->columns)
return ERR_DIMENSION_ERROR;
} else if (m->type == TYPE_COMPLEXMATRIX) {
vartype_complexmatrix *cm = (vartype_complexmatrix *) m;
if (i == 0 || i > cm->rows || j == 0 || j > cm->columns)
return ERR_DIMENSION_ERROR;
} else
/* Should not happen, but could, as long as I don't implement
* matrix locking. */
return ERR_INVALID_TYPE;
matedit_i = i - 1;
matedit_j = j - 1;
return ERR_NONE;
}
static int mappable_tanh_r(phloat x, phloat *y) {
*y = tanh(x);
return ERR_NONE;
}
static int mappable_tanh_c(phloat xre, phloat xim, phloat *yre, phloat *yim) {
phloat sinhxre, coshxre;
phloat sinxim, cosxim;
phloat re_sinh, re_cosh, im_sinh, im_cosh, abs_cosh;
int inf;
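    /* tanh(z) = sinh(z)/cosh(z); with z = x+iy, sinh(z) and cosh(z) expand to
     * sinh(x)cos(y) + i*cosh(x)sin(y) and cosh(x)cos(y) + i*sinh(x)sin(y),
     * and the quotient is evaluated as sinh(z)*conj(cosh(z)) / |cosh(z)|^2. */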
sinhxre = sinh(xre);
coshxre = cosh(xre);
sincos(xim, &sinxim, &cosxim);
re_sinh = sinhxre * cosxim;
im_sinh = coshxre * sinxim;
re_cosh = coshxre * cosxim;
im_cosh = sinhxre * sinxim;
abs_cosh = hypot(re_cosh, im_cosh);
if (abs_cosh == 0) {
if (flags.f.range_error_ignore) {
*yre = re_sinh * im_sinh + re_cosh * im_cosh > 0 ? POS_HUGE_PHLOAT
: NEG_HUGE_PHLOAT;
*yim = im_sinh * re_cosh - re_sinh * im_cosh > 0 ? POS_HUGE_PHLOAT
: NEG_HUGE_PHLOAT;
} else
return ERR_OUT_OF_RANGE;
}
*yre = (re_sinh * re_cosh + im_sinh * im_cosh) / abs_cosh / abs_cosh;
if ((inf = p_isinf(*yre)) != 0) {
if (flags.f.range_error_ignore)
*yre = inf < 0 ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
else
return ERR_OUT_OF_RANGE;
}
*yim = (im_sinh * re_cosh - re_sinh * im_cosh) / abs_cosh / abs_cosh;
if ((inf = p_isinf(*yim)) != 0) {
if (flags.f.range_error_ignore)
*yim = inf < 0 ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
else
return ERR_OUT_OF_RANGE;
}
return ERR_NONE;
}
int docmd_tanh(arg_struct *arg) {
if (reg_x->type != TYPE_STRING) {
vartype *v;
int err = map_unary(reg_x, &v, mappable_tanh_r, mappable_tanh_c);
if (err == ERR_NONE)
unary_result(v);
return err;
} else
return ERR_ALPHA_DATA_IS_INVALID;
}
int docmd_trans(arg_struct *arg) {
if (reg_x->type == TYPE_REALMATRIX) {
vartype_realmatrix *src = (vartype_realmatrix *) reg_x;
vartype_realmatrix *dst;
int4 rows = src->rows;
int4 columns = src->columns;
int4 i, j;
dst = (vartype_realmatrix *) new_realmatrix(columns, rows);
if (dst == NULL)
return ERR_INSUFFICIENT_MEMORY;
for (i = 0; i < rows; i++)
for (j = 0; j < columns; j++) {
int4 n1 = i * columns + j;
int4 n2 = j * rows + i;
dst->array->is_string[n2] = src->array->is_string[n1];
dst->array->data[n2] = src->array->data[n1];
}
unary_result((vartype *) dst);
return ERR_NONE;
} else if (reg_x->type == TYPE_COMPLEXMATRIX) {
vartype_complexmatrix *src = (vartype_complexmatrix *) reg_x;
vartype_complexmatrix *dst;
int4 rows = src->rows;
int4 columns = src->columns;
int4 i, j;
dst = (vartype_complexmatrix *) new_complexmatrix(columns, rows);
if (dst == NULL)
return ERR_INSUFFICIENT_MEMORY;
for (i = 0; i < rows; i++)
for (j = 0; j < columns; j++) {
int4 n1 = 2 * (i * columns + j);
int4 n2 = 2 * (j * rows + i);
dst->array->data[n2] = src->array->data[n1];
dst->array->data[n2 + 1] = src->array->data[n1 + 1];
}
unary_result((vartype *) dst);
return ERR_NONE;
} else if (reg_x->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
else
return ERR_INVALID_TYPE;
}
int docmd_wrap(arg_struct *arg) {
flags.f.grow = 0;
return ERR_NONE;
}
int docmd_x_swap(arg_struct *arg) {
vartype *v;
int err = generic_rcl(arg, &v);
if (err != ERR_NONE)
return err;
err = generic_sto(arg, 0);
if (err != ERR_NONE)
free_vartype(v);
else {
free_vartype(reg_x);
reg_x = v;
if (flags.f.trace_print && flags.f.printer_exists)
docmd_prx(NULL);
}
return err;
}
#define DIR_LEFT 0
#define DIR_RIGHT 1
#define DIR_UP 2
#define DIR_DOWN 3
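/* Move the matrix editor's cursor one cell in the given direction: the value
 * in X is stored into the cell being left, and the newly selected cell is
 * recalled into X.  Wrapping off a row or column sets the edge-wrap flag;
 * wrapping past the last cell sets the end-wrap flag, and moving right past
 * the end appends a row when the GROW flag is set. */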
static int matedit_move(int direction) {
vartype *m, *v;
vartype_realmatrix *rm;
vartype_complexmatrix *cm;
int4 rows, columns, new_i, new_j, old_n, new_n;
int edge_flag = 0;
int end_flag = 0;
switch (matedit_mode) {
case 0:
return ERR_NONEXISTENT;
case 1:
case 3:
m = recall_var(matedit_name, matedit_length);
break;
case 2:
m = matedit_x;
break;
default:
return ERR_INTERNAL_ERROR;
}
if (m == NULL)
return ERR_NONEXISTENT;
if (m->type == TYPE_REALMATRIX) {
rm = (vartype_realmatrix *) m;
rows = rm->rows;
columns = rm->columns;
} else if (m->type == TYPE_COMPLEXMATRIX) {
cm = (vartype_complexmatrix *) m;
rows = cm->rows;
columns = cm->columns;
} else
return ERR_INVALID_TYPE;
if (!disentangle(m))
return ERR_INSUFFICIENT_MEMORY;
new_i = matedit_i;
new_j = matedit_j;
switch (direction) {
case DIR_LEFT:
if (--new_j < 0) {
edge_flag = 1;
new_j = columns - 1;
if (--new_i < 0) {
end_flag = 1;
new_i = rows - 1;
}
}
break;
case DIR_RIGHT:
if (++new_j >= columns) {
edge_flag = 1;
new_j = 0;
if (++new_i >= rows) {
end_flag = 1;
if (flags.f.grow) {
int err;
if (matedit_mode == 2)
err = dimension_array_ref(matedit_x,
rows + 1, columns);
else
err = dimension_array(matedit_name, matedit_length,
rows + 1, columns);
if (err != ERR_NONE)
return err;
new_i = rows++;
} else
new_i = 0;
}
}
break;
case DIR_UP:
if (--new_i < 0) {
edge_flag = 1;
new_i = rows - 1;
if (--new_j < 0) {
end_flag = 1;
new_j = columns - 1;
}
}
break;
case DIR_DOWN:
if (++new_i >= rows) {
edge_flag = 1;
new_i = 0;
if (++new_j >= columns) {
end_flag = 1;
new_j = 0;
}
}
break;
}
old_n = matedit_i * columns + matedit_j;
new_n = new_i * columns + new_j;
if (m->type == TYPE_REALMATRIX) {
if (old_n != new_n) {
if (rm->array->is_string[new_n])
v = new_string(phloat_text(rm->array->data[new_n]),
phloat_length(rm->array->data[new_n]));
else
v = new_real(rm->array->data[new_n]);
if (v == NULL)
return ERR_INSUFFICIENT_MEMORY;
}
if (reg_x->type == TYPE_REAL) {
rm->array->is_string[old_n] = 0;
rm->array->data[old_n] = ((vartype_real *) reg_x)->x;
} else if (reg_x->type == TYPE_STRING) {
vartype_string *s = (vartype_string *) reg_x;
int i;
rm->array->is_string[old_n] = 1;
phloat_length(rm->array->data[old_n]) = s->length;
for (i = 0; i < s->length; i++)
phloat_text(rm->array->data[old_n])[i] = s->text[i];
} else {
free_vartype(v);
return ERR_INVALID_TYPE;
}
} else /* m->type == TYPE_COMPLEXMATRIX */ {
if (old_n != new_n) {
v = new_complex(cm->array->data[2 * new_n],
cm->array->data[2 * new_n + 1]);
if (v == NULL)
return ERR_INSUFFICIENT_MEMORY;
}
if (reg_x->type == TYPE_REAL) {
cm->array->data[2 * old_n] = ((vartype_real *) reg_x)->x;
cm->array->data[2 * old_n + 1] = 0;
} else if (reg_x->type == TYPE_COMPLEX) {
vartype_complex *c = (vartype_complex *) reg_x;
cm->array->data[2 * old_n] = c->re;
cm->array->data[2 * old_n + 1] = c->im;
} else {
free_vartype(v);
return ERR_INVALID_TYPE;
}
}
matedit_i = new_i;
matedit_j = new_j;
flags.f.matrix_edge_wrap = edge_flag;
flags.f.matrix_end_wrap = end_flag;
if (old_n != new_n) {
free_vartype(reg_x);
reg_x = v;
}
mode_disable_stack_lift = true;
if (flags.f.trace_print && flags.f.printer_enable)
docmd_prx(NULL);
return ERR_NONE;
}
int docmd_left(arg_struct *arg) {
return matedit_move(DIR_LEFT);
}
int docmd_up(arg_struct *arg) {
return matedit_move(DIR_UP);
}
int docmd_down(arg_struct *arg) {
return matedit_move(DIR_DOWN);
}
int docmd_right(arg_struct *arg) {
return matedit_move(DIR_RIGHT);
}
int docmd_percent_ch(arg_struct *arg) {
phloat x, y, r;
int inf;
vartype *v;
if (reg_x->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
if (reg_x->type != TYPE_REAL)
return ERR_INVALID_TYPE;
if (reg_y->type == TYPE_STRING)
return ERR_ALPHA_DATA_IS_INVALID;
if (reg_y->type != TYPE_REAL)
return ERR_INVALID_TYPE;
x = ((vartype_real *) reg_x)->x;
y = ((vartype_real *) reg_y)->x;
if (y == 0)
return ERR_DIVIDE_BY_0;
r = (x - y) / y * 100;
if ((inf = p_isinf(r)) != 0) {
if (flags.f.range_error_ignore)
r = inf < 0 ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
else
return ERR_OUT_OF_RANGE;
}
v = new_real(r);
if (v == NULL)
return ERR_INSUFFICIENT_MEMORY;
/* Binary function, but unary result, like % */
unary_result(v);
return ERR_NONE;
}
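/* MATA, MATB and MATX (SIMQ menu): open one of the simultaneous-equation
 * matrices in the matrix editor.  MATX first solves MATA * MATX = MATB via
 * linalg_div(); matx_completion() stores the solution under "MATX" and then
 * enters the editor on it. */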
static vartype *matx_v;
static void matx_completion(int error, vartype *res) {
if (error != ERR_NONE) {
free_vartype(matx_v);
return;
}
store_var("MATX", 4, res);
matedit_prev_appmenu = MENU_MATRIX_SIMQ;
set_menu(MENULEVEL_APP, MENU_MATRIX_EDIT1);
/* NOTE: no need to use set_menu_return_err() here, since the MAT[ABX]
* commands can only be invoked from the SIMQ menu; the SIMQ menu
* has no exit callback, so leaving it never fails.
*/
set_appmenu_exitcallback(1);
if (res->type == TYPE_REALMATRIX) {
vartype_realmatrix *m = (vartype_realmatrix *) res;
vartype_real *v = (vartype_real *) matx_v;
v->x = m->array->data[0];
} else {
vartype_complexmatrix *m = (vartype_complexmatrix *) res;
vartype_complex *v = (vartype_complex *) matx_v;
v->re = m->array->data[0];
v->im = m->array->data[1];
}
free_vartype(reg_x);
reg_x = matx_v;
matedit_mode = 3;
matedit_length = 4;
matedit_name[0] = 'M';
matedit_name[1] = 'A';
matedit_name[2] = 'T';
matedit_name[3] = 'X';
matedit_i = 0;
matedit_j = 0;
}
static int matabx(int which) {
vartype *mat, *v;
switch (which) {
case 0:
mat = recall_var("MATA", 4);
break;
case 1:
mat = recall_var("MATB", 4);
break;
case 2: {
vartype *mata, *matb;
mata = recall_var("MATA", 4);
if (mata == NULL)
return ERR_NONEXISTENT;
if (mata->type != TYPE_REALMATRIX
&& mata->type != TYPE_COMPLEXMATRIX)
return ERR_INVALID_TYPE;
matb = recall_var("MATB", 4);
if (matb == NULL)
return ERR_NONEXISTENT;
if (matb->type != TYPE_REALMATRIX
&& matb->type != TYPE_COMPLEXMATRIX)
return ERR_INVALID_TYPE;
if (mata->type == TYPE_REALMATRIX && matb->type == TYPE_REALMATRIX)
matx_v = new_real(0);
else
matx_v = new_complex(0, 0);
if (matx_v == NULL)
return ERR_INSUFFICIENT_MEMORY;
return linalg_div(matb, mata, matx_completion);
}
}
if (mat->type == TYPE_REALMATRIX) {
vartype_realmatrix *rm = (vartype_realmatrix *) mat;<|fim▁hole|> if (rm->array->is_string[0])
v = new_string(phloat_text(rm->array->data[0]),
phloat_length(rm->array->data[0]));
else
v = new_real(rm->array->data[0]);
} else {
vartype_complexmatrix *cm = (vartype_complexmatrix *) mat;
v = new_complex(cm->array->data[0], cm->array->data[1]);
}
if (v == NULL)
return ERR_INSUFFICIENT_MEMORY;
matedit_prev_appmenu = MENU_MATRIX_SIMQ;
set_menu(MENULEVEL_APP, MENU_MATRIX_EDIT1);
/* NOTE: no need to use set_menu_return_err() here, since the MAT[ABX]
* commands can only be invoked from the SIMQ menu; the SIMQ menu
* has no exit callback, so leaving it never fails.
*/
set_appmenu_exitcallback(1);
free_vartype(reg_x);
reg_x = v;
matedit_mode = 3;
matedit_length = 4;
matedit_name[0] = 'M';
matedit_name[1] = 'A';
matedit_name[2] = 'T';
matedit_name[3] = which == 0 ? 'A' : 'B';
matedit_i = 0;
matedit_j = 0;
return ERR_NONE;
}
int docmd_mata(arg_struct *arg) {
return matabx(0);
}
int docmd_matb(arg_struct *arg) {
return matabx(1);
}
int docmd_matx(arg_struct *arg) {
return matabx(2);
}
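/* SIMQ n: prepare the simultaneous-equation solver by dimensioning MATA to
 * n x n and MATB and MATX to n x 1, reusing existing matrices of those names
 * where possible.  The labels below unwind the partially created matrices if
 * any later step fails. */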
int docmd_simq(arg_struct *arg) {
vartype *m, *mata, *matb, *matx;
int4 dim;
int err;
if (arg->type != ARGTYPE_NUM)
return ERR_INVALID_TYPE;
dim = arg->val.num;
if (dim <= 0)
return ERR_DIMENSION_ERROR;
m = recall_var("MATA", 4);
if (m != NULL && (m->type == TYPE_REALMATRIX || m->type == TYPE_COMPLEXMATRIX)) {
mata = dup_vartype(m);
if (mata == NULL)
return ERR_INSUFFICIENT_MEMORY;
err = dimension_array_ref(mata, dim, dim);
if (err != ERR_NONE)
goto abort_and_free_a;
} else {
mata = new_realmatrix(dim, dim);
if (mata == NULL)
return ERR_INSUFFICIENT_MEMORY;
}
m = recall_var("MATB", 4);
if (m != NULL && (m->type == TYPE_REALMATRIX || m->type == TYPE_COMPLEXMATRIX)) {
matb = dup_vartype(m);
if (matb == NULL) {
err = ERR_INSUFFICIENT_MEMORY;
goto abort_and_free_a;
}
err = dimension_array_ref(matb, dim, 1);
if (err != ERR_NONE)
goto abort_and_free_a_b;
} else {
matb = new_realmatrix(dim, 1);
if (matb == NULL) {
err = ERR_INSUFFICIENT_MEMORY;
goto abort_and_free_a;
}
}
m = recall_var("MATX", 4);
if (m != NULL && (m->type == TYPE_REALMATRIX || m->type == TYPE_COMPLEXMATRIX)) {
matx = dup_vartype(m);
if (matx == NULL) {
err = ERR_INSUFFICIENT_MEMORY;
goto abort_and_free_a_b;
}
err = dimension_array_ref(matx, dim, 1);
if (err != ERR_NONE)
goto abort_and_free_a_b_x;
} else {
matx = new_realmatrix(dim, 1);
if (matx == NULL) {
err = ERR_INSUFFICIENT_MEMORY;
goto abort_and_free_a_b;
}
}
err = set_menu_return_err(MENULEVEL_APP, MENU_MATRIX_SIMQ);
if (err != ERR_NONE) {
/* Didn't work; we're stuck in the matrix editor
* waiting for the user to put something valid into X.
* (Then again, how can anyone issue the SIMQ command if
* they're in the matrix editor? SIMQ has the 'hidden'
* command property. Oh, well, better safe than sorry...)
*/
abort_and_free_a_b_x:
free_vartype(matx);
abort_and_free_a_b:
free_vartype(matb);
abort_and_free_a:
free_vartype(mata);
return err;
}
store_var("MATX", 4, matx);
store_var("MATB", 4, matb);
store_var("MATA", 4, mata);
return ERR_NONE;
}
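/* Shared implementation of [MAX] and [MIN]: scan the current column of the
 * edited matrix from the current row downward, returning the extreme value
 * in X and its 1-based row number in Y. */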
static int max_min_helper(int do_max) {
vartype *m;
vartype_realmatrix *rm;
phloat max_or_min_value = do_max ? NEG_HUGE_PHLOAT : POS_HUGE_PHLOAT;
int4 i, max_or_min_index = 0;
vartype *new_x, *new_y;
switch (matedit_mode) {
case 0:
return ERR_NONEXISTENT;
case 1:
case 3:
m = recall_var(matedit_name, matedit_length);
break;
case 2:
m = matedit_x;
break;
default:
return ERR_INTERNAL_ERROR;
}
if (m == NULL)
return ERR_NONEXISTENT;
if (m->type != TYPE_REALMATRIX)
return ERR_INVALID_TYPE;
rm = (vartype_realmatrix *) m;
for (i = matedit_i; i < rm->rows; i++) {
int4 index = i * rm->columns + matedit_j;
phloat e;
if (rm->array->is_string[index])
return ERR_ALPHA_DATA_IS_INVALID;
e = rm->array->data[index];
if (do_max ? e >= max_or_min_value : e <= max_or_min_value) {
max_or_min_value = e;
max_or_min_index = i;
}
}
new_x = new_real(max_or_min_value);
if (new_x == NULL)
return ERR_INSUFFICIENT_MEMORY;
new_y = new_real(max_or_min_index + 1);
if (new_y == NULL) {
free_vartype(new_x);
return ERR_INSUFFICIENT_MEMORY;
}
recall_two_results(new_x, new_y);
return ERR_NONE;
}
int docmd_max(arg_struct *arg) {
return max_min_helper(1);
}
int docmd_min(arg_struct *arg) {
return max_min_helper(0);
}
int docmd_find(arg_struct *arg) {
vartype *m;
if (reg_x->type == TYPE_REALMATRIX || reg_x->type == TYPE_COMPLEXMATRIX)
return ERR_INVALID_TYPE;
switch (matedit_mode) {
case 0:
return ERR_NONEXISTENT;
case 1:
case 3:
m = recall_var(matedit_name, matedit_length);
break;
case 2:
m = matedit_x;
break;
default:
return ERR_INTERNAL_ERROR;
}
if (m == NULL)
return ERR_NONEXISTENT;
if (m->type == TYPE_REALMATRIX) {
vartype_realmatrix *rm;
int4 i, j, p = 0;
if (reg_x->type == TYPE_COMPLEX)
return ERR_NO;
rm = (vartype_realmatrix *) m;
if (reg_x->type == TYPE_REAL) {
phloat d = ((vartype_real *) reg_x)->x;
for (i = 0; i < rm->rows; i++)
for (j = 0; j < rm->columns; j++)
if (!rm->array->is_string[p] && rm->array->data[p] == d) {
matedit_i = i;
matedit_j = j;
return ERR_YES;
} else
p++;
} else /* reg_x->type == TYPE_STRING */ {
vartype_string *s = (vartype_string *) reg_x;
for (i = 0; i < rm->rows; i++)
for (j = 0; j < rm->columns; j++)
if (rm->array->is_string[p]
&& string_equals(s->text, s->length,
phloat_text(rm->array->data[p]),
phloat_length(rm->array->data[p]))) {
matedit_i = i;
matedit_j = j;
return ERR_YES;
} else
p++;
}
} else /* m->type == TYPE_COMPLEXMATRIX */ {
vartype_complexmatrix *cm;
int4 i, j, p = 0;
phloat re, im;
if (reg_x->type != TYPE_COMPLEX)
return ERR_NO;
cm = (vartype_complexmatrix *) m;
re = ((vartype_complex *) reg_x)->re;
im = ((vartype_complex *) reg_x)->im;
for (i = 0; i < cm->rows; i++)
for (j = 0; j < cm->columns; j++)
if (cm->array->data[p] == re && cm->array->data[p + 1] == im) {
matedit_i = i;
matedit_j = j;
return ERR_YES;
} else
p += 2;
}
return ERR_NO;
}
int docmd_xrom(arg_struct *arg) {
return ERR_NONEXISTENT;
}<|fim▁end|> | |
<|file_name|>test_upstart.py<|end_file_name|><|fim▁begin|>import collections
from mock import MagicMock
from mock import call
from mock import patch
from honcho.test.helpers import TestCase
from honcho.export.upstart import Export
FakeProcess = collections.namedtuple('FakeProcess', 'name')
FIX_1PROC = [FakeProcess('web.1')]
FIX_NPROC = [FakeProcess('web.1'),
FakeProcess('worker.1'),
FakeProcess('worker.2')]<|fim▁hole|>
class TestExportUpstart(TestCase):
def setUp(self): # noqa
self.export = Export()
self.master = MagicMock()
self.process_master = MagicMock()
self.process = MagicMock()
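        # Stub out template lookup so each test can assert against the mock
        # that corresponds to the template file being rendered.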
def _get_template(name):
if name.endswith('process_master.conf'):
return self.process_master
elif name.endswith('process.conf'):
return self.process
elif name.endswith('master.conf'):
return self.master
else:
raise RuntimeError("tests don't know about that template")
self.get_template_patcher = patch.object(Export, 'get_template')
self.get_template = self.get_template_patcher.start()
self.get_template.side_effect = _get_template
def tearDown(self): # noqa
self.get_template_patcher.stop()
def test_render_master(self):
out = self.export.render(FIX_1PROC, {'app': 'elephant'})
self.assertIn(('elephant.conf', self.master.render.return_value),
out)
self.master.render.assert_called_once_with({'app': 'elephant'})
def test_render_process_master(self):
out = self.export.render(FIX_1PROC, {'app': 'elephant'})
self.assertIn(('elephant-web.conf',
self.process_master.render.return_value),
out)
expected = {'app': 'elephant',
'group_name': 'elephant-web'}
self.process_master.render.assert_called_once_with(expected)
def test_render_process(self):
out = self.export.render(FIX_1PROC, {'app': 'elephant'})
self.assertIn(('elephant-web-1.conf',
self.process.render.return_value),
out)
expected = {'app': 'elephant',
'group_name': 'elephant-web',
'process': FIX_1PROC[0]}
self.process.render.assert_called_once_with(expected)
def test_render_multiple_process_groups(self):
out = self.export.render(FIX_NPROC, {'app': 'elephant'})
self.assertIn(('elephant-web.conf',
self.process_master.render.return_value),
out)
self.assertIn(('elephant-worker.conf',
self.process_master.render.return_value),
out)
expected = [call({'app': 'elephant',
'group_name': 'elephant-web'}),
call({'app': 'elephant',
'group_name': 'elephant-worker'})]
self.assertEqual(expected, self.process_master.render.call_args_list)
def test_render_multiple_processes(self):
out = self.export.render(FIX_NPROC, {'app': 'elephant'})
self.assertIn(('elephant-web-1.conf',
self.process.render.return_value),
out)
self.assertIn(('elephant-worker-1.conf',
self.process.render.return_value),
out)
self.assertIn(('elephant-worker-2.conf',
self.process.render.return_value),
out)
expected = [call({'app': 'elephant',
'group_name': 'elephant-web',
'process': FIX_NPROC[0]}),
call({'app': 'elephant',
'group_name': 'elephant-worker',
'process': FIX_NPROC[1]}),
call({'app': 'elephant',
'group_name': 'elephant-worker',
'process': FIX_NPROC[2]})]
self.assertEqual(expected, self.process.render.call_args_list)<|fim▁end|> | |
<|file_name|>task_plot3d.py<|end_file_name|><|fim▁begin|>from taskinit import *
from mpl_toolkits.mplot3d import axes3d, Axes3D
import matplotlib.pyplot as plt
from matplotlib.ticker import LinearLocator, FormatStrFormatter
import matplotlib.cm as cm
import numpy as np
from pylab import ion,ioff
# plot3d is released under a BSD 3-Clause License
# See LICENSE for details
# HISTORY:
# 1.0 12Jul2014 Initial version.
# 1.1 04Aug2014 Fixed up time axis problem; correlation selection improved.
# 1.2 15Aug2014 Added uvrange selection.
# 1.3 25Aug2014 Bug fix: removed vmin from plot_surface.
# 1.4 01Oct2015 Added explicit handling for linear feed basis.
# 1.5 24Oct2016 Minor help file fixes, no change to code
#
def plot3d(vis,fid,datacolumn,corr,uvrange,plotall,spw,timecomp,chancomp,clipamp,outpng):
#
# Task plot3d
#
# Quickly inspect data for RFI by plotting time vs frequency vs amplitude
# Christopher A. Hales
#
# Version 1.5 (tested with CASA Version 4.7.0)
# 24 October 2016
casalog.origin('plot3d')
# channel to frequency conversion
tb.open(vis+'/SPECTRAL_WINDOW')
vtble=tb.getcol('CHAN_FREQ')
    tb.close()
nspw=vtble.shape[1]
# Get mapping between correlation string and number.
# Assume they don't change throughout observation.
# This is clunky...
tb.open(vis+'/DATA_DESCRIPTION')
if plotall:
# Get id of a spw in the data, just grab first one within the first
# scan on the chosen field so that some statistics can be obtained.
# Note: I won't assume that spw specifies data_desc_id in the main table, even
# though in most cases it probably does. Probably overkill given the lack
# of checks done elsewhere in this code...
# later we will gather scan information by looking at
# a single spw and assuming it represents all spw's
ms.open(vis)
ms.msselect({'field':str(fid)})
tempddid=ms.getdata(["DATA_DESC_ID"])['data_desc_id'][0]
        ms.close()
spw=tb.getcell('SPECTRAL_WINDOW_ID',tempddid)
polid=tb.getcell('POLARIZATION_ID',tempddid)
else:
temptb=tb.query('SPECTRAL_WINDOW_ID='+str(spw))
polid=temptb.getcell('POLARIZATION_ID')
    tb.close()
tb.open(vis+'/POLARIZATION')
npol=tb.getcell('NUM_CORR',polid)
    tb.close()
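    # Map the requested correlation label onto its index in the DATA array:
    # dual-pol data is ordered (RR,LL) or (XX,YY), full-pol data is ordered
    # (RR,RL,LR,LL) or (XX,XY,YX,YY).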
if npol == 2:
if corr == 'RR' or corr == 'XX':
corrID = 0
elif corr == 'LL' or corr == 'YY':
corrID = 1
else:
casalog.post('*** plot3d error: selected correlation doesn\'t exist. Terminating.', 'ERROR')
return
elif npol == 4:
if corr == 'RR' or corr == 'XX':
corrID = 0
elif corr == 'RL' or corr == 'XY':
corrID = 1
elif corr == 'LR' or corr == 'YX':
corrID = 2
elif corr == 'LL' or corr == 'YY':
corrID = 3
else:
casalog.post('*** plot3d error: selected correlation doesn\'t exist. Terminating.', 'ERROR')
return
else:
        casalog.post('*** plot3d error: see the code, this is a weird error! Terminating.', 'ERROR')
        return
corrSTR = corr
corr = corrID
# calculate number of effective channels per spw
# I assume that the end channels of each spw have been flagged.
# Force individual channels to remain at either end of spw,
# in order to ensure amplitudes are zero in between
# non-contiguous spw's. This will also ensure that it is
# easier to see spw boundaries in between contiguous spw's.
nchan = int(np.floor((vtble.shape[0]-2)/float(chancomp)))+2
# guard against the user inputting infinite chancomp
if nchan == 2:
nchan = 3
if plotall:
# I don't make any effort to set the amplitude to
# zero in the gaps between spw's (ie if spw's are not
# contiguous) because I will assume that flagging of
# spw edge channels has already taken place. Thus
# there is no need to pad spw's with extra channels
# if they happen to sit next to a gap in frequency
# coverage. For a more general code this would not
# be appropriate.
N=np.zeros(nchan*nspw)
t=0
for i in range(nspw):
# the following copied from single-spw "else" part below
k=0
# 1st channel in spw
N[t] = vtble[k,i]/1e6
t += 1
k += 1
# middle channels
# check if we are in the last block
while k+2*chancomp-1 <= vtble.shape[0]-2:
for h in range(chancomp):
N[t] = N[t] + vtble[k+h,i]
N[t] = N[t]/1e6/chancomp
t += 1
k += chancomp
# for the last block, just combine everything remaining
for h in range(k,vtble.shape[0]-1):
N[t] = N[t] + vtble[h,i]
N[t] = N[t]/1e6/len(range(k,vtble.shape[0]-1))
t += 1
# last channel in spw
N[t] = vtble[vtble.shape[0]-1,i]/1e6
t += 1
## TESTING: get regular channel data to compare
#Q=np.zeros([vtble.shape[0]*nspw])
#t=0
#for i in range(nspw):
# for k in range(vtble.shape[0]):
# Q[t] = vtble[k,i]/1e6
# t += 1
else:
N=np.zeros(nchan)
t=0
k=0
# 1st channel in spw
N[t] = vtble[k,spw]/1e6
t += 1
k += 1
# middle channels
# check if we are in the last block
while k+2*chancomp-1 <= vtble.shape[0]-2:
for h in range(chancomp):
N[t] = N[t] + vtble[k+h,spw]
N[t] = N[t]/1e6/chancomp
t += 1
k += chancomp
# for the last block, just combine everything remaining
for h in range(k,vtble.shape[0]-1):
N[t] = N[t] + vtble[h,spw]
N[t] = N[t]/1e6/len(range(k,vtble.shape[0]-1))
t += 1
# last channel in spw
N[t] = vtble[vtble.shape[0]-1,spw]/1e6
## TESTING: get regular channel data to compare
#Q=np.zeros(vtble.shape[0])
#t=0
#for k in range(vtble.shape[0]):
# Q[t] = vtble[k,spw]/1e6
# t += 1
ms.open(vis)
# assume time is same for each spw
# this is not the most efficient place in the code for this bit, meh
ms.reset()
ms.msselect({'field':str(fid),'spw':str(spw)})
if len(uvrange) > 0:
ms.msselect({'uvdist':uvrange})
# get the raw timestamps
Z=ms.getdata('time')['time']
# get the unique timestamps and nbaselines for each timestamp
# (don't assume the same baselines are available in each time step)
temptime = np.unique(Z)
nbaselines = []
for i in range(len(temptime)):
nbaselines.append(len(Z[Z==temptime[i]]))
# Get scan summary in prep for calculating time steps.
# Note that CASA currently reports all spw's in the
# scan summary, rather than the 1 selected above. meh
scan_summary = ms.getscansummary()
scan_list = []
for scan in scan_summary:
if scan_summary[scan]['0']['FieldId'] == fid:
scan_list.append(int(scan))
scan_list.sort()
# get integration time in minutes; assume it doesn't change in
# any way throughout the observation, ie between spw's, etc
inttime=scan_summary[str(scan_list[0])]['0']['IntegrationTime'] / 60.0
# Calculate number of true time steps per scan.
# In the code below, a dummy timestep will be added at each
# end of each scan to ensure amplitudes are zero in between
# non-contiguous scans. This will also ensure that it is
# easier to see scan boundaries in between contiguous
# scans. The 1st and last timestamp do not contribute to
# the time compression stuff.
# Also calculate effective time steps per scan, so that
# I can call the variable effntime...!
scan_ntime = []
scan_effntime = []
t = 0
for scan in scan_list:
i = 0
bcounter = 0
while bcounter < scan_summary[str(scan)]['0']['nRow']:
bcounter += nbaselines[t]
i += 1
t += 1
scan_ntime.append(i)
tempvar=int(np.floor(i/float(timecomp)))+2
# guard against the user inputting infinite timecomp
if tempvar == 2:
scan_effntime.append(tempvar+1)
else:
scan_effntime.append(tempvar)
ntime = sum(scan_effntime)
# go through each scan and add a dummy timestep before
# and after each one, with time difference equal to
# one ten thousandth of a time step (make this
# small so that a slope doesn't show up in the plot)
intdividefactor=10000.0
M=np.zeros(ntime)
t=0
for d in range(len(scan_list)):
checkfirst=True
k=0
while k+2*timecomp-1 <= scan_ntime[d]-1:
for h in range(timecomp):
if checkfirst:
t+=1
M[t] += temptime[sum(scan_ntime[:d])+k+h]
if checkfirst:
M[t-1] = M[t]-inttime/intdividefactor
checkfirst=False
M[t] = M[t]/timecomp
t += 1
k += timecomp
for h in range(scan_ntime[d]-k):
if checkfirst:
t+=1
M[t] += temptime[sum(scan_ntime[:d])+k+h]
if checkfirst:
M[t-1] = M[t]-inttime/intdividefactor
checkfirst=False
M[t] = M[t]/len(range(scan_ntime[d]-k))
t+=1
M[t] = M[t-1]+inttime/intdividefactor
t+=1
# time is in seconds from zero modified Julian date...not very aesthetic
# subtract off the starting time and convert to minutes
M=(M-M.min())/60
# For each gap between scans, modify the data so it looks like only 5
# integration times have passed. For example, this will make it easier
# to look at your secondary calibrator data. This will of course make
# your time axis look weird...but it can improve 3D plot rendering speed
for i in range(len(scan_list)-1):
i += 1
tempval = M[sum(scan_effntime[0:i])] - M[sum(scan_effntime[0:i])-1]
M[sum(scan_effntime[0:i]):] = M[sum(scan_effntime[0:i]):] - tempval + 5*inttime
# go through each spectral window and extract amplitude data
if plotall:
for i in range(nspw):
ms.reset()
ms.msselect({'field':str(fid),'spw':str(i)})
if len(uvrange) > 0:
ms.msselect({'uvdist':uvrange})
# visibility data (X,Y,Z) where
# X=4 (RR,RL,LR,LL) or (XX,XY,YX,YY)
# Y=number of channels
# Z=number of rows (visibilities/4)
tempdata=ms.getdata(datacolumn)
tempflag=ms.getdata('flag')
# true flag means I should flag it, so switch to ensure good points remain
tempflag=np.invert(tempflag['flag'][corr])
# select amplitude data associated with requested correlation
# and ensure any existing flagged points are set to zero
P1 = np.multiply(abs(tempdata[datacolumn][corr]),tempflag)
# time + baseline compression
P2=np.zeros([P1.shape[0],ntime])
# loop over channels
# yes, this is inefficient, but hopefully easier to understand
for s in range(P1.shape[0]):
t=0
for d in range(len(scan_list)):
checkfirst=True
k=0
while k+2*timecomp-1 <= scan_ntime[d]-1:
if checkfirst:
t+=1
P2[s,t] = max(P1[s,sum(nbaselines[:sum(scan_ntime[:d])+k]):sum(nbaselines[:sum(scan_ntime[:d])+k+timecomp])])
if clipamp>=0:
P2[s,t] = min(clipamp,P2[s,t])
if checkfirst:
P2[s,t-1] = 0.0
checkfirst=False
t += 1
k += timecomp
if checkfirst:
t+=1
tempvar=len(range(scan_ntime[d]-k))
P2[s,t] = max(P1[s,sum(nbaselines[:sum(scan_ntime[:d])+k]):sum(nbaselines[:sum(scan_ntime[:d])+k+tempvar])])
if clipamp>=0:
P2[s,t] = min(clipamp,P2[s,t])
if checkfirst:
P2[s,t-1] = 0.0
checkfirst=False
t+=1
P2[s,t] = 0.0
t+=1
# channel compression
# for clarity, don't combine this step with the
# time+baseline compression above
P3=np.zeros([nchan,ntime])
# 1st channel in spw
t=0
k=0
P3[t] = P2[t]
t += 1
k += 1
# middle channels
while k+2*chancomp-1 <= P2.shape[0]-2:
for h in range(chancomp):
P3[t] = np.maximum(P3[t],P2[k+h])
t += 1
k += chancomp
for h in range(k,P2.shape[0]-1):
P3[t] = np.maximum(P3[t],P2[h])
t += 1
# last channel in spw
P3[t] = P2[P2.shape[0]-1]
if i == 0:
P=P3
else:
P=np.concatenate((P,P3),axis=0)
# not needed because of selection above
# spectral window, with same number of rows as Z above
#sdata=ms.getdata('data_desc_id')
# not needed because of selection above
# field ID
#fdata=ms.getdata('field_id')
else:
# just copy the important steps from above
ms.reset()
ms.msselect({'field':str(fid),'spw':str(spw)})
if len(uvrange) > 0:
ms.msselect({'uvdist':uvrange})
tempdata=ms.getdata(datacolumn)
tempflag=ms.getdata('flag')
tempflag=np.invert(tempflag['flag'][corr])
P1=np.multiply(abs(tempdata[datacolumn][corr]),tempflag)
# time + baseline compression
P2=np.zeros([P1.shape[0],ntime])
# loop over channels
# yes, this is inefficient, but hopefully easier to understand
for s in range(P1.shape[0]):
t=0
for d in range(len(scan_list)):
checkfirst=True
k=0
while k+2*timecomp-1 <= scan_ntime[d]-1:
if checkfirst:
t+=1
P2[s,t] = max(P1[s,sum(nbaselines[:sum(scan_ntime[:d])+k]):sum(nbaselines[:sum(scan_ntime[:d])+k+timecomp])])
if clipamp>=0:
P2[s,t] = min(clipamp,P2[s,t])
if checkfirst:
P2[s,t-1] = 0.0
checkfirst=False
t += 1
k += timecomp
if checkfirst:
t+=1
tempvar=len(range(scan_ntime[d]-k))
P2[s,t] = max(P1[s,sum(nbaselines[:sum(scan_ntime[:d])+k]):sum(nbaselines[:sum(scan_ntime[:d])+k+tempvar])])
if clipamp>=0:
P2[s,t] = min(clipamp,P2[s,t])
if checkfirst:<|fim▁hole|> checkfirst=False
t+=1
P2[s,t] = 0.0
t+=1
# channel compression
# for clarity, don't combine this step with the
# time+baseline compression above
P=np.zeros([nchan,ntime])
# 1st channel in spw
t=0
k=0
P[t] = P2[t]
t += 1
k += 1
# middle channels
while k+2*chancomp-1 <= P2.shape[0]-2:
for h in range(chancomp):
P[t] = np.maximum(P[t],P2[k+h])
t += 1
k += chancomp
for h in range(k,P2.shape[0]-1):
P[t] = np.maximum(P[t],P2[h])
t += 1
# last channel in spw
P[t] = P2[P2.shape[0]-1]
ms.close()
# clear memory, not needed any more
vtble=[]
Z=[]
P1=[]
P2=[]
P3=[]
tempdata=[]
tempflag=[]
# M=time, N=frequency , P=amplitude
M2D,N2D=np.meshgrid(M,N)
ion()
fig = plt.figure()
ax = Axes3D(fig)
ax.set_xlabel('time (mins)')
ax.set_ylabel('frequency (MHz)')
ax.set_zlabel('amplitude')
if len(uvrange) == 0:
uvrange='ALL'
if plotall:
plot_title='field:'+str(fid)+' corr:'+corrSTR+' column:'+datacolumn+' uvrange:'+uvrange
figname=vis.strip('.ms')+'_plot3d_fid'+str(fid)+'_corr'+corrSTR+'_'+datacolumn+\
'_uv'+uvrange+'_t'+str(timecomp)+'_c'+str(chancomp)
else:
plot_title='field:'+str(fid)+' corr:'+corrSTR+' spw:'+str(spw)+' column:'+datacolumn+' uvrange:'+uvrange
figname=vis.strip('.ms')+'_plot3d_fid'+str(fid)+'_corr'+corrSTR+'_spw'+str(spw)+'_'+datacolumn+\
'_uv'+uvrange+'_t'+str(timecomp)+'_c'+str(chancomp)
ax.set_title(plot_title)
#ax.set_zscale('log')
ax.plot_surface(M2D, N2D, P, rstride=1, cstride=1, cmap=cm.jet)
#if isinstance(plotfig,str):
# figname=plotfig
# plotfig=1
if outpng:
fig.savefig(figname)
ioff()<|fim▁end|> | P2[s,t-1] = 0.0 |
<|file_name|>getgroupmembershipforuser.test.js<|end_file_name|><|fim▁begin|>'use strict'
const tap = require('tap')
const ActiveDirectory = require('../index')
const config = require('./config')
const serverFactory = require('./mockServer')
const settings = require('./settings').getGroupMembershipForUser
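// Each test case gets a fresh mock LDAP server and ActiveDirectory client via
// the beforeEach hook; afterEach shuts the server down again.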
tap.beforeEach((done, t) => {
serverFactory(function (err, server) {
if (err) return done(err)
const connectionConfig = config(server.port)
t.context.ad = new ActiveDirectory(connectionConfig)
t.context.server = server
done()
})
})
tap.afterEach((done, t) => {
if (t.context.server) t.context.server.close()
done()
})
tap.test('#getGroupMembershipForUser()', t => {
settings.users.forEach((user) => {
['dn', 'userPrincipalName', 'sAMAccountName'].forEach((attr) => {
const len = user.members.length
t.test(`should return ${len} groups for ${attr}`, t => {
t.context.ad.getGroupMembershipForUser(user[attr], function (err, groups) {
t.error(err)
t.true(groups.length >= user.members.length)
const groupNames = groups.map((g) => {
return g.cn
})
user.members.forEach((g) => {
t.true(groupNames.includes(g))
})
t.end()
})
})
})
})
t.test('should return empty groups if groupName doesn\'t exist', t => {
t.context.ad.getGroupMembershipForUser('!!!NON-EXISTENT GROUP!!!', function (err, groups) {
t.error(err)
t.type(groups, Array)
t.equal(groups.length, 0)
t.end()
})
})
t.test('should return default group attributes when not specified', t => {<|fim▁hole|> t.context.ad.getGroupMembershipForUser(user.userPrincipalName, function (err, groups) {
t.error(err)
t.ok(groups)
groups.forEach((g) => {
const keys = Object.keys(g)
defaultAttributes.forEach((attr) => {
t.true(keys.includes(attr))
})
})
t.end()
})
})
t.end()
})
tap.test('#getGroupMembershipForUser(opts)', t => {
t.test('should return only requested attributes', t => {
const opts = {
attributes: ['createTimeStamp']
}
const user = settings.users[0]
t.context.ad.getGroupMembershipForUser(opts, user.userPrincipalName, function (err, groups) {
t.error(err)
t.ok(groups)
t.true(groups.length >= user.members.length)
groups.forEach((g) => {
const keys = Object.keys(g)
keys.forEach((attr) => {
t.true(opts.attributes.includes(attr))
})
})
t.end()
})
})
t.end()
})<|fim▁end|> | const defaultAttributes = ['objectCategory', 'distinguishedName', 'cn', 'description']
const user = settings.users[0] |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from django.http import HttpResponse
from paste.models import Paste, Language
@csrf_exempt <|fim▁hole|>def add(request):
print "jojo"
if request.method == 'POST':
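        # Create a paste from the posted content, falling back to the plain
        # text language if the requested one is unknown.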
language = request.POST['language']
content = request.POST['content']
try:
lang = Language.objects.get(pk=language)
except:
print "lang not avalible", language
lang = Language.objects.get(pk='txt')
paste = Paste(content=content, language=lang)
paste.save()
paste = Paste.objects.latest()
return HttpResponse(paste.pk, content_type='text/plain')
else:
return redirect('/api')<|fim▁end|> | |
<|file_name|>binary_search.rs<|end_file_name|><|fim▁begin|>fn main() {
let nums = vec![1, 3, 5, 7, 9];
let find_me = 5;
let result = binary_search(&nums, find_me, 0, nums.len());
println!("Given Array: {:?}", nums);
match result {
Some(index) => println!("Searched for {} and found index {}.", find_me, index),
None => println!("Searched for {} but found no occurrence.", find_me),
}
}
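// Iterative binary search over a sorted slice: the [left, right] interval is
// halved on every pass until the value is found or the interval is exhausted.
// Returns Some(index) for `search_value`, or None when it is not present.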
fn binary_search(nums: &[i64], search_value: i64, left: usize, right: usize) -> Option<usize> {
let mut left: usize = left;<|fim▁hole|>
while left <= right {
let middle = (left + right) / 2;
if middle == nums.len() {
break;
}
if nums[middle] == search_value {
return Some(middle);
} else if nums[middle] < search_value {
left = middle + 1;
} else if nums[middle] > search_value && middle != 0 {
right = middle - 1;
} else {
break;
}
}
None
}<|fim▁end|> | let mut right: usize = right; |
<|file_name|>typeable.js<|end_file_name|><|fim▁begin|>/*[email protected]#typeable*/
var syn = require('./synthetic.js');
var typeables = [];
var __indexOf = [].indexOf || function (item) {
for (var i = 0, l = this.length; i < l; i++) {
if (i in this && this[i] === item) {
return i;
}
}
return -1;
};
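/* syn.typeable registers a predicate; syn.typeable.test runs the registered
   predicates against an element and reports whether any of them considers it
   typeable (inputs, textareas and contenteditable elements by default). */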
syn.typeable = function (fn) {
if (__indexOf.call(typeables, fn) === -1) {
typeables.push(fn);
}
};
syn.typeable.test = function (el) {
for (var i = 0, len = typeables.length; i < len; i++) {
if (typeables[i](el)) {
return true;<|fim▁hole|> return false;
};
var type = syn.typeable;
var typeableExp = /input|textarea/i;
type(function (el) {
return typeableExp.test(el.nodeName);
});
type(function (el) {
return __indexOf.call([
'',
'true'
], el.getAttribute('contenteditable')) !== -1;
});<|fim▁end|> | }
} |
<|file_name|>linear_weighted_moving_average.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from pyti import catch_errors
from pyti.function_helper import fill_for_noncomputable_vals
from six.moves import range
def linear_weighted_moving_average(data, period):
"""
Linear Weighted Moving Average.
Formula:
LWMA = SUM(DATA[i]) * i / SUM(i)
"""
catch_errors.check_for_period_error(data, period)
idx_period = list(range(1, period+1))
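    # For every window ending at idx, weight each value by its 1-based
    # position inside the window (the newest value gets the largest weight)
    # and divide by the sum of the weights.  Note the lookup is value-based,
    # so duplicate values within a window reuse the first match's weight.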
lwma = [(sum([i * idx_period[data[idx-(period-1):idx+1].index(i)]
for i in data[idx-(period-1):idx+1]])) /
sum(range(1, len(data[idx+1-period:idx+1])+1)) for idx in range(period-1, len(data))]
lwma = fill_for_noncomputable_vals(data, lwma)
return lwma<|fim▁end|> | from __future__ import absolute_import |
<|file_name|>interfaces.py<|end_file_name|><|fim▁begin|># orm/interfaces.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
Contains various base classes used throughout the ORM.
Defines the now deprecated ORM extension classes as well
as ORM internals.
Other than the deprecated extensions, this module and the
classes within should be considered mostly private.
"""
from __future__ import absolute_import
from .. import exc as sa_exc, util, inspect
from ..sql import operators
from collections import deque
orm_util = util.importlater('sqlalchemy.orm', 'util')
collections = util.importlater('sqlalchemy.orm', 'collections')
__all__ = (
'AttributeExtension',
'EXT_CONTINUE',
'EXT_STOP',
'ExtensionOption',
'InstrumentationManager',
'LoaderStrategy',
'MapperExtension',
'MapperOption',
'MapperProperty',
'PropComparator',
'PropertyOption',
'SessionExtension',
'StrategizedOption',
'StrategizedProperty',
)
EXT_CONTINUE = util.symbol('EXT_CONTINUE')
EXT_STOP = util.symbol('EXT_STOP')
ONETOMANY = util.symbol('ONETOMANY')
MANYTOONE = util.symbol('MANYTOONE')
MANYTOMANY = util.symbol('MANYTOMANY')
from .deprecated_interfaces import AttributeExtension, \
SessionExtension, \
MapperExtension
NOT_EXTENSION = util.symbol('NOT_EXTENSION')
"""Symbol indicating an :class:`_InspectionAttr` that's
not part of sqlalchemy.ext.
Is assigned to the :attr:`._InspectionAttr.extension_type`
attibute.
"""
class _InspectionAttr(object):
"""A base class applied to all ORM objects that can be returned
by the :func:`.inspect` function.
The attributes defined here allow the usage of simple boolean
checks to test basic facts about the object returned.
While the boolean checks here are basically the same as using
the Python isinstance() function, the flags here can be used without
the need to import all of these classes, and also such that
the SQLAlchemy class system can change while leaving the flags
here intact for forwards-compatibility.
"""
is_selectable = False
"""Return True if this object is an instance of :class:`.Selectable`."""
is_aliased_class = False
"""True if this object is an instance of :class:`.AliasedClass`."""
is_instance = False
"""True if this object is an instance of :class:`.InstanceState`."""
is_mapper = False
"""True if this object is an instance of :class:`.Mapper`."""
is_property = False
"""True if this object is an instance of :class:`.MapperProperty`."""
is_attribute = False
"""True if this object is a Python :term:`descriptor`.
This can refer to one of many types. Usually a
    :class:`.QueryableAttribute` which handles attribute events on behalf
of a :class:`.MapperProperty`. But can also be an extension type
such as :class:`.AssociationProxy` or :class:`.hybrid_property`.
The :attr:`._InspectionAttr.extension_type` will refer to a constant
identifying the specific subtype.
.. seealso::
:attr:`.Mapper.all_orm_descriptors`
"""
is_clause_element = False
"""True if this object is an instance of :class:`.ClauseElement`."""
extension_type = NOT_EXTENSION
"""The extension type, if any.
Defaults to :data:`.interfaces.NOT_EXTENSION`
.. versionadded:: 0.8.0
.. seealso::
:data:`.HYBRID_METHOD`
:data:`.HYBRID_PROPERTY`
:data:`.ASSOCIATION_PROXY`
"""
class _MappedAttribute(object):
"""Mixin for attributes which should be replaced by mapper-assigned
attributes.
"""
class MapperProperty(_MappedAttribute, _InspectionAttr):
"""Manage the relationship of a ``Mapper`` to a single class
attribute, as well as that attribute as it appears on individual
instances of the class, including attribute instrumentation,
attribute access, loading behavior, and dependency calculations.
The most common occurrences of :class:`.MapperProperty` are the
mapped :class:`.Column`, which is represented in a mapping as
an instance of :class:`.ColumnProperty`,
and a reference to another class produced by :func:`.relationship`,
represented in the mapping as an instance of
:class:`.RelationshipProperty`.
"""
cascade = frozenset()
"""The set of 'cascade' attribute names.
This collection is checked before the 'cascade_iterator' method is called.
"""
is_property = True
def setup(self, context, entity, path, adapter, **kwargs):
"""Called by Query for the purposes of constructing a SQL statement.
Each MapperProperty associated with the target mapper processes the
statement referenced by the query context, adding columns and/or
criterion as appropriate.
"""
pass
def create_row_processor(self, context, path,
mapper, row, adapter):
"""Return a 3-tuple consisting of three row processing functions.
"""
return None, None, None
def cascade_iterator(self, type_, state, visited_instances=None,
halt_on=None):
"""Iterate through instances related to the given instance for
a particular 'cascade', starting with this MapperProperty.
        Return an iterator of 3-tuples (instance, mapper, state).
Note that the 'cascade' collection on this MapperProperty is
checked first for the given type before cascade_iterator is called.
See PropertyLoader for the related instance implementation.
"""
return iter(())
def set_parent(self, parent, init):
self.parent = parent
def instrument_class(self, mapper): # pragma: no-coverage
raise NotImplementedError()
@util.memoized_property
def info(self):
"""Info dictionary associated with the object, allowing user-defined
data to be associated with this :class:`.MapperProperty`.
The dictionary is generated when first accessed. Alternatively,
it can be specified as a constructor argument to the
:func:`.column_property`, :func:`.relationship`, or :func:`.composite`
functions.
.. versionadded:: 0.8 Added support for .info to all
:class:`.MapperProperty` subclasses.
.. seealso::
:attr:`.QueryableAttribute.info`
:attr:`.SchemaItem.info`
"""
return {}
_configure_started = False
_configure_finished = False
def init(self):
"""Called after all mappers are created to assemble
relationships between mappers and perform other post-mapper-creation
initialization steps.
"""
self._configure_started = True
self.do_init()
self._configure_finished = True
@property
def class_attribute(self):
"""Return the class-bound descriptor corresponding to this
MapperProperty."""
return getattr(self.parent.class_, self.key)
def do_init(self):
"""Perform subclass-specific initialization post-mapper-creation
steps.
This is a template method called by the ``MapperProperty``
object's init() method.
"""
pass
def post_instrument_class(self, mapper):
"""Perform instrumentation adjustments that need to occur
after init() has completed.
"""
pass
def is_primary(self):
"""Return True if this ``MapperProperty``'s mapper is the
primary mapper for its class.
This flag is used to indicate that the ``MapperProperty`` can
define attribute instrumentation for the class at the class
level (as opposed to the individual instance level).
"""
return not self.parent.non_primary
def merge(self, session, source_state, source_dict, dest_state,
dest_dict, load, _recursive):
"""Merge the attribute represented by this ``MapperProperty``
from source to destination object"""
pass
def compare(self, operator, value, **kw):
"""Return a compare operation for the columns represented by
this ``MapperProperty`` to the given value, which may be a
column value or an instance. 'operator' is an operator from
the operators module, or from sql.Comparator.
By default uses the PropComparator attached to this MapperProperty
under the attribute name "comparator".
"""
return operator(self.comparator, value)
def __repr__(self):
return '<%s at 0x%x; %s>' % (
self.__class__.__name__,
id(self), getattr(self, 'key', 'no key'))
class PropComparator(operators.ColumnOperators):
"""Defines boolean, comparison, and other operators for
:class:`.MapperProperty` objects.
SQLAlchemy allows for operators to
be redefined at both the Core and ORM level. :class:`.PropComparator`
is the base class of operator redefinition for ORM-level operations,
including those of :class:`.ColumnProperty`,
:class:`.RelationshipProperty`, and :class:`.CompositeProperty`.
.. note:: With the advent of Hybrid properties introduced in SQLAlchemy
0.7, as well as Core-level operator redefinition in
SQLAlchemy 0.8, the use case for user-defined :class:`.PropComparator`
instances is extremely rare. See :ref:`hybrids_toplevel` as well
as :ref:`types_operators`.
User-defined subclasses of :class:`.PropComparator` may be created. The
built-in Python comparison and math operator methods, such as
:meth:`.operators.ColumnOperators.__eq__`,
:meth:`.operators.ColumnOperators.__lt__`, and
:meth:`.operators.ColumnOperators.__add__`, can be overridden to provide
new operator behavior. The custom :class:`.PropComparator` is passed to
the :class:`.MapperProperty` instance via the ``comparator_factory``
argument. In each case,
the appropriate subclass of :class:`.PropComparator` should be used::
# definition of custom PropComparator subclasses
from sqlalchemy.orm.properties import \\
ColumnProperty,\\
CompositeProperty,\\
RelationshipProperty
class MyColumnComparator(ColumnProperty.Comparator):
def __eq__(self, other):
return self.__clause_element__() == other
class MyRelationshipComparator(RelationshipProperty.Comparator):
def any(self, expression):
"define the 'any' operation"
# ...
class MyCompositeComparator(CompositeProperty.Comparator):
def __gt__(self, other):
"redefine the 'greater than' operation"
return sql.and_(*[a>b for a, b in
zip(self.__clause_element__().clauses,
other.__composite_values__())])
# application of custom PropComparator subclasses
from sqlalchemy.orm import column_property, relationship, composite
from sqlalchemy import Column, String
class SomeMappedClass(Base):
some_column = column_property(Column("some_column", String),
comparator_factory=MyColumnComparator)
some_relationship = relationship(SomeOtherClass,
comparator_factory=MyRelationshipComparator)
some_composite = composite(
Column("a", String), Column("b", String),
comparator_factory=MyCompositeComparator
)
Note that for column-level operator redefinition, it's usually
simpler to define the operators at the Core level, using the
:attr:`.TypeEngine.comparator_factory` attribute. See
:ref:`types_operators` for more detail.
See also:
:class:`.ColumnProperty.Comparator`
:class:`.RelationshipProperty.Comparator`
:class:`.CompositeProperty.Comparator`
:class:`.ColumnOperators`
:ref:`types_operators`
:attr:`.TypeEngine.comparator_factory`
"""
def __init__(self, prop, parentmapper, adapt_to_entity=None):
self.prop = self.property = prop
self._parentmapper = parentmapper
self._adapt_to_entity = adapt_to_entity
def __clause_element__(self):
raise NotImplementedError("%r" % self)
def adapt_to_entity(self, adapt_to_entity):
"""Return a copy of this PropComparator which will use the given
:class:`.AliasedInsp` to produce corresponding expressions.
"""
return self.__class__(self.prop, self._parentmapper, adapt_to_entity)
@property
def adapter(self):
"""Produce a callable that adapts column expressions
to suit an aliased version of this comparator.
"""
if self._adapt_to_entity is None:
return None
else:
return self._adapt_to_entity._adapt_element
@util.memoized_property
def info(self):
return self.property.info
@staticmethod
def any_op(a, b, **kwargs):
return a.any(b, **kwargs)
@staticmethod
def has_op(a, b, **kwargs):
return a.has(b, **kwargs)
@staticmethod
def of_type_op(a, class_):
return a.of_type(class_)
def of_type(self, class_):
"""Redefine this object in terms of a polymorphic subclass.
Returns a new PropComparator from which further criterion can be
evaluated.
e.g.::
query.join(Company.employees.of_type(Engineer)).\\
filter(Engineer.name=='foo')
:param \class_: a class or mapper indicating that criterion will be
against this specific subclass.
"""
return self.operate(PropComparator.of_type_op, class_)
def any(self, criterion=None, **kwargs):
"""Return true if this collection contains any member that meets the
given criterion.
The usual implementation of ``any()`` is
:meth:`.RelationshipProperty.Comparator.any`.
:param criterion: an optional ClauseElement formulated against the
member class' table or attributes.
:param \**kwargs: key/value pairs corresponding to member class
attribute names which will be compared via equality to the
corresponding values.
"""
return self.operate(PropComparator.any_op, criterion, **kwargs)
def has(self, criterion=None, **kwargs):
"""Return true if this element references a member which meets the
given criterion.
The usual implementation of ``has()`` is
:meth:`.RelationshipProperty.Comparator.has`.
:param criterion: an optional ClauseElement formulated against the
member class' table or attributes.
:param \**kwargs: key/value pairs corresponding to member class
attribute names which will be compared via equality to the
corresponding values.
"""
return self.operate(PropComparator.has_op, criterion, **kwargs)
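# Illustrative usage (not part of the original source), assuming mapped
# ``User``/``Address`` classes with a ``User.addresses`` relationship:
#
#   session.query(User).filter(
#       User.addresses.any(Address.email_address == 'ed@example.com'))
#   session.query(Address).filter(Address.user.has(name='ed'))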
class StrategizedProperty(MapperProperty):
"""A MapperProperty which uses selectable strategies to affect
loading behavior.
There is a single strategy selected by default. Alternate
strategies can be selected at Query time through the usage of
``StrategizedOption`` objects via the Query.options() method.
"""
strategy_wildcard_key = None
@util.memoized_property
def _wildcard_path(self):
if self.strategy_wildcard_key:
return ('loaderstrategy', (self.strategy_wildcard_key,))
else:
return None
def _get_context_strategy(self, context, path):
strategy_cls = path._inlined_get_for(self, context, 'loaderstrategy')
if not strategy_cls:
wc_key = self._wildcard_path
if wc_key and wc_key in context.attributes:
strategy_cls = context.attributes[wc_key]
if strategy_cls:
try:
return self._strategies[strategy_cls]
except KeyError:
return self.__init_strategy(strategy_cls)
return self.strategy
def _get_strategy(self, cls):
try:
return self._strategies[cls]
except KeyError:
return self.__init_strategy(cls)
def __init_strategy(self, cls):
self._strategies[cls] = strategy = cls(self)
return strategy
def setup(self, context, entity, path, adapter, **kwargs):
self._get_context_strategy(context, path).\
setup_query(context, entity, path,
adapter, **kwargs)
def create_row_processor(self, context, path, mapper, row, adapter):
return self._get_context_strategy(context, path).\
create_row_processor(context, path,
mapper, row, adapter)
def do_init(self):
self._strategies = {}
self.strategy = self.__init_strategy(self.strategy_class)
def post_instrument_class(self, mapper):
if self.is_primary() and \
not mapper.class_manager._attr_has_impl(self.key):
self.strategy.init_class_attribute(mapper)
class MapperOption(object):
"""Describe a modification to a Query."""
propagate_to_loaders = False
"""if True, indicate this option should be carried along
Query object generated by scalar or object lazy loaders.
"""
def process_query(self, query):
pass
def process_query_conditionally(self, query):
"""same as process_query(), except that this option may not
apply to the given query.
Used when secondary loaders resend existing options to a new
Query."""
self.process_query(query)
class PropertyOption(MapperOption):
"""A MapperOption that is applied to a property off the mapper or
one of its child mappers, identified by a dot-separated key
or list of class-bound attributes. """
def __init__(self, key, mapper=None):
self.key = key
self.mapper = mapper
def process_query(self, query):
self._process(query, True)
def process_query_conditionally(self, query):
self._process(query, False)
def _process(self, query, raiseerr):
paths = self._process_paths(query, raiseerr)
if paths:
self.process_query_property(query, paths)
def process_query_property(self, query, paths):
pass
def __getstate__(self):
d = self.__dict__.copy()
d['key'] = ret = []
for token in util.to_list(self.key):
if isinstance(token, PropComparator):
ret.append((token._parentmapper.class_, token.key))
else:
ret.append(token)
return d
def __setstate__(self, state):
ret = []
for key in state['key']:
if isinstance(key, tuple):
cls, propkey = key
ret.append(getattr(cls, propkey))
else:
ret.append(key)
state['key'] = tuple(ret)
self.__dict__ = state
def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
if orm_util._is_aliased_class(mapper):
searchfor = mapper
else:
searchfor = orm_util._class_to_mapper(mapper)
for ent in query._mapper_entities:
if ent.corresponds_to(searchfor):
return ent
else:
if raiseerr:
if not list(query._mapper_entities):
raise sa_exc.ArgumentError(
"Query has only expression-based entities - "
"can't find property named '%s'."
% (token, )
)
else:
raise sa_exc.ArgumentError(
"Can't find property '%s' on any entity "
"specified in this Query. Note the full path "
"from root (%s) to target entity must be specified."
% (token, ",".join(str(x) for
x in query._mapper_entities))
)
else:
return None
def _find_entity_basestring(self, query, token, raiseerr):
for ent in query._mapper_entities:
# return only the first _MapperEntity when searching
# based on string prop name. Ideally object
# attributes are used to specify more exactly.
return ent
else:
if raiseerr:
raise sa_exc.ArgumentError(
"Query has only expression-based entities - "
"can't find property named '%s'."
% (token, )
)
else:
return None
def _process_paths(self, query, raiseerr):
"""reconcile the 'key' for this PropertyOption with
the current path and entities of the query.
Return a list of affected paths.
"""
path = orm_util.PathRegistry.root
entity = None
paths = []
no_result = []
# _current_path implies we're in a
# secondary load with an existing path
current_path = list(query._current_path.path)
tokens = deque(self.key)
while tokens:
token = tokens.popleft()<|fim▁hole|> if token.endswith(':*'):
return [path.token(token)]
sub_tokens = token.split(".", 1)
token = sub_tokens[0]
tokens.extendleft(sub_tokens[1:])
# exhaust current_path before
# matching tokens to entities
if current_path:
if current_path[1].key == token:
current_path = current_path[2:]
continue
else:
return no_result
if not entity:
entity = self._find_entity_basestring(
query,
token,
raiseerr)
if entity is None:
return no_result
path_element = entity.entity_zero
mapper = entity.mapper
if hasattr(mapper.class_, token):
prop = getattr(mapper.class_, token).property
else:
if raiseerr:
raise sa_exc.ArgumentError(
"Can't find property named '%s' on the "
"mapped entity %s in this Query. " % (
token, mapper)
)
else:
return no_result
elif isinstance(token, PropComparator):
prop = token.property
# exhaust current_path before
# matching tokens to entities
if current_path:
if current_path[0:2] == \
[token._parententity, prop]:
current_path = current_path[2:]
continue
else:
return no_result
if not entity:
entity = self._find_entity_prop_comparator(
query,
prop.key,
token._parententity,
raiseerr)
if not entity:
return no_result
path_element = entity.entity_zero
mapper = entity.mapper
else:
raise sa_exc.ArgumentError(
"mapper option expects "
"string key or list of attributes")
assert prop is not None
if raiseerr and not prop.parent.common_parent(mapper):
raise sa_exc.ArgumentError("Attribute '%s' does not "
"link from element '%s'" % (token, path_element))
path = path[path_element][prop]
paths.append(path)
if getattr(token, '_of_type', None):
ac = token._of_type
ext_info = inspect(ac)
path_element = mapper = ext_info.mapper
if not ext_info.is_aliased_class:
ac = orm_util.with_polymorphic(
ext_info.mapper.base_mapper,
ext_info.mapper, aliased=True,
_use_mapper_path=True)
ext_info = inspect(ac)
path.set(query._attributes, "path_with_polymorphic", ext_info)
else:
path_element = mapper = getattr(prop, 'mapper', None)
if mapper is None and tokens:
raise sa_exc.ArgumentError(
"Attribute '%s' of entity '%s' does not "
"refer to a mapped entity" %
(token, entity)
)
if current_path:
# ran out of tokens before
# current_path was exhausted.
assert not tokens
return no_result
return paths
class StrategizedOption(PropertyOption):
"""A MapperOption that affects which LoaderStrategy will be used
for an operation by a StrategizedProperty.
"""
chained = False
def process_query_property(self, query, paths):
strategy = self.get_strategy_class()
if self.chained:
for path in paths:
path.set(
query._attributes,
"loaderstrategy",
strategy
)
else:
paths[-1].set(
query._attributes,
"loaderstrategy",
strategy
)
def get_strategy_class(self):
raise NotImplementedError()
class LoaderStrategy(object):
"""Describe the loading behavior of a StrategizedProperty object.
The ``LoaderStrategy`` interacts with the querying process in three
ways:
* it controls the configuration of the ``InstrumentedAttribute``
placed on a class to handle the behavior of the attribute. this
may involve setting up class-level callable functions to fire
off a select operation when the attribute is first accessed
(i.e. a lazy load)
* it processes the ``QueryContext`` at statement construction time,
where it can modify the SQL statement that is being produced.
Simple column attributes may add their represented column to the
list of selected columns, *eager loading* properties may add
``LEFT OUTER JOIN`` clauses to the statement.
* It produces "row processor" functions at result fetching time.
These "row processor" functions populate a particular attribute
on a particular mapped instance.
"""
def __init__(self, parent):
self.parent_property = parent
self.is_class_level = False
self.parent = self.parent_property.parent
self.key = self.parent_property.key
def init_class_attribute(self, mapper):
pass
def setup_query(self, context, entity, path, adapter, **kwargs):
pass
def create_row_processor(self, context, path, mapper,
row, adapter):
"""Return row processing functions which fulfill the contract
specified by MapperProperty.create_row_processor.
StrategizedProperty delegates its create_row_processor method
directly to this method. """
return None, None, None
def __str__(self):
return str(self.parent_property)<|fim▁end|> | if isinstance(token, str):
# wildcard token |
<|file_name|>chart.js<|end_file_name|><|fim▁begin|>const chart = {
format: '{point.name}: {point.y:,.2f}',
colorNames: [
'success', 'info', 'warning', 'danger',
'primary', 'highlight', 'default'
],
colors: [],
patterns: [],
style: {},
plotOptions: function () {
return {
series: {
animation: false,
dataLabels: {
enabled: true,
format: this.format
},
cursor: 'pointer',
borderWidth: 3,
}
}
},
responsivePlotOptions: function () {
return {
series: {
animation: false,
dataLabels: {
format: this.format
}
}
}
},
rule: function () {
return {
condition: {
maxWidth: 500
},
chartOptions: {
plotOptions: this.responsivePlotOptions()
}
}
},
getPattern: function(index, colors) {
const p = index % Highcharts.patterns.length;
return {
pattern: Highcharts.merge(
Highcharts.patterns[p],
{ color: colors[index] }
)
}
},
cssVar: function(name) {
return this.style.getPropertyValue(`--${name}`)
},
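// Note: cssVar() already prepends the "--" prefix, so callers pass the bare
// name, e.g. cssVar('primary-0') reads the custom property --primary-0.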
init: function(decimal, thousand) {
this.style = getComputedStyle(document.documentElement)
for (let p = 0; p < 4; p++) {
for (let n = 0; n < this.colorNames.length; n++) {
const color = this.cssVar(
`${this.colorNames[n]}-${p}`<|fim▁hole|>
this.colors.push(color)
}
}
this.patterns = this.colors.map(
(_, index, array) => this.getPattern(index, array)
)
Highcharts.setOptions({
chart: {
style: {
fontFamily: this.cssVar('font-general'),
},
},
lang: {
decimalPoint: decimal,
thousandsSep: thousand,
},
});
},
draw: function(id, title, seriesName, data) {
const chart = Highcharts.chart(
id,
{
chart: { type: 'pie' },
title: {
text: title,
style: {
color: this.cssVar('primary-0'),
}
},
colors: this.patterns,
plotOptions: this.plotOptions(),
series: [{ name: seriesName, data: data }],
responsive: { rules: [this.rule()] },
}
)
const that = this
$('#patterns-enabled').click(function () {
chart.update({
colors: this.checked ? that.patterns : that.colors
})
})
},
}<|fim▁end|> | ) |
<|file_name|>diasql.py<|end_file_name|><|fim▁begin|># PyDia SQL.py : SQL dump.
# Copy it to /usr/share/dia/python
import dia
# import sys
# import os
import string
import re
import datetime
<|fim▁hole|> self.f = None
def begin_render(self, data, filename):
self.f = open(filename, "w")
# name = os.path.split(filename)[1]
self.f.write('''BEGIN TRANSACTION;\n''')
for layer in data.layers:
self.WriteTables(layer)
def WriteTables(self, layer):
tables = {}
appdata = 'appdata'
priority = {'fields': 0, 'foreign_keys': 100}
# value for id
z = ["INSERT INTO zf VALUES ('id', 'No', 'INTEGER', '1');"]
z.append("INSERT INTO z VALUES('diadate', '%s');" % datetime.date.today().isoformat())
zsql = "INSERT INTO z VALUES('%s', '%s');"
zfsql = "INSERT INTO zf VALUES ('%s', '%s', '%s', '%s');"
ztsql = "INSERT INTO zt VALUES ('%s', '%s', '%s', '%s');"
for o in layer.objects:
if o.type.name == 'Database - Table':
if "name" in o.properties.keys():
table = o.properties["name"].value
elif "text" in o.properties.keys():
table = o.properties["text"].value.text
else:
continue
if len(table) == 0 or string.find(table, " ") >= 0:
continue
if table not in tables.keys():
tables[table] = ''
if table == appdata:
attrs = o.properties['attributes'].value
for attr in attrs:
z.append(zsql % (attr[0], attr[1]))
continue
# zt.append(comment)
# first line is label
# second line is label plural
# third line is rpr
clst = o.properties['comment'].value.split('\n')
if len(clst) >= 3:
z.append(ztsql % (table, clst[0], clst[1], clst[2]))
atributes = o.properties['attributes'].value
for i in range(0, len(atributes)):
a = atributes[i]
if a[0] == 'id':
tables[table] = '%0.3d\tid INTEGER PRIMARY KEY\n' %\
(priority['fields'] + i)
continue
if len(a[0]) > 4:
if a[0][-3:] == '_id':
nnul = ''
if a[4] == 0:
nnul = ' NOT NULL'
tables[table] += '%0.3d\t%s INTEGER%s REFERENCES %s(id)\n' % (priority['fields'] + i, a[0], nnul, a[0][:-3])
continue
tipo = ''
if re.match('.*enum\(.*', a[1], re.I):
tipo = a[1]
else:
tipo = a[1].upper()
if tipo == '':
tipo = 'TEXT'
tables[table] += '%0.3d\t%s %s' % (priority['fields'] + i, a[0], tipo)
if a[3] == 1:
tables[table] += ' PRIMARY KEY'
if a[4] == 0:
if a[3] != 1:
tables[table] += ' NOT NULL'
notnull = 1
else:
tables[table] += ''
notnull = 0
if a[5] == 1:
if a[3] != 1:
tables[table] += ' UNIQUE'
# Create insert for table zflbl
if (len(a[2]) > 0):
z.append(zfsql % (a[0], a[2], tipo, notnull))
tables[table] += '\n'
elif o.type.name == 'Database - Reference':
continue
for k in sorted(tables.keys()):
# self.f.write('\n-- %s --\nDROP TABLE IF EXISTS `%s`;\n' % (k,k) )
if k != appdata:
self.f.write('CREATE TABLE IF NOT EXISTS %s (\n' % k)
sentences = sorted(tables[k].split('\n'))
sentences = [str(s[3:]) for s in sentences if len(s) > 4]
sentences = ",\n".join(sentences)
self.f.write('%s\n' % sentences)
self.f.write(');\n')
self.f.write('CREATE TABLE IF NOT EXISTS z (key TEXT PRIMARY KEY, val TEXT NOT NULL);\n')
self.f.write('CREATE TABLE IF NOT EXISTS zt (tbl TEXT PRIMARY KEY, tlbl TEXT NOT NULL UNIQUE, tlblp TEXT NOT NULL UNIQUE, rpr TEXT NOT NULL);\n')
self.f.write('CREATE TABLE IF NOT EXISTS zf (fld TEXT PRIMARY KEY, flbl TEXT NOT NULL UNIQUE, typos TEXT NOT NULL, nonull INTEGER NOT NULL DEFAULT 1);\n')
self.f.write('\n'.join(sorted(z)))
self.f.write('\n')
def end_render(self):
self.f.write('COMMIT;\n')
self.f.close()
# reference
dia.register_export("PyDia SQL generator", "sql", SQLRenderer())<|fim▁end|> |
class SQLRenderer:
def __init__(self): |
<|file_name|>_UpdateSpaceRequest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.client.v2.spaces;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import org.cloudfoundry.Nullable;
import org.immutables.value.Value;
import java.util.List;
/**
* The request payload for the Update a Space operation
*/
@JsonSerialize
@Value.Immutable
abstract class _UpdateSpaceRequest {
/**
* Allow SSH
*/
@JsonProperty("allow_ssh")
@Nullable
abstract Boolean getAllowSsh();
/**
* The auditor ids
*/
@JsonProperty("auditor_guids")
@Nullable
abstract List<String> getAuditorIds();
/**
* The developer ids
*/
@JsonProperty("developer_guids")
@Nullable
abstract List<String> getDeveloperIds();
/**
* The domain ids
*/
@JsonProperty("domain_guids")
@Nullable
abstract List<String> getDomainIds();
/**
* The manager ids
*/
@JsonProperty("manager_guids")
@Nullable
abstract List<String> getManagerIds();
/**
* The name
*/
@JsonProperty("name")
@Nullable
abstract String getName();
/**
* The organization id
*/
@JsonProperty("organization_guid")
@Nullable
abstract String getOrganizationId();
/**
* The security group ids
*/
@JsonProperty("security_group_guids")
@Nullable
abstract List<String> getSecurityGroupIds();
/**
* The space id
*/<|fim▁hole|> abstract String getSpaceId();
}<|fim▁end|> | @JsonIgnore |
<|file_name|>db.py<|end_file_name|><|fim▁begin|>import pickle
import redis
from pod_manager.settings import REDIS_HOST, REDIS_PORT, REDIS_DB
__all__ = [
'get_client',
'cache_object',
'get_object'
]
def get_client():
client = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, db=REDIS_DB)
return client
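# Illustrative usage (the key name is hypothetical):
#   client = get_client()
#   cache_object(client, 'pod:1', pod, ttl=300)  # pickle + SET + EXPIRE
#   pod = get_object(client, 'pod:1')             # returns None on a miss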
def cache_object(client, key, obj, ttl=60):
pipe = client.pipeline()
data = pickle.dumps(obj)
pipe.set(key, data)
if ttl:
pipe.expire(key, ttl)
pipe.execute()<|fim▁hole|> if not data:
return None
obj = pickle.loads(data)
return obj<|fim▁end|> |
def get_object(client, key):
data = client.get(key)
|
<|file_name|>expected.js<|end_file_name|><|fim▁begin|>"use strict";<|fim▁hole|>});
exports.default = function () {};
exports.default = foo;
exports.default = 42;
exports.default = {};
exports.default = [];
exports.default = foo;
exports.default = class {};
function foo() {}
class Foo {}
exports.default = Foo;
exports.default = foo;
exports.default = (function () {
return "foo";
})();<|fim▁end|> |
Object.defineProperty(exports, "__esModule", {
value: true |
<|file_name|>environment.js<|end_file_name|><|fim▁begin|>"use strict"<|fim▁hole|> try {
mentionBotEnvConfig = JSON.parse(process.env.MENTION_BOT_CONFIG);
} catch(e) {
mentionBotEnvConfig = {};
}
return Object.keys(config).reduce((previousValue, key) => {
let defaultConfigValue = config[key];
let environmentVariable = mentionBotEnvConfig[key];
let configElement = {};
configElement[key] = environmentVariable === undefined ? defaultConfigValue
: environmentVariable;
return {...previousValue, ...configElement};
}, {});
}
module.exports = {
checkEnvironmentForConfig
}<|fim▁end|> |
function checkEnvironmentForConfig(config:Object) : Object {
let mentionBotEnvConfig;
|
<|file_name|>J.cpp<|end_file_name|><|fim▁begin|>#include<bits/stdc++.h>
using namespace std;
typedef long long LL;
LL n,m,A,B,C;
LL solve1(LL x,LL y){
LL ans=0;
ans+=y*max(max(A,B),C);
if(x%2==1){<|fim▁hole|> }
return ans;
}
LL solve2(LL x,LL y){
LL ans=0;
ans+=x*min(min(A,B),C);
if(y%2==1){
ans+=min((y-1)*(A+C)/2,(y-1)*B);
}else{
ans+=min((y-2)*(A+C)/2,(y-2)*B);
ans+=min(B,max(A,C));
}
return ans;
}
int main(){
int T;cin>>T;
for(int t=1;t<=T;t++){
cin>>n>>m;
cin>>A>>B>>C;
printf("Case #%d: %lld %lld\n",t,solve1(m+1,n-m-1),solve2(m-1,n-m+1));
}
return 0;
}<|fim▁end|> | ans+=max((x-1)*(A+C)/2,(x-1)*B);
}else{
ans+=max((x-2)*(A+C)/2,(x-2)*B);
ans+=max(B,min(A,C)); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""SymPy is a Python library for symbolic mathematics. It aims to become a
full-featured computer algebra system (CAS) while keeping the code as
simple as possible in order to be comprehensible and easily extensible.
SymPy is written entirely in Python and does not require any external
libraries, except optionally for plotting support.
See the webpage for more information and documentation:
http://sympy.org
"""
from __future__ import absolute_import, print_function
from sympy.release import __version__
import sys
if sys.version_info[0] == 2 and sys.version_info[1] < 6:
raise ImportError("Python Version 2.6 or above is required for SymPy.")
else: # Python 3
pass
# Here we can also check for specific Python 3 versions, if needed
del sys
def __sympy_debug():
# helper function so we don't import os globally
import os
debug_str = os.getenv('SYMPY_DEBUG', 'False')
if debug_str in ('True', 'False'):
return eval(debug_str)
else:
raise RuntimeError("unrecognized value for SYMPY_DEBUG: %s" %
debug_str)
SYMPY_DEBUG = __sympy_debug()
from .core import *
from .logic import *
from .assumptions import *
from .polys import *
from .series import *
from .functions import *
from .ntheory import *
from .concrete import *
from .simplify import *
from .sets import *
from .solvers import *
from .matrices import *
from .geometry import *
from .utilities import *
from .integrals import *
from .tensor import *
from .parsing import *
from .calculus import *
# Adds about .04-.05 seconds of import time
# from combinatorics import *
# This module is slow to import:
#from physics import units
from .plotting import plot, textplot, plot_backends, plot_implicit
from .printing import pretty, pretty_print, pprint, pprint_use_unicode, \
pprint_try_use_unicode, print_gtk, print_tree, pager_print, TableForm
from .printing import ccode, fcode, jscode, mathematica_code, octave_code, \
latex, preview
from .printing import python, print_python, srepr, sstr, sstrrepr
from .interactive import init_session, init_printing<|fim▁hole|>
evalf._create_evalf_table()
# This is slow to import:
#import abc<|fim▁end|> | |
<|file_name|>comm_adapters.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use clone::Clone;
use cmp;
use sync::mpsc::{Sender, Receiver};
use old_io;
use option::Option::{None, Some};
use result::Result::{Ok, Err};
use slice::{bytes, SliceExt};
use super::{Buffer, Reader, Writer, IoResult};
use vec::Vec;
/// Allows reading from a rx.
///
/// # Example
///
/// ```
/// use std::sync::mpsc::channel;
/// use std::old_io::ChanReader;
///
/// let (tx, rx) = channel();
/// # drop(tx);
/// let mut reader = ChanReader::new(rx);
///
/// let mut buf = [0u8; 100];
/// match reader.read(&mut buf) {
/// Ok(nread) => println!("Read {} bytes", nread),
/// Err(e) => println!("read error: {}", e),
/// }
/// ```
pub struct ChanReader {
buf: Vec<u8>, // A buffer of bytes received but not consumed.
pos: uint, // How many of the buffered bytes have already be consumed.
rx: Receiver<Vec<u8>>, // The Receiver to pull data from.
closed: bool, // Whether the channel this Receiver connects to has been closed.
}
impl ChanReader {
/// Wraps a `Port` in a `ChanReader` structure
pub fn new(rx: Receiver<Vec<u8>>) -> ChanReader {
ChanReader {
buf: Vec::new(),
pos: 0,
rx: rx,
closed: false,
}
}
}
impl Buffer for ChanReader {
fn fill_buf<'a>(&'a mut self) -> IoResult<&'a [u8]> {
if self.pos >= self.buf.len() {
self.pos = 0;
match self.rx.recv() {
Ok(bytes) => {
self.buf = bytes;
},
Err(..) => {
self.closed = true;
self.buf = Vec::new();
}
}
}
if self.closed {
Err(old_io::standard_error(old_io::EndOfFile))
} else {
Ok(&self.buf[self.pos..])
}
}
fn consume(&mut self, amt: uint) {
self.pos += amt;
assert!(self.pos <= self.buf.len());
}
}
impl Reader for ChanReader {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
let mut num_read = 0;
loop {
let count = match self.fill_buf().ok() {
Some(src) => {
let dst = &mut buf[num_read..];
let count = cmp::min(src.len(), dst.len());
bytes::copy_memory(dst, &src[..count]);
count
},
None => 0,
};
self.consume(count);
num_read += count;
if num_read == buf.len() || self.closed {
break;
}
}
if self.closed && num_read == 0 {
Err(old_io::standard_error(old_io::EndOfFile))
} else {
Ok(num_read)
}
}
}
/// Allows writing to a tx.
///
/// # Example
///
/// ```
/// # #![allow(unused_must_use)]
/// use std::sync::mpsc::channel;
/// use std::old_io::ChanWriter;
///
/// let (tx, rx) = channel();
/// # drop(rx);
/// let mut writer = ChanWriter::new(tx);
/// writer.write("hello, world".as_bytes());
/// ```
pub struct ChanWriter {
tx: Sender<Vec<u8>>,
}
impl ChanWriter {
/// Wraps a channel in a `ChanWriter` structure
pub fn new(tx: Sender<Vec<u8>>) -> ChanWriter {
ChanWriter { tx: tx }
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl Clone for ChanWriter {
fn clone(&self) -> ChanWriter {
ChanWriter { tx: self.tx.clone() }
}
}
impl Writer for ChanWriter {
fn write_all(&mut self, buf: &[u8]) -> IoResult<()> {
self.tx.send(buf.to_vec()).map_err(|_| {
old_io::IoError {
kind: old_io::BrokenPipe,
desc: "Pipe closed",
detail: None
}
})
}
}
#[cfg(test)]
mod test {
use prelude::v1::*;
use sync::mpsc::channel;
use super::*;
use old_io;
use thread::Thread;
#[test]
fn test_rx_reader() {
let (tx, rx) = channel();
Thread::spawn(move|| {
tx.send(vec![1u8, 2u8]).unwrap();
tx.send(vec![]).unwrap();
tx.send(vec![3u8, 4u8]).unwrap();
tx.send(vec![5u8, 6u8]).unwrap();
tx.send(vec![7u8, 8u8]).unwrap();
});
let mut reader = ChanReader::new(rx);
let mut buf = [0u8; 3];
assert_eq!(Ok(0), reader.read(&mut []));
assert_eq!(Ok(3), reader.read(&mut buf));
let a: &[u8] = &[1,2,3];
assert_eq!(a, buf);
assert_eq!(Ok(3), reader.read(&mut buf));
let a: &[u8] = &[4,5,6];
assert_eq!(a, buf);
assert_eq!(Ok(2), reader.read(&mut buf));
let a: &[u8] = &[7,8,6];
assert_eq!(a, buf);
match reader.read(&mut buf) {
Ok(..) => panic!(),
Err(e) => assert_eq!(e.kind, old_io::EndOfFile),
}
assert_eq!(a, buf);
// Ensure it continues to panic in the same way.
match reader.read(&mut buf) {
Ok(..) => panic!(),
Err(e) => assert_eq!(e.kind, old_io::EndOfFile),
}
assert_eq!(a, buf);
}
#[test]
fn test_rx_buffer() {
let (tx, rx) = channel();
Thread::spawn(move|| {
tx.send(b"he".to_vec()).unwrap();
tx.send(b"llo wo".to_vec()).unwrap();
tx.send(b"".to_vec()).unwrap();
tx.send(b"rld\nhow ".to_vec()).unwrap();
tx.send(b"are you?".to_vec()).unwrap();
tx.send(b"".to_vec()).unwrap();
});
let mut reader = ChanReader::new(rx);
assert_eq!(Ok("hello world\n".to_string()), reader.read_line());
assert_eq!(Ok("how are you?".to_string()), reader.read_line());
match reader.read_line() {
Ok(..) => panic!(),
Err(e) => assert_eq!(e.kind, old_io::EndOfFile),
}
}
#[test]<|fim▁hole|>
let wanted = vec![0u8, 0u8, 0u8, 42u8];
let got = match Thread::scoped(move|| { rx.recv().unwrap() }).join() {
Ok(got) => got,
Err(_) => panic!(),
};
assert_eq!(wanted, got);
match writer.write_u8(1) {
Ok(..) => panic!(),
Err(e) => assert_eq!(e.kind, old_io::BrokenPipe),
}
}
}<|fim▁end|> | fn test_chan_writer() {
let (tx, rx) = channel();
let mut writer = ChanWriter::new(tx);
writer.write_be_u32(42).unwrap(); |
<|file_name|>RegisterUserModel.java<|end_file_name|><|fim▁begin|>package marcin_szyszka.mobileseconndhand.models;
/**
* Created by marcianno on 2016-03-02.
*/
public class RegisterUserModel {<|fim▁hole|>
public RegisterUserModel(){
}
}<|fim▁end|> | public String Email;
public String Password;
public String ConfirmPassword; |
<|file_name|>mnist-visualizations.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: mnist-visualizations.py
"""
The same MNIST ConvNet example, but with weights/activations visualization.
"""
import tensorflow as tf
from tensorpack import *
from tensorpack.dataflow import dataset
IMAGE_SIZE = 28
def visualize_conv_weights(filters, name):
"""Visualize use weights in convolution filters.
Args:
filters: tensor containing the weights [H,W,Cin,Cout]
name: label for tensorboard
Returns:
image of all weights
"""
with tf.name_scope('visualize_w_' + name):
filters = tf.transpose(filters, (3, 2, 0, 1)) # [h, w, cin, cout] -> [cout, cin, h, w]
filters = tf.unstack(filters) # --> cout * [cin, h, w]
filters = tf.concat(filters, 1) # --> [cin, cout * h, w]
filters = tf.unstack(filters) # --> cin * [cout * h, w]
filters = tf.concat(filters, 1) # --> [cout * h, cin * w]
filters = tf.expand_dims(filters, 0)
filters = tf.expand_dims(filters, -1)
tf.summary.image('visualize_w_' + name, filters)
def visualize_conv_activations(activation, name):
"""Visualize activations for convolution layers.
Remarks:
This tries to place all activations into a square.
Args:
activation: tensor with the activation [B,H,W,C]
name: label for tensorboard
Returns:
image of almost all activations
"""
import math
with tf.name_scope('visualize_act_' + name):
_, h, w, c = activation.get_shape().as_list()
rows = []
c_per_row = int(math.sqrt(c))
for y in range(0, c - c_per_row, c_per_row):
row = activation[:, :, :, y:y + c_per_row] # [?, H, W, 32] --> [?, H, W, 5]
cols = tf.unstack(row, axis=3) # [?, H, W, 5] --> 5 * [?, H, W]
row = tf.concat(cols, 1)
rows.append(row)
viz = tf.concat(rows, 2)
tf.summary.image('visualize_act_' + name, tf.expand_dims(viz, -1))
class Model(ModelDesc):
def inputs(self):
return [tf.placeholder(tf.float32, (None, IMAGE_SIZE, IMAGE_SIZE), 'input'),
tf.placeholder(tf.int32, (None,), 'label')]
def build_graph(self, image, label):
image = tf.expand_dims(image * 2 - 1, 3)
with argscope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32):
c0 = Conv2D('conv0', image)
p0 = MaxPooling('pool0', c0, 2)
c1 = Conv2D('conv1', p0)
c2 = Conv2D('conv2', c1)
p1 = MaxPooling('pool1', c2, 2)
c3 = Conv2D('conv3', p1)
fc1 = FullyConnected('fc0', c3, 512, nl=tf.nn.relu)
fc1 = Dropout('dropout', fc1, 0.5)
logits = FullyConnected('fc1', fc1, out_dim=10, nl=tf.identity)
with tf.name_scope('visualizations'):
visualize_conv_weights(c0.variables.W, 'conv0')
visualize_conv_activations(c0, 'conv0')
visualize_conv_weights(c1.variables.W, 'conv1')
visualize_conv_activations(c1, 'conv1')
visualize_conv_weights(c2.variables.W, 'conv2')
visualize_conv_activations(c2, 'conv2')
visualize_conv_weights(c3.variables.W, 'conv3')
visualize_conv_activations(c3, 'conv3')
tf.summary.image('input', (image + 1.0) * 128., 3)
cost = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=label)
cost = tf.reduce_mean(cost, name='cross_entropy_loss')
tf.reduce_mean(tf.to_float(tf.nn.in_top_k(logits, label, 1)), name='accuracy')
wd_cost = tf.multiply(1e-5,
regularize_cost('fc.*/W', tf.nn.l2_loss),
name='regularize_loss')
return tf.add_n([wd_cost, cost], name='total_cost')
def optimizer(self):
lr = tf.train.exponential_decay(
learning_rate=1e-3,
global_step=get_global_step_var(),
decay_steps=468 * 10,
decay_rate=0.3, staircase=True, name='learning_rate')
tf.summary.scalar('lr', lr)
return tf.train.AdamOptimizer(lr)
def get_data():
train = BatchData(dataset.Mnist('train'), 128)
test = BatchData(dataset.Mnist('test'), 256, remainder=True)
return train, test
if __name__ == '__main__':
logger.auto_set_dir()
dataset_train, dataset_test = get_data()
config = TrainConfig(
model=Model(),
dataflow=dataset_train,
callbacks=[
ModelSaver(),
InferenceRunner(
dataset_test, ScalarStats(['cross_entropy_loss', 'accuracy'])),
],
steps_per_epoch=len(dataset_train),<|fim▁hole|> max_epoch=100,
)
launch_train_with_config(config, SimpleTrainer())<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""
* Copyright (c) 2012-2017, Nic McDonald and Adriana Flores
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* - Neither the name of prim nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
"""
import codecs
import re
import os
import sys
try:
from setuptools import setup
except:
print('please install setuptools via pip:')
print(' pip3 install setuptools')
sys.exit(-1)
def find_version(*file_paths):
version_file = codecs.open(os.path.join(os.path.abspath(
os.path.dirname(__file__)), *file_paths), 'r').read()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:<|fim▁hole|> raise RuntimeError("Unable to find version string.")
setup(
name='sssweep',
version=find_version('sssweep', '__init__.py'),
description='Automatic task generation for SuperSim sweeps and plot web viewer',
author='Nic McDonald and Adriana Flores',
author_email='[email protected] and [email protected]',
license='BSD',
url='http://github.com/nicmcd/sssweep',
packages=['sssweep'],
install_requires=['taskrun >= 3.0.0',
'ssplot >= 0.1.0'],
)<|fim▁end|> | return version_match.group(1) |
<|file_name|>strategy.go<|end_file_name|><|fim▁begin|>// +build linux
package network
import (
"errors"
)
var (
ErrNotValidStrategyType = errors.New("not a valid network strategy type")
)
var strategies = map[string]NetworkStrategy{
"veth": &Veth{},
"loopback": &Loopback{},<|fim▁hole|>// NetworkStrategy represents a specific network configuration for
// a container's networking stack
type NetworkStrategy interface {
Create(*Network, int, *NetworkState) error
Initialize(*Network, *NetworkState) error
}
// GetStrategy returns the specific network strategy for the
// provided type. If no strategy is registered for the type an
// ErrNotValidStrategyType is returned.
func GetStrategy(tpe string) (NetworkStrategy, error) {
s, exists := strategies[tpe]
if !exists {
return nil, ErrNotValidStrategyType
}
return s, nil
}<|fim▁end|> | }
|
<|file_name|>MathHelper.java<|end_file_name|><|fim▁begin|>package codechicken.lib.math;<|fim▁hole|>//TODO cleanup.
public class MathHelper {
public static final double phi = 1.618033988749894;
public static final double pi = Math.PI;
public static final double todeg = 57.29577951308232;
public static final double torad = 0.017453292519943;
public static final double sqrt2 = 1.414213562373095;
public static double[] SIN_TABLE = new double[65536];
static {
for (int i = 0; i < 65536; ++i) {
SIN_TABLE[i] = Math.sin(i / 65536D * 2 * Math.PI);
}
SIN_TABLE[0] = 0;
SIN_TABLE[16384] = 1;
SIN_TABLE[32768] = 0;
SIN_TABLE[49152] = 1;
}
public static double sin(double d) {
return SIN_TABLE[(int) ((float) d * 10430.378F) & 65535];
}
public static double cos(double d) {
return SIN_TABLE[(int) ((float) d * 10430.378F + 16384.0F) & 65535];
}
/**
* @param a The value
* @param b The value to approach
* @param max The maximum step
* @return the value closest to b, no more than max away from a
*/
public static float approachLinear(float a, float b, float max) {
return (a > b) ? (a - b < max ? b : a - max) : (b - a < max ? b : a + max);
}
/**
* @param a The value
* @param b The value to approach
* @param max The maximum step
* @return the value closest to b, no more than max away from a
*/
public static double approachLinear(double a, double b, double max) {
return (a > b) ? (a - b < max ? b : a - max) : (b - a < max ? b : a + max);
}
/**
* @param a The first value
* @param b The second value
* @param d The interpolation factor, between 0 and 1
* @return a+(b-a)*d
*/
public static float interpolate(float a, float b, float d) {
return a + (b - a) * d;
}
/**
* @param a The first value
* @param b The second value
* @param d The interpolation factor, between 0 and 1
* @return a+(b-a)*d
*/
public static double interpolate(double a, double b, double d) {
return a + (b - a) * d;
}
/**
* @param a The value
* @param b The value to approach
* @param ratio The ratio to reduce the difference by
* @return a+(b-a)*ratio
*/
public static double approachExp(double a, double b, double ratio) {
return a + (b - a) * ratio;
}
/**
* @param a The value
* @param b The value to approach
* @param ratio The ratio to reduce the difference by
* @param cap The maximum amount to advance by
* @return a+(b-a)*ratio
*/
public static double approachExp(double a, double b, double ratio, double cap) {
double d = (b - a) * ratio;
if (Math.abs(d) > cap) {
d = Math.signum(d) * cap;
}
return a + d;
}
/**
* @param a The value
* @param b The value to approach
* @param ratio The ratio to reduce the difference by
* @param c The value to retreat from
* @param kick The difference when a == c
* @return a moved towards b by an amount proportional to its distance from c (plus kick), or b if that step would overshoot
*/
public static double retreatExp(double a, double b, double c, double ratio, double kick) {
double d = (Math.abs(c - a) + kick) * ratio;
if (d > Math.abs(b - a)) {
return b;
}
return a + Math.signum(b - a) * d;
}
/**
* @param value The value
* @param min The min value
* @param max The max value
* @return The clipped value between min and max
*/
public static double clip(double value, double min, double max) {
if (value > max) {
value = max;
}
if (value < min) {
value = min;
}
return value;
}
/**
* @param value The value
* @param min The min value
* @param max The max value
* @return The clipped value between min and max
*/
public static float clip(float value, float min, float max) {
if (value > max) {
value = max;
}
if (value < min) {
value = min;
}
return value;
}
/**
* @param value The value
* @param min The min value
* @param max The max value
* @return The clipped value between min and max
*/
public static int clip(int value, int min, int max) {
if (value > max) {
value = max;
}
if (value < min) {
value = min;
}
return value;
}
/**
* Maps a value range to another value range.
*
* @param valueIn The value to map.
* @param inMin The minimum of the input value range.
* @param inMax The maximum of the input value range
* @param outMin The minimum of the output value range.
* @param outMax The maximum of the output value range.
* @return The mapped value.
*/
public static double map(double valueIn, double inMin, double inMax, double outMin, double outMax) {
return (valueIn - inMin) * (outMax - outMin) / (inMax - inMin) + outMin;
}
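// Worked example (illustrative, not in the original source):
// map(5.0, 0.0, 10.0, 0.0, 100.0) == 50.0; the value's relative position
// in [inMin, inMax] is preserved in [outMin, outMax].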
/**
* Maps a value range to another value range.
*
* @param valueIn The value to map.
* @param inMin The minimum of the input value range.
* @param inMax The maximum of the input value range
* @param outMin The minimum of the output value range.
* @param outMax The maximum of the output value range.
* @return The mapped value.
*/
public static float map(float valueIn, float inMin, float inMax, float outMin, float outMax) {
return (valueIn - inMin) * (outMax - outMin) / (inMax - inMin) + outMin;
}
/**
* Rounds the number of decimal places based on the given multiplier.<br>
* e.g.<br>
* Input: 17.5245743<br>
* multiplier: 1000<br>
* Output: 17.525<br>
* multiplier: 10<br>
* Output 17.5<br><br>
*
* @param number The input value.
* @param multiplier The multiplier.
* @return The input rounded to a number of decimal places based on the multiplier.
*/
public static double round(double number, double multiplier) {
return Math.round(number * multiplier) / multiplier;
}
/**
* Rounds the number of decimal places based on the given multiplier.<br>
* e.g.<br>
* Input: 17.5245743<br>
* multiplier: 1000<br>
* Output: 17.525<br>
* multiplier: 10<br>
* Output 17.5<br><br>
*
* @param number The input value.
* @param multiplier The multiplier.
* @return The input rounded to a number of decimal places based on the multiplier.
*/
public static float round(float number, float multiplier) {
return Math.round(number * multiplier) / multiplier;
}
/**
* @return min <= value <= max
*/
public static boolean between(double min, double value, double max) {
return min <= value && value <= max;
}
public static int approachExpI(int a, int b, double ratio) {
int r = (int) Math.round(approachExp(a, b, ratio));
return r == a ? b : r;
}
public static int retreatExpI(int a, int b, int c, double ratio, int kick) {
int r = (int) Math.round(retreatExp(a, b, c, ratio, kick));
return r == a ? b : r;
}
public static int floor(double d) {
return net.minecraft.util.math.MathHelper.floor_double(d);
}
public static int floor(float d) {
return net.minecraft.util.math.MathHelper.floor_float(d);
}
public static int ceil(double d) {
return net.minecraft.util.math.MathHelper.ceiling_double_int(d);
}
public static int ceil(float d) {
return net.minecraft.util.math.MathHelper.ceiling_float_int(d);
}
public static float sqrt(float f) {
return net.minecraft.util.math.MathHelper.sqrt_float(f);
}
public static float sqrt(double f) {
return net.minecraft.util.math.MathHelper.sqrt_double(f);
}
public static int roundAway(double d) {
return (int) (d < 0 ? Math.floor(d) : Math.ceil(d));
}
public static int compare(int a, int b) {
return a == b ? 0 : a < b ? -1 : 1;
}
public static int compare(double a, double b) {
return a == b ? 0 : a < b ? -1 : 1;
}
public static int absSum(BlockPos pos) {
return (pos.getX() < 0 ? -pos.getX() : pos.getX()) + (pos.getY() < 0 ? -pos.getY() : pos.getY()) + (pos.getZ() < 0 ? -pos.getZ() : pos.getZ());
}
public static boolean isAxial(BlockPos pos) {
return pos.getX() == 0 ? (pos.getY() == 0 || pos.getZ() == 0) : (pos.getY() == 0 && pos.getZ() == 0);
}
public static int toSide(BlockPos pos) {
if (!isAxial(pos)) {
return -1;
}
if (pos.getY() < 0) {
return 0;
}
if (pos.getY() > 0) {
return 1;
}
if (pos.getZ() < 0) {
return 2;
}
if (pos.getZ() > 0) {
return 3;
}
if (pos.getX() < 0) {
return 4;
}
if (pos.getX() > 0) {
return 5;
}
return -1;
}
}<|fim▁end|> |
import net.minecraft.util.math.BlockPos;
|
<|file_name|>maptune.GoogleV2.js<|end_file_name|><|fim▁begin|>/**********************************************************************
map_GoogleV2.js
$Comment: provides JavaScript for Google Api V2 calls
$Source :map_GoogleV2.js,v $
$InitialAuthor: guenter richter $
$InitialDate: 2011/01/03 $
$Author: guenter richter $
$Id:map_GoogleV2.js 1 2011-01-03 10:30:35Z Guenter Richter $
Copyright (c) Guenter Richter
$Log:map_GoogleV2.js,v $
**********************************************************************/
/**
* @fileoverview This is the interface to the Google maps API v2
*
* @author Guenter Richter [email protected]
* @version 0.9
*/
/* ...................................................................*
* global vars *
* ...................................................................*/
/* ...................................................................*
* Google directions *
* ...................................................................*/
function __directions_handleErrors(){
var result = $("#directions-result")[0];
if (gdir.getStatus().code == G_GEO_UNKNOWN_ADDRESS)
result.innerHTML = ("Indirizzo sconosciuto. Forse è troppo nuovo o sbagliato.\n Codice errore: " + gdir.getStatus().code);
else if (gdir.getStatus().code == G_GEO_SERVER_ERROR)
result.innerHTML = ("Richiesta non riuscita.\n Codice errore: " + gdir.getStatus().code);
else if (gdir.getStatus().code == G_GEO_MISSING_QUERY)
result.innerHTML = ("Inserire indirizzi! \n Codice errore: " + gdir.getStatus().code);
else if (gdir.getStatus().code == G_GEO_BAD_KEY)
result.innerHTML = ("The given key is either invalid or does not match the domain for which it was given. \n Error code: " + gdir.getStatus().code);
else if (gdir.getStatus().code == G_GEO_BAD_REQUEST)
result.innerHTML = ("A directions request could not be successfully parsed.\n Error code: " + gdir.getStatus().code);
else result.innerHTML = ("Errore sconosciuto!");
}
function _map_setDirections(map,fromAddress, toAddress, toHidden, locale) {
var result = $("#directions-result")[0];
result.innerHTML = "";
gdir.loadFromWaypoints([fromAddress,toHidden],
{ "locale": locale, "preserveViewport":true });
}
function _map_setDestinationWaypoint(marker){
if ( marker ){
var form = $("#directionsform")[0];
if ( form ){
form.to.value = marker.data.name;
if ( marker.getLatLng ){
form.toHidden.value = marker.getLatLng();
}
else if( marker.getVertex ){
form.toHidden.value = marker.getVertex(0);
}
}
<|fim▁hole|> * Is called 'onload' to start creating the map
*/
function _map_loadMap(target){
var __map = null;
// if google maps API v2 is loaded
if ( GMap2 ){
// check if browser can handle Google Maps
if ( !GBrowserIsCompatible()) {
alert("sorry - your browser cannot handle Google Maps !");
return null;
}
__map = new GMap2(target);
if ( __map ){
// configure user map interface
__map.addControl(new GMapTypeControl());
// map.addControl(new GMenuMapTypeControl());
__map.addControl(new GLargeMapControl3D());
__map.addControl(new GScaleControl());
__map.addMapType(G_PHYSICAL_MAP);
__map.addMapType(G_SATELLITE_3D_MAP);
__map.enableDoubleClickZoom();
__map.enableScrollWheelZoom();
}
}
return __map;
}
/**
* Is called to set up directions query
*/
function _map_addDirections(map,target){
if (map){
gdir = new GDirections(map,target);
GEvent.addListener(gdir, "error", __directions_handleErrors);
}
}
/**
* Is called to set up traffic information layer
*/
function _map_addTrafficLayer(map,target){
/* tbd */
}
/**
* Is called to set event handler
*/
function _map_addEventListner(map,szEvent,callback,mapUp){
if (map){
GEvent.addListener(map, szEvent, GEvent.callback(map,callback,mapUp) );
}
}
/**
* Is called 'onunload' to clear objects
*/
function _map_unloadMap(map){
if (map){
GUnload();
}
}
// set map center and zoom
//
function _map_setMapExtension(map,bBox){
if (map){
var mapCenter = { lon: (bBox[0] + (bBox[1]-bBox[0])/2) , lat: (bBox[2] + (bBox[3]-bBox[2])/2) };
var mapZoom = map.getBoundsZoomLevel(new GLatLngBounds( new GLatLng(bBox[2],bBox[0]),
new GLatLng(bBox[3],bBox[1]) ) );
map.setCenter(new GLatLng(mapCenter.lat, mapCenter.lon), mapZoom);
}
}
// get map zoom
//
function _map_getZoom(map){
if (map){
return map.getZoom();
}
return 0;
}
// get map center
//
function _map_getCenter(map){
if (map){
return map.getCenter();
}
return null;
}
// set map zoom
//
function _map_setZoom(map,nZoom){
if (map){
map.setZoom(nZoom);
}
}
// set map center
//
function _map_setCenter(map,center){
if (map){
map.setCenter(center);
}
}
// set map center and zoom
//
function _map_setCenterAndZoom(map,center,nZoom){
if (map){
map.setCenter(center,nZoom);
}
}
// create custom tooltip
//
function _map_createMyTooltip(marker, text, padding){
var tooltip = new Tooltip(marker, text, padding);
marker.tooltip = tooltip;
map.addOverlay(tooltip);
}
function _map_createMyTooltipListener(element, tooltip){
GEvent.addDomListener(element,'mouseover',GEvent.callback(tooltip,
Tooltip.prototype.show));
GEvent.addDomListener(element,'mouseout',GEvent.callback(tooltip,
Tooltip.prototype.hide));
}
// -----------------------------
// EOF
// -----------------------------<|fim▁end|> | }
}
/**
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2014 - Intel.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sqlalchemy as sa
import uuid
from healing.db.sqlalchemy import model_base as mb
## Helpers
#TODO: i think oslo has uuid utils, replace here!
def _generate_unicode_uuid():
return unicode(str(uuid.uuid4()))
def _id_column():
return sa.Column(sa.String(36),
primary_key=True,
default=_generate_unicode_uuid)
class Action(mb.HealingBase):
"""Contains info about actions."""
__tablename__ = 'actions'
__table_args__ = (
sa.UniqueConstraint('id'),
)
id = _id_column()<|fim▁hole|> project_id = sa.Column(sa.String(80), nullable=True)
request_id = sa.Column(sa.String(80), nullable=True)
internal_data = sa.Column(sa.String(200), nullable=True)
output = sa.Column(sa.Text(), nullable=True)
class SLAContract(mb.HealingBase):
"""Contains info about the SLA contracts."""
__tablename__ = 'sla_contract'
__table_args__ = (
sa.UniqueConstraint('id'),
)
id = _id_column()
project_id = sa.Column(sa.String(80), nullable=True)
type = sa.Column(sa.String(255), nullable=True)
value = sa.Column(sa.String(255), nullable=True)
name = sa.Column(sa.String(255), nullable=True)
action = sa.Column(sa.String(255), nullable=True)
resource_id = sa.Column(sa.String(255), nullable=True)
action_options = sa.Column(sa.String(255), nullable=True)
class AlarmTrack(mb.HealingBase):
"""Contains info about the ALARMs."""
__tablename__ = 'alarm_track'
__table_args__ = (
sa.UniqueConstraint('id'),
)
id = _id_column()
alarm_id = sa.Column(sa.String(80))
contract_id = sa.Column(sa.String(80))
type = sa.Column(sa.String(100))
meter = sa.Column(sa.String(100))
threshold = sa.Column(sa.String(20))
operator = sa.Column(sa.String(5))
period = sa.Column(sa.Integer(), default=10)
evaluation_period = sa.Column(sa.Integer(), default=1)
name = sa.Column(sa.String(255))
query = sa.Column(sa.String(255))
statistic = sa.Column(sa.String(255))
# if not tru SLA
action = sa.Column(sa.String(255))
class FailureTrack(mb.HealingBase):
"""Contains info about the SLA contracts."""
__tablename__ = 'failure_track'
__table_args__ = (
sa.UniqueConstraint('id'),
)
id = _id_column()
alarm_id = sa.Column(sa.String(255))
data = sa.Column(sa.String(255), nullable=True)
contract_names = sa.Column(sa.String(255), nullable=True)<|fim▁end|> | name = sa.Column(sa.String(80))
status = sa.Column(sa.String(20), nullable=True, default='init')
action_meta = sa.Column(sa.String(200), nullable=True)
target_id = sa.Column(sa.String(80)) |
<|file_name|>index.test-d.ts<|end_file_name|><|fim▁begin|>import {expectType} from 'tsd';
import imageType = require('.');
import {ImageTypeResult, ImageType} from '.';
imageType(Buffer.from([0xff, 0xd8, 0xff]));
imageType(new Uint8Array([0xff, 0xd8, 0xff]));
expectType<ImageTypeResult | null>(imageType(Buffer.from([0xff, 0xd8, 0xff])));
expectType<ImageTypeResult | null>(
imageType(new Uint8Array([0xff, 0xd8, 0xff]))
);<|fim▁hole|>const result = imageType(Buffer.from([0xff, 0xd8, 0xff]));
if (result != null) {
expectType<ImageType>(result.ext);
expectType<string>(result.mime);
}
expectType<number>(imageType.minimumBytes);<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use super::{Storage, StorageResult, StorageError};
use db::storage::Page;
use super::{Row, RowBuilder};
// in memory storage. has a bunch of pages in a vector
pub struct Memory {
pages: Vec<Page>
}
impl Memory {
pub fn new() -> StorageResult<Memory> {
Ok(Memory{pages:Vec::new()})<|fim▁hole|>}
impl Storage for Memory {
fn get_page(&self, num: u64) -> StorageResult<Page> {
Err(StorageError::PageNotFound)
}
fn write_page(&mut self, page: &Page) -> StorageResult<()> {
unimplemented!()
}
}<|fim▁end|> | } |
<|file_name|>Header.tsx<|end_file_name|><|fim▁begin|>import React, { Component } from 'react';
import styled from 'styled-components';
import Container from './Container';<|fim▁hole|>import TopNav from './TopNav';
import logo from '../assets/images/header-logo.png';
const PaddedContainer = Container.extend`
padding-top: 40px;
padding-bottom: 40px;
display: flex;
justify-content: space-between;
align-items: center;
`;
const Logo = styled.div`
width: 283px;
height: 70px;
background: url(${logo}) no-repeat;
`;
const Header: React.StatelessComponent = () => (
<header>
<PaddedContainer>
<Logo />
<TopNav />
</PaddedContainer>
</header>
);
export default Header;<|fim▁end|> | |
<|file_name|>LocalizedString.java<|end_file_name|><|fim▁begin|>package com.ocdsoft.bacta.swg.shared.localization;
/**<|fim▁hole|> */
public class LocalizedString {
}<|fim▁end|> | * Created by crush on 11/21/2015. |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import argparse
import collections
import io
import json
import logging
import os
# pylint: disable=import-error
import queue
import random
import re
import requests
import sys
import traceback
import threading
import time
from ..core import provider
from ..utils import rnd
from . import window
logger = logging.getLogger('dice')
class _TestThread(threading.Thread):
"""
Thread class for running the main tests.
"""
def __init__(self, exc_queue, app, **kwargs):
threading.Thread.__init__(self, **kwargs)
self.exc_queue = exc_queue
self.app = app
def run(self):
try:
self.app.run_tests()
# pylint: disable=broad-except
except Exception:
self.exc_queue.put(sys.exc_info())
class _TestStat(object):
"""
Class to store the tests and statistics information.
"""
def __init__(self, key, queue_max=100, method='exact'):
self.key = key
self.counter = 0
self.queue_max = queue_max
self.method = method
self.queue = collections.deque([], queue_max)
def match(self, text):
if self.method == 'exact':
return text == self.key
elif self.method == 'regex':
return re.match(self.key + '$', text)
def append(self, result):
self.counter += 1
self.queue.append(result)
def extend(self, stat):
for result in stat.queue:
self.append(result)
class DiceApp(object):
"""
Curses-based DICE client application.
"""
def __init__(self):
self.parser = argparse.ArgumentParser()
self.parser.add_argument(
'providers',
nargs='?',
action='store',
help="list of test providers separated by ','. Default to current "
"working directory",
default=os.getcwd(),
)
self.parser.add_argument(
'--server',
action='store',
help='server address',
dest='server',
default=None,
)
self.parser.add_argument(
'--port',
action='store',
help='server port',
dest='port',
default='8067',
)
self.parser.add_argument(
'--username',
action='store',<|fim▁hole|> help='server authentication user name',
dest='username',
)
self.parser.add_argument(
'--password',
action='store',
help='server authentication password',
dest='password',
)
self.parser.add_argument(
'--no-ui',
action='store_false',
help="don't show terminal interactive user interface.",
dest='ui',
default=True,
)
self.args, _ = self.parser.parse_known_args()
try:
self.providers = self._process_providers()
except provider.ProviderError as detail:
exit(detail)
self.stats = {
"skip": {},
"failure": {},
"success": {},
"timeout": {},
"expected_neg": {},
"unexpected_neg": {},
"unexpected_pass": {},
}
self.QUEUE_MAX = 100
self.exiting = False
self.pause = False
self.setting_watch = False
self.show_log = False
self.watching = ''
self.scroll_x = 0
self.scroll_y = 0
self.test_excs = queue.Queue()
self.test_thread = _TestThread(self.test_excs, self)
self.send_queue = []
self.last_send_thread = None
self.last_item = None
self.cur_counter = 'failure'
if self.args.ui:
self.window = window.Window(self)
self.window.stat_panel.set_select_callback(self._update_items)
self.window.stat_panel.add_keypress_listener(
'merge_stat', 'm', self._merge_stat)
self.window.items_panel.set_select_callback(self._update_content)
self.stream = io.StringIO()
self.cur_class = (None, None)
self.cur_item = (None, None)
def _update_items(self, cat_name, item_idx):
self.cur_class = (cat_name, item_idx)
def _update_content(self, cat_name, item_idx):
self.cur_item = (cat_name, item_idx)
def _merge_stat(self, panel):
self.pause = True
cat_name, _ = panel.cur_key
text = self.window.get_input()
match_keys = []
for key in self.stats[cat_name]:
res = re.match(text, key)
if res is not None:
match_keys.append(key)
stat = self.stats[cat_name][text] = _TestStat(text, method='regex')
for key in match_keys:
stat.extend(self.stats[cat_name][key])
del self.stats[cat_name][key]
self.pause = False
def _stat_result(self, item):
"""
Categorizes and keep the count of a result of a test item depends on
the expected failure patterns.
"""
res = item.res
fail_patts = item.fail_patts
key = res.stderr
catalog = None
if res:
if res.exit_status == 'timeout':
catalog = 'timeout'
if self.watching and self.watching in res.stderr:
self.pause = True
if fail_patts:
if res.exit_status == 'success':
catalog = 'unexpected_pass'
elif res.exit_status == 'failure':
found = False
for patt in fail_patts:
if re.search(patt, res.stderr):
catalog = 'expected_neg'
key = patt
found = True
break
if not found:
catalog = 'unexpected_neg'
else:
if res.exit_status == 'success':
catalog = 'success'
elif res.exit_status == 'failure':
catalog = 'failure'
else:
catalog = 'skip'
found = False
for stat in self.stats[catalog].values():
if stat.match(key):
found = True
key = stat.key
break
if not found:
self.stats[catalog][key] = _TestStat(key)
stat = self.stats[catalog][key]
stat.append(res)
def _process_providers(self):
"""
Print a list of available providers if --list-providers is set
or return a dict of specified providers.
"""
providers = {}
if self.args.providers:
for path in self.args.providers.split(','):
prvdr = provider.Provider(path)
providers[prvdr.name] = prvdr
else:
sys.exit('Error: --providers option not specified')
return providers
def _send(self, item_queue):
"""
Serialize a list of test results and send them to remote server.
"""
content = []
for item in item_queue:
content.append(item.serialize())
data = json.dumps(content)
headers = {}
headers['content-type'] = 'application/json'
url = 'http://%s:%s/api/tests/' % (self.args.server, self.args.port)
try:
response = requests.post(
url,
data=data,
headers=headers,
auth=(self.args.username, self.args.password),
)
if response.status_code != 201:
logger.debug('Failed to send result (HTTP%s):',
response.status_code)
if 'DOCTYPE' in response.text:
html_path = 'debug_%s.html' % rnd.regex('[a-z]{4}')
with open(html_path, 'w') as fp:
fp.write(response.text)
logger.debug('Html response saved to %s',
os.path.abspath(html_path))
else:
logger.debug(response.text)
except requests.ConnectionError as detail:
logger.debug('Failed to send result to server: %s', detail)
def run_tests(self):
"""
Iteratively run tests.
"""
while not self.exiting:
item = random.choice(self.providers.values()).generate()
item.run()
self.last_item = item
if self.args.server is not None:
self.send_queue.append(item)
if len(self.send_queue) > 200:
if self.last_send_thread:
self.last_send_thread.join()
send_thread = threading.Thread(
target=self._send,
args=(self.send_queue,)
)
send_thread.start()
self.last_send_thread = send_thread
self.send_queue = []
self._stat_result(item)
if self.pause:
while self.pause and not self.exiting:
time.sleep(0.5)
def update_window(self):
"""
Update the content of curses window and refresh it.
"""
# Set statistics panel content
panel = self.window.stat_panel
panel.clear()
for cat_name in self.stats:
for key, stat in self.stats[cat_name].items():
bundle = {'key': key, 'count': stat.counter}
panel.add_item(bundle, catalog=cat_name)
# Set items panel content
panel = self.window.items_panel
panel.clear()
cat_name, item_idx = self.cur_class
if cat_name is not None and item_idx is not None:
item_name, stat = self.stats[cat_name].items()[item_idx]
try:
for item in self.stats[cat_name][item_name].queue:
bundle = {'item': item.cmdline}
panel.add_item(bundle)
except RuntimeError:
pass
# Set detail panel content
panel = self.window.detail_panel
panel.clear()
cat_name, item_idx = self.cur_class
if cat_name is not None and item_idx is not None:
item_name, stat = self.stats[cat_name].items()[item_idx]
items = self.stats[cat_name][item_name].queue
item_name, item_idx = self.cur_item
if item_name is not None and item_idx is not None:
bundle = items[self.cur_item[1]]
panel.set_content(bundle)
self.window.update()
def run(self):
"""
Main loop to run tests, update screen and send tests results.
"""
shandler = logging.StreamHandler(self.stream)
logger.setLevel(logging.WARNING)
for handler in logger.handlers:
logger.removeHandler(handler)
logger.addHandler(shandler)
os.environ["EDITOR"] = "echo"
self.last_item = None
if self.args.ui:
try:
self.test_thread.start()
while True:
if self.args.ui:
self.update_window()
if self.exiting:
break
if not self.test_thread.isAlive():
break
except KeyboardInterrupt:
pass
finally:
if self.args.ui:
self.window.destroy()
self.exiting = True
self.test_thread.join()
try:
exc = self.test_excs.get(block=False)
for line in traceback.format_exception(*exc):
print(line, end='')
except queue.Empty:
pass
else:
self.run_tests()<|fim▁end|> | |
<|file_name|>derive_invalid_url.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use juniper::GraphQLScalar;
#[derive(GraphQLScalar)]
#[graphql(specified_by_url = "not an url", transparent)]
struct ScalarSpecifiedByUrl(i64);
fn main() {}<|fim▁end|> | |
<|file_name|>FormPanel.java<|end_file_name|><|fim▁begin|>package view;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.KeyEvent;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import javax.swing.BorderFactory;
import javax.swing.DefaultComboBoxModel;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JFormattedTextField;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JSeparator;
import javax.swing.JTextField;
import javax.swing.border.Border;
import model.Manufacturer;
import model.Model;
public class FormPanel extends JPanel {
private JLabel nameLabel;
private JLabel phoneLabel;
private JLabel creationDateLabel;
private JLabel carInfoLabel;
private JTextField nameField;
private JTextField phoneField;
private JFormattedTextField creationDateField;
private static final String CMAN_NOT_SELECTABLE_OPTION = "-Válassz autógyártót-";
private JComboBox<String> carManComboBox;
private static final String CMOD_NOT_SELECTABLE_OPTION = "-Válassz autótípust-";
private JComboBox<String> carModelComboBox;
private JTextField licensePlateNoField;
private JTextField yearField;
private JTextField colorField;
private JTextField odometerField;
private JLabel licensePlateNoLabel;
private JLabel yearLabel;
private JLabel colorLabel;
private JLabel odometerLabel;
private JButton okBtn;
private FormListener formListener;
private CarManChooserListener carManChooserListener;
public FormPanel(List<Manufacturer> manufacturers) {
Dimension dim = getPreferredSize();
dim.width = 250;
setPreferredSize(dim);
nameLabel = new JLabel("*Név: ");
phoneLabel = new JLabel("Telefonszám: ");
creationDateLabel = new JLabel("Mai dátum: ");
carInfoLabel = new JLabel("Gépjármű információk: ");
nameField = new JTextField(10);
phoneField = new JTextField(10);
carManComboBox = new JComboBox<String>();
carModelComboBox = new JComboBox<String>();
licensePlateNoField = new JTextField(10);
yearField = new JTextField(10);
colorField = new JTextField(10);
odometerField = new JTextField(10);
licensePlateNoLabel = new JLabel("Rendszám: ");
yearLabel = new JLabel("Gyártási év: ");
colorLabel = new JLabel("Szín: ");
odometerLabel = new JLabel("Km-óra állás: ");
licensePlateNoField.setText("*Rendszám");
licensePlateNoField.addFocusListener(new FocusListener() {
public void focusLost(FocusEvent e) {
if(licensePlateNoField.getText().trim().equals(""))
licensePlateNoField.setText("*Rendszám");
}
public void focusGained(FocusEvent e) {
if(licensePlateNoField.getText().trim().equals("*Rendszám"))
licensePlateNoField.setText("");
}
});
yearField.setText("Gyártási év");
yearField.addFocusListener(new FocusListener() {
public void focusLost(FocusEvent e) {
if(yearField.getText().trim().equals(""))
yearField.setText("Gyártási év");
}
public void focusGained(FocusEvent e) {
if(yearField.getText().trim().equals("Gyártási év"))
yearField.setText("");
}
});
colorField.setText("Gépjármű színe");
colorField.addFocusListener(new FocusListener() {
public void focusLost(FocusEvent e) {
if(colorField.getText().trim().equals(""))
colorField.setText("Gépjármű színe");
}
public void focusGained(FocusEvent e) {
if(colorField.getText().trim().equals("Gépjármű színe"))
colorField.setText("");
}
});
odometerField.setText("Kilóméteróra állás");
odometerField.addFocusListener(new FocusListener() {
public void focusLost(FocusEvent e) {
if(odometerField.getText().trim().equals(""))
odometerField.setText("Kilóméteróra állás");
}
public void focusGained(FocusEvent e) {
if(odometerField.getText().trim().equals("Kilóméteróra állás"))
odometerField.setText("");
}
});
carModelComboBox.setVisible(false);
carManComboBox.setModel(new DefaultComboBoxModel<String>() {
boolean selectionAllowed = true;
public void setSelectedItem(Object anObject) {
if (!CMAN_NOT_SELECTABLE_OPTION.equals(anObject)) {
super.setSelectedItem(anObject);
} else if (selectionAllowed) {
// Allow this just once
selectionAllowed = false;
super.setSelectedItem(anObject);
}
}
});
carModelComboBox.setModel(new DefaultComboBoxModel<String>() {
boolean selectionAllowed = true;
public void setSelectedItem(Object anObject) {
if (!CMOD_NOT_SELECTABLE_OPTION.equals(anObject)) {
super.setSelectedItem(anObject);
} else if (selectionAllowed) {
// Allow this just once
selectionAllowed = false;
super.setSelectedItem(anObject);
}
}
});
carManComboBox.addItem(CMAN_NOT_SELECTABLE_OPTION);
for (Manufacturer man : manufacturers) {
carManComboBox.addItem(man.getManufacturerName());
}
Date todaysDate = new Date();
SimpleDateFormat dateformat = new SimpleDateFormat(
"yyyy-MM-dd HH:mm:ss");
creationDateField = new JFormattedTextField(
dateformat.format(todaysDate));
creationDateField.setEditable(false);
okBtn = new JButton("OK");
okBtn.setMnemonic(KeyEvent.VK_O);
nameLabel.setDisplayedMnemonic(KeyEvent.VK_N);
nameLabel.setLabelFor(nameField);
phoneLabel.setDisplayedMnemonic(KeyEvent.VK_T);
phoneLabel.setLabelFor(phoneField);
carManComboBox.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String carMan = (String) carManComboBox.getSelectedItem();
CarManChooserEvent eCM = new CarManChooserEvent(this, carMan);
if (carManChooserListener != null) {
carManChooserListener.carManChooserEventOccured(eCM);
}
}
});
okBtn.addActionListener(new ActionListener() {
public void actionPerformed(ActionEvent e) {
String name = nameField.getText();
String phoneNum = phoneField.getText();
Date creationDate = new Date();
String carMan = (String) carManComboBox.getSelectedItem();
String carModel = (String) carModelComboBox.getSelectedItem();
String licensePlateNo = licensePlateNoField.getText();<|fim▁hole|> String odometer = odometerField.getText();
if( year.equals("Gyártási év") ){
year = "";
}
if( color.equals("Gépjármű színe") ){
color = "";
}
if (name.equals("")) {
JOptionPane.showMessageDialog(FormPanel.this,
"A 'Név' mező nem maradhat üresen!",
"Adatbázis hiba", JOptionPane.ERROR_MESSAGE);
} else if(carMan.equals(CMAN_NOT_SELECTABLE_OPTION)){
JOptionPane.showMessageDialog(FormPanel.this,
"Autógyártó választása kötelező!",
"Adatbázis hiba", JOptionPane.ERROR_MESSAGE);
} else if(carModel.equals(CMOD_NOT_SELECTABLE_OPTION)){
JOptionPane.showMessageDialog(FormPanel.this,
"Autótípus választása kötelező!",
"Adatbázis hiba", JOptionPane.ERROR_MESSAGE);
} else if(licensePlateNo.equals("*Rendszám")){
JOptionPane.showMessageDialog(FormPanel.this,
"Rendszám megadása kötelező!",
"Adatbázis hiba", JOptionPane.ERROR_MESSAGE);
} else if(!odometer.equals("Kilóméteróra állás") && !isInteger(odometer) ){
JOptionPane.showMessageDialog(FormPanel.this,
"A kilóméteróra állás szám kell hogy legyen!",
"Adatbázis hiba", JOptionPane.ERROR_MESSAGE);
} else {
Integer odoMeter;
if( odometer.equals("Kilóméteróra állás") ){
odoMeter = 0;
} else{
odoMeter = Integer.parseInt(odometer);
}
nameField.setText("");
phoneField.setText("");
licensePlateNoField.setText("*Rendszám");
yearField.setText("Gyártási év");
colorField.setText("Gépjármű színe");
odometerField.setText("Kilóméteróra állás");
FormEvent ev = new FormEvent(this, name, phoneNum, creationDate,
carMan, carModel,
licensePlateNo, year, color, odoMeter);
if (formListener != null) {
formListener.formEventOccured(ev);
}
}
}
});
Border innerBorder = BorderFactory.createTitledBorder("Új ügyfél");
Border outerBorder = BorderFactory.createEmptyBorder(5, 5, 5, 5);
setBorder(BorderFactory.createCompoundBorder(outerBorder, innerBorder));
layoutComponents();
}
public void layoutComponents() {
setLayout(new GridBagLayout());
GridBagConstraints gc = new GridBagConstraints();
// Name label and name field
gc.gridy = 0;
gc.gridx = 0;
gc.weightx = 1;
gc.weighty = 0.1;
gc.fill = GridBagConstraints.NONE;
gc.anchor = GridBagConstraints.LINE_END;
gc.insets = new Insets(0, 0, 0, 5);
add(nameLabel, gc);
gc.gridx++;
gc.anchor = GridBagConstraints.LINE_START;
gc.insets = new Insets(0, 0, 0, 0);
add(nameField, gc);
// Phone label and name field
gc.gridy++;
gc.gridx = 0;
gc.weightx = 1;
gc.weighty = 0.1;
gc.anchor = GridBagConstraints.LINE_END;
gc.insets = new Insets(0, 0, 0, 5);
add(phoneLabel, gc);
gc.gridx++;
gc.anchor = GridBagConstraints.LINE_START;
gc.insets = new Insets(0, 0, 0, 0);
add(phoneField, gc);
// Date
gc.gridy++;
gc.gridx = 0;
gc.weightx = 1;
gc.weighty = 0.1;
gc.anchor = GridBagConstraints.LINE_END;
gc.insets = new Insets(0, 0, 0, 5);
add(creationDateLabel, gc);
gc.gridx++;
gc.anchor = GridBagConstraints.LINE_START;
gc.insets = new Insets(0, 0, 0, 0);
add(creationDateField, gc);
////////////// Separator //////////////
gc.gridy++;
gc.gridx = 0;
gc.weightx = 1.0;
gc.weighty = 0.3;
gc.fill = GridBagConstraints.HORIZONTAL;
gc.gridwidth = GridBagConstraints.REMAINDER;
add(new JSeparator(JSeparator.HORIZONTAL), gc);
// Car info label
gc.gridy++;
gc.gridx = 0;
gc.weightx = 1;
gc.weighty = 0.1;
gc.anchor = GridBagConstraints.LINE_START;
gc.fill = GridBagConstraints.NONE;
gc.insets = new Insets(0, 0, 0, 0);
add(carInfoLabel, gc);
// Car manufacturer chooser
gc.gridy++;
gc.gridx = 0;
gc.weightx = 1;
gc.weighty = 0.1;
gc.anchor = GridBagConstraints.LINE_END;
gc.fill = GridBagConstraints.NONE;
gc.insets = new Insets(5, 0, 0, 0);
add(carManComboBox, gc);
// Car model chooser
gc.gridy++;
gc.gridx = 0;
gc.weightx = 1;
gc.weighty = 0.1;
gc.anchor = GridBagConstraints.LINE_END;
gc.fill = GridBagConstraints.NONE;
gc.insets = new Insets(5, 0, 0, 0);
add(carModelComboBox, gc);
// Car license plate no.
gc.gridy++;
gc.gridx = 1;
gc.weightx = 1;
gc.weighty = 0.1;
gc.anchor = GridBagConstraints.LINE_START;
gc.insets = new Insets(0, 0, 0, 0);
add(licensePlateNoField, gc);
// Car year
gc.gridy++;
gc.gridx = 1;
gc.weightx = 1;
gc.weighty = 0.1;
gc.anchor = GridBagConstraints.LINE_START;
gc.insets = new Insets(0, 0, 0, 0);
add(yearField, gc);
// Car color
gc.gridy++;
gc.gridx = 1;
gc.weightx = 1;
gc.weighty = 0.1;
gc.anchor = GridBagConstraints.LINE_START;
gc.insets = new Insets(0, 0, 0, 0);
add(colorField, gc);
// Car odometer
gc.gridy++;
gc.gridx = 1;
gc.weightx = 1;
gc.weighty = 0.1;
gc.anchor = GridBagConstraints.LINE_START;
gc.insets = new Insets(0, 0, 0, 0);
add(odometerField, gc);
// OK Button
gc.gridy++;
gc.gridx = 1;
gc.weightx = 1;
gc.weighty = 1.5;
gc.anchor = GridBagConstraints.FIRST_LINE_START;
gc.fill = GridBagConstraints.NONE;
gc.insets = new Insets(0, 0, 0, 5);
add(okBtn, gc);
}
public void setFormListener(FormListener listener) {
this.formListener = listener;
}
public void setCarManChooserListener(CarManChooserListener listener) {
this.carManChooserListener = listener;
}
public void setCarModelComboBox(List<Model> models) {
this.carModelComboBox.removeAllItems();
carModelComboBox.addItem(CMOD_NOT_SELECTABLE_OPTION);
for (Model model : models) {
this.carModelComboBox.addItem(model.getModelName());
}
carModelComboBox.setVisible(true);
}
public static boolean isInteger(String str) {
int length = str.length();
int i = 0;
if (str.charAt(0) == '-') {
if (length == 1) {
return false;
}
i = 1;
}
for (; i < length; i++) {
char c = str.charAt(i);
if (c <= '/' || c >= ':') {
return false;
}
}
return true;
}
}<|fim▁end|> | String year = yearField.getText();
String color = colorField.getText(); |
<|file_name|>Acceptor.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tier;
import java.io.IOException;
import org.apache.geode.internal.Version;
import org.apache.geode.internal.cache.tier.sockets.CacheClientNotifier;
/**
* Defines the message listener/acceptor interface which is the GemFire Bridge Server. Multiple
* communication stacks may provide implementations for the interfaces defined in this package
*
* @since GemFire 2.0.2
*/
public interface Acceptor {
/**
* The GFE version of the server.
*
* @since GemFire 5.7
*/
Version VERSION = Version.CURRENT.getGemFireVersion();
/**
* Listens for a client to connect and establishes a connection to that client.
*/
void accept() throws Exception;<|fim▁hole|> */
void start() throws IOException;
/**
* Returns the port on which this acceptor listens for connections from clients.
*/
int getPort();
/**
* returns the server's name string, including the inet address and port that the server is
* listening on
*/
String getServerName();
/**
* Closes this acceptor thread
*/
void close();
/**
* Is this acceptor running (handling connections)?
*/
boolean isRunning();
/**
* Returns the CacheClientNotifier used by this Acceptor.
*/
CacheClientNotifier getCacheClientNotifier();
}<|fim▁end|> |
/**
* Starts this acceptor thread |
<|file_name|>bootstrap_commands.py<|end_file_name|><|fim▁begin|># Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
from __future__ import absolute_import, print_function, unicode_literals
import base64
import json
import os
import os.path as path
import re
import shutil
import sys
import urllib2
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
import servo.bootstrap as bootstrap
from servo.command_base import CommandBase, BIN_SUFFIX
from servo.util import download_bytes, download_file, extract, host_triple
@CommandProvider
class MachCommands(CommandBase):
@Command('env',
description='Print environment setup commands',
category='bootstrap')
def env(self):
env = self.build_env()
print("export PATH=%s" % env["PATH"])
if sys.platform == "darwin":
print("export DYLD_LIBRARY_PATH=%s" % env["DYLD_LIBRARY_PATH"])
else:
print("export LD_LIBRARY_PATH=%s" % env["LD_LIBRARY_PATH"])
@Command('bootstrap',
description='Install required packages for building.',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
help='Boostrap without confirmation')
def bootstrap(self, force=False):
return bootstrap.bootstrap(self.context, force=force)
@Command('bootstrap-rust',
description='Download the Rust compiler',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
help='Force download even if a copy already exists')
@CommandArgument('--target',
action='append',
default=[],
help='Download rust stdlib for specified target')
@CommandArgument('--stable',
action='store_true',
help='Use stable rustc version')
def bootstrap_rustc(self, force=False, target=[], stable=False):
self.set_use_stable_rust(stable)
version = self.rust_version()
rust_path = self.rust_path()
rust_dir = path.join(self.context.sharedir, "rust", rust_path)
install_dir = path.join(self.context.sharedir, "rust", version)
if not self.config["build"]["llvm-assertions"]:
install_dir += "-alt"
if not force and path.exists(path.join(rust_dir, "rustc", "bin", "rustc" + BIN_SUFFIX)):
print("Rust compiler already downloaded.", end=" ")
print("Use |bootstrap-rust --force| to download again.")
else:
if path.isdir(rust_dir):
shutil.rmtree(rust_dir)
os.makedirs(rust_dir)
# The nightly Rust compiler is hosted on the nightly server under the date with a name
# rustc-nightly-HOST-TRIPLE.tar.gz, whereas the stable compiler is named
# rustc-VERSION-HOST-TRIPLE.tar.gz. We just need to pull down and extract it,
# giving a directory name that will be the same as the tarball name (rustc is
# in that directory).
if stable:
tarball = "rustc-%s-%s.tar.gz" % (version, host_triple())
rustc_url = "https://static-rust-lang-org.s3.amazonaws.com/dist/" + tarball
else:<|fim▁hole|> base_url += "-alt"
rustc_url = base_url + "/" + tarball
tgz_file = rust_dir + '-rustc.tar.gz'
download_file("Rust compiler", rustc_url, tgz_file)
print("Extracting Rust compiler...")
extract(tgz_file, install_dir)
print("Rust compiler ready.")
# Each Rust stdlib has a name of the form `rust-std-nightly-TRIPLE.tar.gz` for the nightly
# releases, or rust-std-VERSION-TRIPLE.tar.gz for stable releases, with
# a directory of the name `rust-std-TRIPLE` inside and then a `lib` directory.
# This `lib` directory needs to be extracted and merged with the `rustc/lib`
# directory from the host compiler above.
nightly_suffix = "" if stable else "-nightly"
stable_version = "-{}".format(version) if stable else ""
lib_dir = path.join(install_dir,
"rustc{}{}-{}".format(nightly_suffix, stable_version, host_triple()),
"rustc", "lib", "rustlib")
# ensure that the libs for the host's target is downloaded
host_target = host_triple()
if host_target not in target:
target.append(host_target)
for target_triple in target:
target_lib_dir = path.join(lib_dir, target_triple)
if path.exists(target_lib_dir):
# No need to check for force. If --force the directory is already deleted
print("Rust lib for target {} already downloaded.".format(target_triple), end=" ")
print("Use |bootstrap-rust --force| to download again.")
continue
if self.use_stable_rust():
std_url = ("https://static-rust-lang-org.s3.amazonaws.com/dist/rust-std-%s-%s.tar.gz"
% (version, target_triple))
tgz_file = install_dir + ('rust-std-%s-%s.tar.gz' % (version, target_triple))
else:
std_url = ("https://s3.amazonaws.com/rust-lang-ci/rustc-builds/%s/rust-std-nightly-%s.tar.gz"
% (version, target_triple))
tgz_file = install_dir + ('rust-std-nightly-%s.tar.gz' % target_triple)
download_file("Host rust library for target %s" % target_triple, std_url, tgz_file)
print("Extracting Rust stdlib for target %s..." % target_triple)
extract(tgz_file, install_dir)
shutil.copytree(path.join(install_dir,
"rust-std%s%s-%s" % (nightly_suffix, stable_version, target_triple),
"rust-std-%s" % target_triple, "lib", "rustlib", target_triple),
path.join(install_dir,
"rustc%s%s-%s" % (nightly_suffix, stable_version, host_triple()),
"rustc", "lib", "rustlib", target_triple))
shutil.rmtree(path.join(install_dir,
"rust-std%s%s-%s" % (nightly_suffix, stable_version, target_triple)))
print("Rust {} libs ready.".format(target_triple))
@Command('bootstrap-rust-docs',
description='Download the Rust documentation',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
help='Force download even if docs already exist')
def bootstrap_rustc_docs(self, force=False):
self.ensure_bootstrapped()
rust_root = self.config["tools"]["rust-root"]
docs_dir = path.join(rust_root, "doc")
if not force and path.exists(docs_dir):
print("Rust docs already downloaded.", end=" ")
print("Use |bootstrap-rust-docs --force| to download again.")
return
if path.isdir(docs_dir):
shutil.rmtree(docs_dir)
docs_name = self.rust_path().replace("rustc-", "rust-docs-")
docs_url = ("https://static-rust-lang-org.s3.amazonaws.com/dist/rust-docs-nightly-%s.tar.gz"
% host_triple())
tgz_file = path.join(rust_root, 'doc.tar.gz')
download_file("Rust docs", docs_url, tgz_file)
print("Extracting Rust docs...")
temp_dir = path.join(rust_root, "temp_docs")
if path.isdir(temp_dir):
shutil.rmtree(temp_dir)
extract(tgz_file, temp_dir)
shutil.move(path.join(temp_dir, docs_name.split("/")[1],
"rust-docs", "share", "doc", "rust", "html"),
docs_dir)
shutil.rmtree(temp_dir)
print("Rust docs ready.")
@Command('bootstrap-cargo',
description='Download the Cargo build tool',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
help='Force download even if cargo already exists')
def bootstrap_cargo(self, force=False):
cargo_dir = path.join(self.context.sharedir, "cargo",
self.cargo_build_id())
if not force and path.exists(path.join(cargo_dir, "cargo", "bin", "cargo" + BIN_SUFFIX)):
print("Cargo already downloaded.", end=" ")
print("Use |bootstrap-cargo --force| to download again.")
return
if path.isdir(cargo_dir):
shutil.rmtree(cargo_dir)
os.makedirs(cargo_dir)
tgz_file = "cargo-nightly-%s.tar.gz" % host_triple()
nightly_url = "https://s3.amazonaws.com/rust-lang-ci/cargo-builds/%s/%s" % \
(self.cargo_build_id(), tgz_file)
download_file("Cargo nightly", nightly_url, tgz_file)
print("Extracting Cargo nightly...")
nightly_dir = path.join(cargo_dir,
path.basename(tgz_file).replace(".tar.gz", ""))
extract(tgz_file, cargo_dir, movedir=nightly_dir)
print("Cargo ready.")
@Command('update-hsts-preload',
description='Download the HSTS preload list',
category='bootstrap')
def bootstrap_hsts_preload(self, force=False):
preload_filename = "hsts_preload.json"
preload_path = path.join(self.context.topdir, "resources")
chromium_hsts_url = "https://chromium.googlesource.com/chromium/src" + \
"/net/+/master/http/transport_security_state_static.json?format=TEXT"
try:
content_base64 = download_bytes("Chromium HSTS preload list", chromium_hsts_url)
except urllib2.URLError:
print("Unable to download chromium HSTS preload list; are you connected to the internet?")
sys.exit(1)
content_decoded = base64.b64decode(content_base64)
# The chromium "json" has single line comments in it which, of course,
# are non-standard/non-valid json. Simply strip them out before parsing
content_json = re.sub(r'(^|\s+)//.*$', '', content_decoded, flags=re.MULTILINE)
try:
pins_and_static_preloads = json.loads(content_json)
entries = {
"entries": [
{
"host": e["name"],
"include_subdomains": e.get("include_subdomains", False)
}
for e in pins_and_static_preloads["entries"]
]
}
with open(path.join(preload_path, preload_filename), 'w') as fd:
json.dump(entries, fd, indent=4)
except ValueError, e:
print("Unable to parse chromium HSTS preload list, has the format changed?")
sys.exit(1)
@Command('update-pub-domains',
description='Download the public domains list and update resources/public_domains.txt',
category='bootstrap')
def bootstrap_pub_suffix(self, force=False):
list_url = "https://publicsuffix.org/list/public_suffix_list.dat"
dst_filename = path.join(self.context.topdir, "resources", "public_domains.txt")
not_implemented_case = re.compile(r'^[^*]+\*')
try:
content = download_bytes("Public suffix list", list_url)
except urllib2.URLError:
print("Unable to download the public suffix list; are you connected to the internet?")
sys.exit(1)
lines = [l.strip() for l in content.decode("utf8").split("\n")]
suffixes = [l for l in lines if not l.startswith("//") and not l == ""]
with open(dst_filename, "wb") as fo:
for suffix in suffixes:
if not_implemented_case.match(suffix):
print("Warning: the new list contains a case that servo can't handle: %s" % suffix)
fo.write(suffix.encode("idna") + "\n")
@Command('clean-nightlies',
description='Clean unused nightly builds of Rust and Cargo',
category='bootstrap')
@CommandArgument('--force', '-f',
action='store_true',
help='Actually remove stuff')
def clean_nightlies(self, force=False):
rust_current = self.rust_path().split('/')[0]
cargo_current = self.cargo_build_id()
print("Current Rust version: " + rust_current)
print("Current Cargo version: " + cargo_current)
removing_anything = False
for current, base in [(rust_current, "rust"), (cargo_current, "cargo")]:
base = path.join(self.context.sharedir, base)
for name in os.listdir(base):
if name != current:
removing_anything = True
name = path.join(base, name)
if force:
print("Removing " + name)
if os.path.isdir(name):
shutil.rmtree(name)
else:
os.remove(name)
else:
print("Would remove " + name)
if not removing_anything:
print("Nothing to remove.")
elif not force:
print("Nothing done. "
"Run `./mach clean-nightlies -f` to actually remove.")<|fim▁end|> | tarball = "%s/rustc-nightly-%s.tar.gz" % (version, host_triple())
base_url = "https://s3.amazonaws.com/rust-lang-ci/rustc-builds"
if not self.config["build"]["llvm-assertions"]: |
<|file_name|>net.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import properties
from heat.engine.resources.openstack.neutron import neutron
from heat.engine import support
class Net(neutron.NeutronResource):
PROPERTIES = (
NAME, VALUE_SPECS, ADMIN_STATE_UP, TENANT_ID, SHARED,
DHCP_AGENT_IDS, PORT_SECURITY_ENABLED,
) = (
'name', 'value_specs', 'admin_state_up', 'tenant_id', 'shared',
'dhcp_agent_ids', 'port_security_enabled',
)
ATTRIBUTES = (
STATUS, NAME_ATTR, SUBNETS, ADMIN_STATE_UP_ATTR, TENANT_ID_ATTR,
PORT_SECURITY_ENABLED_ATTR, MTU_ATTR,
) = (
"status", "name", "subnets", "admin_state_up", "tenant_id",
"port_security_enabled", "mtu",
)
properties_schema = {
NAME: properties.Schema(
properties.Schema.STRING,
_('A string specifying a symbolic name for the network, which is '
'not required to be unique.'),
update_allowed=True
),
VALUE_SPECS: properties.Schema(
properties.Schema.MAP,
_('Extra parameters to include in the "network" object in the '
'creation request. Parameters are often specific to installed '
'hardware or extensions.'),
default={},
update_allowed=True
),
ADMIN_STATE_UP: properties.Schema(
properties.Schema.BOOLEAN,
_('A boolean value specifying the administrative status of the '
'network.'),
default=True,
update_allowed=True
),
TENANT_ID: properties.Schema(
properties.Schema.STRING,
_('The ID of the tenant which will own the network. Only '
'administrative users can set the tenant identifier; this '
'cannot be changed using authorization policies.')
),
SHARED: properties.Schema(
properties.Schema.BOOLEAN,
_('Whether this network should be shared across all tenants. '
'Note that the default policy setting restricts usage of this '
'attribute to administrative users only.'),
default=False,
update_allowed=True
),
DHCP_AGENT_IDS: properties.Schema(
properties.Schema.LIST,
_('The IDs of the DHCP agent to schedule the network. Note that '
'the default policy setting in Neutron restricts usage of this '
'property to administrative users only.'),
update_allowed=True
),
PORT_SECURITY_ENABLED: properties.Schema(
properties.Schema.BOOLEAN,
_('Flag to enable/disable port security on the network. It '
'provides the default value for the attribute of the ports '
'created on this network'),
update_allowed=True,
support_status=support.SupportStatus(version='5.0.0')
),
}
attributes_schema = {
STATUS: attributes.Schema(
_("The status of the network."),
type=attributes.Schema.STRING
),
NAME_ATTR: attributes.Schema(
_("The name of the network."),
type=attributes.Schema.STRING
),
SUBNETS: attributes.Schema(
_("Subnets of this network."),
type=attributes.Schema.LIST
),
ADMIN_STATE_UP_ATTR: attributes.Schema(
_("The administrative status of the network."),
type=attributes.Schema.STRING
),
TENANT_ID_ATTR: attributes.Schema(
_("The tenant owning this network."),
type=attributes.Schema.STRING
),
PORT_SECURITY_ENABLED_ATTR: attributes.Schema(
_("Port security enabled of the network."),
support_status=support.SupportStatus(version='5.0.0'),
type=attributes.Schema.BOOLEAN
),
MTU_ATTR: attributes.Schema(
_("The maximum transmission unit size(in bytes) for the network."),
support_status=support.SupportStatus(version='5.0.0'),
type=attributes.Schema.INTEGER
),
}
def handle_create(self):
props = self.prepare_properties(
self.properties,
self.physical_resource_name())
dhcp_agent_ids = props.pop(self.DHCP_AGENT_IDS, None)
net = self.neutron().create_network({'network': props})['network']
self.resource_id_set(net['id'])
if dhcp_agent_ids:
self._replace_dhcp_agents(dhcp_agent_ids)
def _show_resource(self):
return self.neutron().show_network(
self.resource_id)['network']
def check_create_complete(self, *args):
attributes = self._show_resource()
return self.is_built(attributes)
def handle_delete(self):
client = self.neutron()
try:
client.delete_network(self.resource_id)
except Exception as ex:
self.client_plugin().ignore_not_found(ex)
else:
return True
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
props = self.prepare_update_properties(json_snippet)
dhcp_agent_ids = props.pop(self.DHCP_AGENT_IDS, None)
if self.DHCP_AGENT_IDS in prop_diff:
if dhcp_agent_ids is not None:
self._replace_dhcp_agents(dhcp_agent_ids)
del prop_diff[self.DHCP_AGENT_IDS]
if len(prop_diff) > 0:
self.neutron().update_network(
self.resource_id, {'network': props})
def check_update_complete(self, *args):
attributes = self._show_resource()
return self.is_built(attributes)
def _replace_dhcp_agents(self, dhcp_agent_ids):
ret = self.neutron().list_dhcp_agent_hosting_networks(
self.resource_id)
old = set([agent['id'] for agent in ret['agents']])
new = set(dhcp_agent_ids)
for dhcp_agent_id in new - old:
try:
self.neutron().add_network_to_dhcp_agent(
dhcp_agent_id, {'network_id': self.resource_id})
except Exception as ex:
# if 409 is happened, the agent is already associated.
if not self.client_plugin().is_conflict(ex):
raise
for dhcp_agent_id in old - new:
try:<|fim▁hole|> self.neutron().remove_network_from_dhcp_agent(
dhcp_agent_id, self.resource_id)
except Exception as ex:
# assume 2 patterns about status_code following:
# 404: the network or agent is already gone
# 409: the network isn't scheduled by the dhcp_agent
if not (self.client_plugin().is_conflict(ex) or
self.client_plugin().is_not_found(ex)):
raise
def resource_mapping():
return {
'OS::Neutron::Net': Net,
}<|fim▁end|> |