_id (string) | title (string) | partition (string) | text (string) | language (string) | meta_information (dict)
---|---|---|---|---|---|
q276100
|
always_iterable
|
test
|
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item. If item is
None, an empty iterable is returned.
>>> always_iterable([1,2,3])
<list_iterator...>
>>> always_iterable('foo')
<tuple_iterator...>
>>> always_iterable(None)
<tuple_iterator...>
>>> always_iterable(range(10))
<range_iterator...>
>>> def _test_func(): yield "I'm iterable"
>>> print(next(always_iterable(_test_func())))
I'm iterable
Although mappings are iterable, treat each like a singleton, as
it's more like an object than a sequence.
>>> next(always_iterable(dict(a=1)))
{'a': 1}
"""
base_types = six.text_type, bytes, collections.abc.Mapping
return more_itertools.always_iterable(item, base_type=base_types)
|
python
|
{
"resource": ""
}
|
q276101
|
suppress_exceptions
|
test
|
def suppress_exceptions(callables, *exceptions):
"""
Call each callable in callables, suppressing any exceptions supplied. If
no exception classes are supplied, all Exceptions will be suppressed.
>>> import functools
>>> c1 = functools.partial(int, 'a')
>>> c2 = functools.partial(int, '10')
>>> list(suppress_exceptions((c1, c2)))
[10]
>>> list(suppress_exceptions((c1, c2), KeyError))
Traceback (most recent call last):
...
ValueError: invalid literal for int() with base 10: 'a'
"""
if not exceptions:
exceptions = (Exception,)
for callable in callables:
try:
yield callable()
except exceptions:
pass
|
python
|
{
"resource": ""
}
|
q276102
|
duplicates
|
test
|
def duplicates(*iterables, **kwargs):
"""
Yield duplicate items from any number of sorted iterables of items
>>> items_a = [1, 2, 3]
>>> items_b = [0, 3, 4, 5, 6]
>>> list(duplicates(items_a, items_b))
[(3, 3)]
It won't behave as you expect if the iterables aren't ordered
>>> items_b.append(1)
>>> list(duplicates(items_a, items_b))
[(3, 3)]
>>> list(duplicates(items_a, sorted(items_b)))
[(1, 1), (3, 3)]
This function is most interesting when it's operating on a key
of more complex objects.
>>> items_a = [dict(email='[email protected]', id=1)]
>>> items_b = [dict(email='[email protected]', id=2), dict(email='other')]
>>> dupe, = duplicates(items_a, items_b, key=operator.itemgetter('email'))
>>> dupe[0]['email'] == dupe[1]['email'] == '[email protected]'
True
>>> dupe[0]['id']
1
>>> dupe[1]['id']
2
"""
key = kwargs.pop('key', lambda x: x)
assert not kwargs
zipped = more_itertools.collate(*iterables, key=key)
grouped = itertools.groupby(zipped, key=key)
groups = (
tuple(g)
for k, g in grouped
)
def has_dupes(group):
return len(group) > 1
return filter(has_dupes, groups)
|
python
|
{
"resource": ""
}
|
q276103
|
assert_ordered
|
test
|
def assert_ordered(iterable, key=lambda x: x, comp=operator.le):
"""
Assert that for all items in the iterable, they're in order based on comp
>>> list(assert_ordered(range(5)))
[0, 1, 2, 3, 4]
>>> list(assert_ordered(range(5), comp=operator.ge))
Traceback (most recent call last):
...
AssertionError: 0 < 1
>>> list(assert_ordered(range(5, 0, -1), key=operator.neg))
[5, 4, 3, 2, 1]
"""
err_tmpl = (
"{pair[0]} > {pair[1]}" if comp is operator.le else
"{pair[0]} < {pair[1]}" if comp is operator.ge else
"not {comp} {pair}"
)
for pair in more_itertools.pairwise(iterable):
keyed = tuple(map(key, pair))
assert comp(*keyed), err_tmpl.format(**locals())
yield pair[0]
yield pair[1]
|
python
|
{
"resource": ""
}
|
q276104
|
_swap_on_miss
|
test
|
def _swap_on_miss(partition_result):
"""
Given a partition_dict result, if the partition missed, swap
the before and after.
"""
before, item, after = partition_result
return (before, item, after) if item else (after, item, before)
|
python
|
{
"resource": ""
}
|
q276105
|
partition_dict
|
test
|
def partition_dict(items, key):
"""
Given an ordered dictionary of items and a key in that dict,
return an ordered dict of items before, the keyed item, and
an ordered dict of items after.
>>> od = collections.OrderedDict(zip(range(5), 'abcde'))
>>> before, item, after = partition_dict(od, 3)
>>> before
OrderedDict([(0, 'a'), (1, 'b'), (2, 'c')])
>>> item
'd'
>>> after
OrderedDict([(4, 'e')])
Like str.partition, if the key is not found in the items,
the before will contain all items, item will be None, and
after will be an empty iterable.
>>> before, item, after = partition_dict(od, -1)
>>> before
OrderedDict([(0, 'a'), ..., (4, 'e')])
>>> item
>>> list(after)
[]
"""
def unmatched(pair):
test_key, item = pair
return test_key != key
items_iter = iter(items.items())
item = items.get(key)
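# takewhile consumes items_iter through the matched key, so the rest of
# the same iterator becomes the 'after' dict.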
left = collections.OrderedDict(itertools.takewhile(unmatched, items_iter))
right = collections.OrderedDict(items_iter)
return left, item, right
|
python
|
{
"resource": ""
}
|
q276106
|
GroupbySaved.get_first_n_queues
|
test
|
def get_first_n_queues(self, n):
"""
Run through the sequence until n queues are created and return
them. If fewer are created, return those plus empty iterables to
compensate.
"""
try:
while len(self.queues) < n:
self.__fetch__()
except StopIteration:
pass
values = list(self.queues.values())
missing = n - len(values)
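# Pad with empty iterators when the sequence produced fewer than n queues.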
values.extend(iter([]) for _ in range(missing))
return values
|
python
|
{
"resource": ""
}
|
q276107
|
Reusable.reset
|
test
|
def reset(self):
"""
Resets the iterator to the start.
Any remaining values in the current iteration are discarded.
"""
self.__iterator, self.__saved = itertools.tee(self.__saved)
|
python
|
{
"resource": ""
}
|
q276108
|
parse_as_var
|
test
|
def parse_as_var(parser, token):
"""
Parse the remainder of the token, to find a "as varname" statement.
:param parser: The "parser" object that ``@register.tag`` provides.
:type parser: :class:`~django.template.Parser`
:param token: The "token" object that ``@register.tag`` provides.
:type token: :class:`~django.template.Token` or split bits
"""
if isinstance(token, Token):
bits = token.split_contents()
else:
bits = token
as_var = None
if len(bits) > 2 and bits[-2] == 'as':
bits = bits[:]
as_var = bits.pop()
bits.pop() # as keyword
return bits, as_var
|
python
|
{
"resource": ""
}
|
q276109
|
template_tag
|
test
|
def template_tag(library, name):
"""
Decorator to register class tags
:param library: The template tag library, typically instantiated as ``register = Library()``.
:type library: :class:`~django.template.Library`
:param name: The name of the template tag
:type name: str
Example:
.. code-block:: python
@template_tag(register, 'my_tag')
class MyTag(BaseNode):
pass
"""
def _inner(cls):
if hasattr(cls, 'parse'):
compile_function = cls.parse
else:
# Hope that it's either a function, or cls with __init__(self, parser, token) method.
compile_function = cls
library.tag(name, compile_function)
return cls # Return the class itself to keep it in the module's namespace
return _inner
|
python
|
{
"resource": ""
}
|
q276110
|
PublicKeychain.descendant
|
test
|
def descendant(self, chain_path):
""" A descendant is a child many steps down.
"""
public_child = self.hdkeychain
chain_step_bytes = 4
max_bits_per_step = 2**31
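# Each 8-hex-digit (4-byte) chunk of the path selects one child index,
# reduced modulo 2**31 (presumably to stay in the non-hardened range).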
chain_steps = [
int(chain_path[i:i+chain_step_bytes*2], 16) % max_bits_per_step
for i in range(0, len(chain_path), chain_step_bytes*2)
]
for step in chain_steps:
public_child = public_child.get_child(step)
return PublicKeychain(public_child)
|
python
|
{
"resource": ""
}
|
q276111
|
SQLiteSchemaExtractor.fetch_sqlite_master
|
test
|
def fetch_sqlite_master(self):
"""
Get sqlite_master table information as a list of dictionaries.
:return: sqlite_master table information.
:rtype: list
:Sample Code:
.. code:: python
from sqliteschema import SQLiteSchemaExtractor
print(json.dumps(SQLiteSchemaExtractor("sample.sqlite").fetch_sqlite_master(), indent=4))
:Output:
.. code-block:: json
[
{
"tbl_name": "sample_table",
"sql": "CREATE TABLE 'sample_table' ('a' INTEGER, 'b' REAL, 'c' TEXT, 'd' REAL, 'e' TEXT)",
"type": "table",
"name": "sample_table",
"rootpage": 2
},
{
"tbl_name": "sample_table",
"sql": "CREATE INDEX sample_table_a_index ON sample_table('a')",
"type": "index",
"name": "sample_table_a_index",
"rootpage": 3
}
]
"""
sqlite_master_record_list = []
result = self.__cur.execute(
"SELECT {:s} FROM sqlite_master".format(", ".join(self._SQLITE_MASTER_ATTR_NAME_LIST))
)
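# Pair each configured sqlite_master column name with its value in the row.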
for record in result.fetchall():
sqlite_master_record_list.append(
dict(
[
[attr_name, item]
for attr_name, item in zip(self._SQLITE_MASTER_ATTR_NAME_LIST, record)
]
)
)
return sqlite_master_record_list
|
python
|
{
"resource": ""
}
|
q276112
|
object_iter
|
test
|
def object_iter(obj, parent=None, parent_key=None, idx=None,
siblings=None):
"""Yields each node of object graph in postorder."""
obj_node = Node(value=obj, parent=parent, parent_key=parent_key,
siblings=siblings, idx=idx)
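# Recurse into list and mapping children first so the node itself is
# yielded last (postorder).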
if isinstance(obj, list):
_siblings = len(obj)
for i, elem in enumerate(obj):
for node in object_iter(elem, obj_node, None, i + 1, _siblings):
yield node
elif isinstance(obj, collections.abc.Mapping):
for key in obj:
for node in object_iter(obj[key], obj_node, key):
yield node
yield obj_node
|
python
|
{
"resource": ""
}
|
q276113
|
select
|
test
|
def select(selector, obj):
"""Appy selector to obj and return matching nodes.
If only one node is found, return it, otherwise return a list of matches.
Returns False on syntax error. None if no results found.
"""
parser = Parser(obj)
try:
return parser.parse(selector)
except SelectorSyntaxError as e:
log.exception(e)
return False
|
python
|
{
"resource": ""
}
|
q276114
|
Parser.parse
|
test
|
def parse(self, selector):
"""Accept a list of tokens. Returns matched nodes of self.obj."""
log.debug(self.obj)
tokens = lex(selector)
if self.peek(tokens, 'operator') == '*':
self.match(tokens, 'operator')
results = list(object_iter(self.obj))
else:
results = self.selector_production(tokens)
results = [node.value for node in results]
# single results should be returned as a primitive
if len(results) == 1:
return results[0]
elif not len(results):
return None
return results
|
python
|
{
"resource": ""
}
|
q276115
|
Parser.selector_production
|
test
|
def selector_production(self, tokens):
"""Production for a full selector."""
validators = []
# the following productions should return predicate functions.
if self.peek(tokens, 'type'):
type_ = self.match(tokens, 'type')
validators.append(self.type_production(type_))
if self.peek(tokens, 'identifier'):
key = self.match(tokens, 'identifier')
validators.append(self.key_production(key))
if self.peek(tokens, 'pclass'):
pclass = self.match(tokens, 'pclass')
validators.append(self.pclass_production(pclass))
if self.peek(tokens, 'nth_func'):
nth_func = self.match(tokens, 'nth_func')
validators.append(self.nth_child_production(nth_func, tokens))
if self.peek(tokens, 'pclass_func'):
pclass_func = self.match(tokens, 'pclass_func')
validators.append(self.pclass_func_production(pclass_func, tokens))
if not len(validators):
raise SelectorSyntaxError('no selector recognized.')
# apply validators from a selector expression to self.obj
results = self._match_nodes(validators, self.obj)
if self.peek(tokens, 'operator'):
operator = self.match(tokens, 'operator')
rvals = self.selector_production(tokens)
if operator == ',':
results.extend(rvals)
elif operator == '>':
results = self.parents(results, rvals)
elif operator == '~':
results = self.siblings(results, rvals)
elif operator == ' ':
results = self.ancestors(results, rvals)
else:
raise SelectorSyntaxError("unrecognized operator '%s'"
% operator)
else:
if len(tokens):
rvals = self.selector_production(tokens)
results = self.ancestors(results, rvals)
return results
|
python
|
{
"resource": ""
}
|
q276116
|
Parser.parents
|
test
|
def parents(self, lhs, rhs):
"""Find nodes in rhs which have parents in lhs."""
return [node for node in rhs if node.parent in lhs]
|
python
|
{
"resource": ""
}
|
q276117
|
Parser.ancestors
|
test
|
def ancestors(self, lhs, rhs):
"""Return nodes from rhs which have ancestors in lhs."""
def _search(node):
if node in lhs:
return True
if not node.parent:
return False
return _search(node.parent)
return [node for node in rhs if _search(node)]
|
python
|
{
"resource": ""
}
|
q276118
|
Parser.siblings
|
test
|
def siblings(self, lhs, rhs):
"""Find nodes in rhs having common parents in lhs."""
parents = [node.parent for node in lhs]
return [node for node in rhs if node.parent in parents]
|
python
|
{
"resource": ""
}
|
q276119
|
Parser.nth_child_production
|
test
|
def nth_child_production(self, lexeme, tokens):
"""Parse args and pass them to pclass_func_validator."""
args = self.match(tokens, 'expr')
pat = self.nth_child_pat.match(args)
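# Judging from the branches below, the pattern groups are: (5) 'odd'/'even',
# (6) a bare integer, and (1)-(4) the sign/coefficient and sign/offset of 'an+b'.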
if pat.group(5):
a = 2
b = 1 if pat.group(5) == 'odd' else 0
elif pat.group(6):
a = 0
b = int(pat.group(6))
else:
sign = pat.group(1) if pat.group(1) else '+'
coef = pat.group(2) if pat.group(2) else '1'
a = int(sign + coef)
b = int(pat.group(3) + pat.group(4)) if pat.group(3) else 0
reverse = False
if lexeme == 'nth-last-child':
reverse = True
def validate(node):
"""This crazy function taken from jsonselect.js:444."""
if not node.siblings:
return False
idx = node.idx - 1
tot = node.siblings
if reverse:
idx = tot - idx
else:
idx += 1
if a == 0:
m = b == idx
else:
mod = (idx - b) % a
m = not mod and (idx * a + b) >= 0
return m
return validate
|
python
|
{
"resource": ""
}
|
q276120
|
Parser._match_nodes
|
test
|
def _match_nodes(self, validators, obj):
"""Apply each validator in validators to each node in obj.
Return each node in obj which matches all validators.
"""
results = []
for node in object_iter(obj):
if all([validate(node) for validate in validators]):
results.append(node)
return results
|
python
|
{
"resource": ""
}
|
q276121
|
ping
|
test
|
def ping(dst, count, inter=0.2, maxwait=1000, size=64):
"""Sends ICMP echo requests to destination `dst` `count` times.
Returns a deferred which fires when responses are finished.
"""
def _then(result, p):
p.stopListening()
return result
d = defer.Deferred()
p = ICMPPort(0, ICMPPing(d, dst, count, inter, maxwait, size), "", 8192, reactor)
p.startListening()
return d.addCallback(_then, p)
|
python
|
{
"resource": ""
}
|
q276122
|
HTTPRequest.getBody
|
test
|
def getBody(self, url, method='GET', headers=None, data=None, socket=None):
"""Make an HTTP request and return the body
"""
# Use None instead of a mutable default so headers aren't shared across calls
if headers is None:
headers = {}
if 'User-Agent' not in headers:
headers['User-Agent'] = ['Tensor HTTP checker']
return self.request(url, method, headers, data, socket)
|
python
|
{
"resource": ""
}
|
q276123
|
PersistentCache.expire
|
test
|
def expire(self, age):
"""Expire any items in the cache older than `age` seconds"""
now = time.time()
cache = self._acquire_cache()
expired = [k for k, v in cache.items() if (now - v[0]) > age]
for k in expired:
if k in cache:
del cache[k]
if k in self.store:
del self.store[k]
self._write_cache(cache)
|
python
|
{
"resource": ""
}
|
q276124
|
PersistentCache.set
|
test
|
def set(self, k, v):
"""Set a key `k` to value `v`"""
self.store[k] = (time.time(), v)
self._persist()
|
python
|
{
"resource": ""
}
|
q276125
|
PersistentCache.get
|
test
|
def get(self, k):
"""Returns key contents, and modify time"""
if self._changed():
self._read()
if k in self.store:
return tuple(self.store[k])
else:
return None
|
python
|
{
"resource": ""
}
|
q276126
|
PersistentCache.contains
|
test
|
def contains(self, k):
"""Return True if key `k` exists"""
if self._changed():
self._read()
return k in self.store
|
python
|
{
"resource": ""
}
|
q276127
|
NistBeacon.chain_check
|
test
|
def chain_check(cls, timestamp: int) -> bool:
"""
Given a record timestamp, verify the chain integrity.
:param timestamp: UNIX time / POSIX time / Epoch time
:return: 'True' if the timestamp fits the chain. 'False' otherwise.
"""
# Creation is messy.
# You want genius, you get madness; two sides of the same coin.
# ... I'm sure this can be cleaned up. However, let's test it first.
record = cls.get_record(timestamp)
if not isinstance(record, NistBeaconValue):
# Don't you dare try to play me
return False
prev_record = cls.get_previous(record.timestamp)
next_record = cls.get_next(record.timestamp)
if prev_record is None and next_record is None:
# Uh, how did you manage to do this?
# I'm not even mad, that's amazing.
return False
if (
isinstance(prev_record, NistBeaconValue) and
isinstance(next_record, NistBeaconValue)
):
# Majority case, somewhere in the middle of the chain
# True if:
# - All three records have proper signatures
# - The requested record's previous output equals previous
# - The next possible record's previous output equals the record
return (
record.valid_signature and
prev_record.valid_signature and
next_record.valid_signature and
record.previous_output_value == prev_record.output_value and
next_record.previous_output_value == record.output_value
)
if (
prev_record is None and
isinstance(next_record, NistBeaconValue)
):
# Edge case, this was potentially the first record of all time
return (
record.valid_signature and
next_record.valid_signature and
cls._INIT_RECORD == record and
next_record.previous_output_value == record.output_value
)
if (
isinstance(prev_record, NistBeaconValue) and
next_record is None
):
# Edge case, this was potentially the latest and greatest
return (
record.valid_signature and
prev_record.valid_signature and
record.previous_output_value == prev_record.output_value
)
|
python
|
{
"resource": ""
}
|
q276128
|
NistBeaconValue.from_json
|
test
|
def from_json(cls, input_json: str) -> 'NistBeaconValue':
"""
Convert a string of JSON which represents a NIST randomness beacon
value into a 'NistBeaconValue' object.
:param input_json: JSON to build a 'NistBeaconValue' from
:return: A 'NistBeaconValue' object, 'None' otherwise
"""
try:
data_dict = json.loads(input_json)
except ValueError:
return None
# Our required values are "must haves". This makes it simple
# to verify we loaded everything out of JSON correctly.
required_values = {
cls._KEY_FREQUENCY: None,
cls._KEY_OUTPUT_VALUE: None,
cls._KEY_PREVIOUS_OUTPUT_VALUE: None,
cls._KEY_SEED_VALUE: None,
cls._KEY_SIGNATURE_VALUE: None,
cls._KEY_STATUS_CODE: None,
cls._KEY_TIMESTAMP: None,
cls._KEY_VERSION: None,
}
for key in required_values:
if key in data_dict:
required_values[key] = data_dict[key]
# Confirm that the required values are set, and not 'None'
if None in required_values.values():
return None
# We have all the required values, return a node object
return cls(
version=required_values[cls._KEY_VERSION],
frequency=int(required_values[cls._KEY_FREQUENCY]),
timestamp=int(required_values[cls._KEY_TIMESTAMP]),
seed_value=required_values[cls._KEY_SEED_VALUE],
previous_output_value=required_values[
cls._KEY_PREVIOUS_OUTPUT_VALUE
],
signature_value=required_values[cls._KEY_SIGNATURE_VALUE],
output_value=required_values[cls._KEY_OUTPUT_VALUE],
status_code=required_values[cls._KEY_STATUS_CODE],
)
|
python
|
{
"resource": ""
}
|
q276129
|
NistBeaconValue.from_xml
|
test
|
def from_xml(cls, input_xml: str) -> 'NistBeaconValue':
"""
Convert a string of XML which represents a NIST Randomness Beacon value
into a 'NistBeaconValue' object.
:param input_xml: XML to build a 'NistBeaconValue' from
:return: A 'NistBeaconValue' object, 'None' otherwise
"""
invalid_result = None
understood_namespaces = {
'nist-0.1': 'http://beacon.nist.gov/record/0.1/',
}
# Our required values are "must haves". This makes it simple
# to verify we loaded everything out of XML correctly.
required_values = {
cls._KEY_FREQUENCY: None,
cls._KEY_OUTPUT_VALUE: None,
cls._KEY_PREVIOUS_OUTPUT_VALUE: None,
cls._KEY_SEED_VALUE: None,
cls._KEY_SIGNATURE_VALUE: None,
cls._KEY_STATUS_CODE: None,
cls._KEY_TIMESTAMP: None,
cls._KEY_VERSION: None,
}
# First attempt to load the xml, return 'None' on ParseError
try:
tree = ElementTree.ElementTree(ElementTree.fromstring(input_xml))
except ElementTree.ParseError:
return invalid_result
# Using the required values, let's load the xml values in
for key in required_values:
discovered_element = tree.find(
"{0}:{1}".format('nist-0.1', key),
namespaces=understood_namespaces,
)
if not isinstance(discovered_element, ElementTree.Element):
continue
# Bad pylint message - https://github.com/PyCQA/pylint/issues/476
# pylint: disable=no-member
required_values[key] = discovered_element.text
# Confirm that the required values are set, and not 'None'
if None in required_values.values():
return invalid_result
# We have all the required values, return a node object
return cls(
version=required_values[cls._KEY_VERSION],
frequency=int(required_values[cls._KEY_FREQUENCY]),
timestamp=int(required_values[cls._KEY_TIMESTAMP]),
seed_value=required_values[cls._KEY_SEED_VALUE],
previous_output_value=required_values[
cls._KEY_PREVIOUS_OUTPUT_VALUE
],
signature_value=required_values[cls._KEY_SIGNATURE_VALUE],
output_value=required_values[cls._KEY_OUTPUT_VALUE],
status_code=required_values[cls._KEY_STATUS_CODE],
)
|
python
|
{
"resource": ""
}
|
q276130
|
MinifiedJsTemplateResponse.rendered_content
|
test
|
def rendered_content(self):
"""Returns a 'minified' version of the javascript content"""
template = self.resolve_template(self.template_name)
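# Django 1.8 moved to template backends, which wrap the template object;
# older versions expose .name directly.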
if django.VERSION < (1, 8):
if template.name.endswith('.min'):
return super(MinifiedJsTemplateResponse, self).rendered_content
else:
if template.template.name.endswith('.min'):
return super(MinifiedJsTemplateResponse, self).rendered_content
# if no minified template exists, minify the response
content = super(MinifiedJsTemplateResponse, self).rendered_content
content = jsmin.jsmin(content)
return content
|
python
|
{
"resource": ""
}
|
q276131
|
LogFollower.get_fn
|
test
|
def get_fn(self, fn, max_lines=None):
"""Passes each parsed log line to `fn`
This is a better idea than storing a giant log file in memory
"""
stat = os.stat(self.logfile)
if (stat.st_ino == self.lastInode) and (stat.st_size == self.lastSize):
# Nothing new
return []
# Handle rollover and rotations vaguely
if (stat.st_ino != self.lastInode) or (stat.st_size < self.lastSize):
self.lastSize = 0
fi = open(self.logfile, 'rt')
fi.seek(self.lastSize)
self.lastInode = stat.st_ino
lines = 0
for i in fi:
lines += 1
if max_lines and (lines > max_lines):
self.storeLast()
fi.close()
return
if '\n' in i:
self.lastSize += len(i)
if self.parser:
line = self.parser(i.strip('\n'))
else:
line = i.strip('\n')
fn(line)
self.storeLast()
fi.close()
|
python
|
{
"resource": ""
}
|
q276132
|
LogFollower.get
|
test
|
def get(self, max_lines=None):
"""Returns a big list of all log lines since the last run
"""
rows = []
self.get_fn(lambda row: rows.append(row), max_lines=max_lines)
return rows
|
python
|
{
"resource": ""
}
|
q276133
|
TokenMixin.validate_token
|
test
|
def validate_token(self, token, expected_data=None):
"""Validate secret link token.
:param token: Token value.
:param expected_data: A dictionary of key/values that must be present
in the data part of the token (i.e. included via ``extra_data`` in
``create_token``).
"""
try:
# Load token and remove random data.
data = self.load_token(token)
# Compare expected data with data in token.
if expected_data:
for k in expected_data:
if expected_data[k] != data["data"].get(k):
return None
return data
except BadData:
return None
|
python
|
{
"resource": ""
}
|
q276134
|
EncryptedTokenMixIn.engine
|
test
|
def engine(self):
"""Get cryptographic engine."""
if not hasattr(self, '_engine'):
from cryptography.fernet import Fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
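# Derive a stable 32-byte Fernet key from the application's SECRET_KEY via SHA-256.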
digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(current_app.config['SECRET_KEY'].encode('utf8'))
fernet_key = urlsafe_b64encode(digest.finalize())
self._engine = Fernet(fernet_key)
return self._engine
|
python
|
{
"resource": ""
}
|
q276135
|
EmailConfirmationSerializer.compat_validate_token
|
test
|
def compat_validate_token(cls, *args, **kwargs):
"""Multiple algorithm-compatible token validation."""
data = None
for algorithm in SUPPORTED_DIGEST_ALGORITHMS:
data = cls(algorithm_name=algorithm).validate_token(
*args, **kwargs)
if not data: # move to next algorithm
continue
return data
|
python
|
{
"resource": ""
}
|
q276136
|
SecretLinkFactory.create_token
|
test
|
def create_token(cls, obj_id, data, expires_at=None):
"""Create the secret link token."""
if expires_at:
s = TimedSecretLinkSerializer(expires_at=expires_at)
else:
s = SecretLinkSerializer()
return s.create_token(obj_id, data)
|
python
|
{
"resource": ""
}
|
q276137
|
Counter32
|
test
|
def Counter32(a, b, delta):
"""32bit counter aggregator with wrapping
"""
if b < a:
c = 4294967295 - a
return (c + b) / float(delta)
return (b - a) / float(delta)
|
python
|
{
"resource": ""
}
|
q276138
|
Counter64
|
test
|
def Counter64(a, b, delta):
"""64bit counter aggregator with wrapping
"""
if b < a:
c = 18446744073709551615 - a
return (c + b) / float(delta)
return (b - a) / float(delta)
|
python
|
{
"resource": ""
}
|
q276139
|
average_duration
|
test
|
def average_duration(total_duration, visits):
""" Method to calculate and format an average duration safely """
if not visits:
seconds = 0
else:
seconds = int(round(total_duration / Decimal(visits)))
duration = timedelta(seconds=seconds)
return str(duration)
|
python
|
{
"resource": ""
}
|
q276140
|
TensorService.setupOutputs
|
test
|
def setupOutputs(self, config):
"""Setup output processors"""
if self.proto == 'tcp':
defaultOutput = {
'output': 'tensor.outputs.riemann.RiemannTCP',
'server': self.server,
'port': self.port
}
else:
defaultOutput = {
'output': 'tensor.outputs.riemann.RiemannUDP',
'server': self.server,
'port': self.port
}
outputs = config.get('outputs', [defaultOutput])
for output in outputs:
if 'debug' not in output:
output['debug'] = self.debug
cl = output['output'].split('.')[-1] # class
path = '.'.join(output['output'].split('.')[:-1]) # import path
# Import the module and construct the output object
outputObj = getattr(
importlib.import_module(path), cl)(output, self)
name = output.get('name', None)
# Add the output to our routing hash
if name in self.outputs:
self.outputs[name].append(outputObj)
else:
self.outputs[name] = [outputObj]
# connect the output
reactor.callLater(0, outputObj.createClient)
|
python
|
{
"resource": ""
}
|
q276141
|
TensorService.setupSources
|
test
|
def setupSources(self, config):
"""Sets up source objects from the given config"""
sources = config.get('sources', [])
for source in sources:
src = self.createSource(source)
self.setupTriggers(source, src)
self.sources.append(src)
|
python
|
{
"resource": ""
}
|
q276142
|
TensorService.sendEvent
|
test
|
def sendEvent(self, source, events):
"""Callback that all event sources call when they have a new event
or list of events
"""
if isinstance(events, list):
self.eventCounter += len(events)
else:
self.eventCounter += 1
events = [events]
queue = self._aggregateQueue(events)
if queue:
if (source in self.critical) or (source in self.warn):
self.setStates(source, queue)
self.routeEvent(source, queue)
queue = []
self.lastEvents[source] = time.time()
|
python
|
{
"resource": ""
}
|
q276143
|
TensorService.sourceWatchdog
|
test
|
def sourceWatchdog(self):
"""Watchdog timer function.
Recreates sources which have not generated events in 10*interval if
they have watchdog set to true in their configuration
"""
for i, source in enumerate(self.sources):
if not source.config.get('watchdog', False):
continue
sn = repr(source)
last = self.lastEvents.get(source, None)
if last:
try:
if last < (time.time()-(source.inter*10)):
log.msg("Trying to restart stale source %s: %ss" % (
sn, int(time.time() - last)
))
s = self.sources.pop(i)
try:
s.t.stop()
except Exception as e:
log.msg("Could not stop timer for %s: %s" % (
sn, e))
config = copy.deepcopy(s.config)
del self.lastEvents[source]
del s, source
source = self.createSource(config)
reactor.callLater(0, self._startSource, source)
except Exception as e:
log.msg("Could not reset source %s: %s" % (
sn, e))
|
python
|
{
"resource": ""
}
|
q276144
|
ApacheLogParser._parse_format
|
test
|
def _parse_format(self, format):
"""
Converts the input format to a regular
expression, as well as extracting fields
Raises an exception if it couldn't compile
the generated regex.
"""
format = format.strip()
format = re.sub('[ \t]+',' ',format)
subpatterns = []
findquotes = re.compile(r'^\\"')
findreferreragent = re.compile('Referer|User-Agent')
findpercent = re.compile('^%.*t$')
lstripquotes = re.compile(r'^\\"')
rstripquotes = re.compile(r'\\"$')
header = re.compile(r'.*%\{([^\}]+)\}i')
for element in format.split(' '):
hasquotes = 0
if findquotes.search(element): hasquotes = 1
if hasquotes:
element = lstripquotes.sub('', element)
element = rstripquotes.sub('', element)
head = header.match(element)
if head:
self._names.append(head.groups()[0].lower())
self._types.append(str)
else:
self._names.append(self.alias(element))
self._types.append(self.types.get(element, [None, str])[1])
subpattern = r'(\S*)'
if hasquotes:
if element == '%r' or findreferreragent.search(element):
subpattern = r'\"([^"\\]*(?:\\.[^"\\]*)*)\"'
else:
subpattern = r'\"([^\"]*)\"'
elif findpercent.search(element):
subpattern = r'(\[[^\]]+\])'
elif element == '%U':
subpattern = '(.+?)'
subpatterns.append(subpattern)
self._pattern = '^' + ' '.join(subpatterns) + '$'
try:
self._regex = re.compile(self._pattern)
except Exception as e:
raise ApacheLogParserError(e)
|
python
|
{
"resource": ""
}
|
q276145
|
ApacheLogParser.parse
|
test
|
def parse(self, line):
"""
Parses a single line from the log file and returns
a dictionary of its contents.
Raises an exception if it couldn't parse the line
"""
line = line.strip()
match = self._regex.match(line)
if match:
data = {}
for i, e in enumerate(match.groups()):
if e == "-":
k, v = self._names[i], None
else:
k, v = self._names[i], self._types[i](e)
data[k] = v
return data
raise ApacheLogParserError("Unable to parse: %s" % line)
|
python
|
{
"resource": ""
}
|
q276146
|
validate_expires_at
|
test
|
def validate_expires_at(form, field):
"""Validate that date is in the future."""
if form.accept.data:
if not field.data or datetime.utcnow().date() >= field.data:
raise validators.StopValidation(_(
"Please provide a future date."
))
if not field.data or \
datetime.utcnow().date() + timedelta(days=365) < field.data:
raise validators.StopValidation(_(
"Please provide a date no more than 1 year into the future."
))
|
python
|
{
"resource": ""
}
|
q276147
|
ApprovalForm.validate_message
|
test
|
def validate_message(form, field):
"""Validate message."""
if form.reject.data and not field.data.strip():
raise validators.ValidationError(
_("You are required to provide message to the requester when"
" you reject a request.")
)
|
python
|
{
"resource": ""
}
|
q276148
|
verify_token
|
test
|
def verify_token():
"""Verify token and save in session if it's valid."""
try:
from .models import SecretLink
token = request.args['token']
# if the token is valid
if token and SecretLink.validate_token(token, {}):
# then save in session the token
session['accessrequests-secret-token'] = token
except KeyError:
pass
|
python
|
{
"resource": ""
}
|
q276149
|
Device.name
|
test
|
def name(self):
""" Return a basic meaningful name based on device type """
if (
self.device_type and
self.device_type.code in (DeviceType.MOBILE, DeviceType.TABLET)
):
return self.device
else:
return self.browser
|
python
|
{
"resource": ""
}
|
q276150
|
_warn_node
|
test
|
def _warn_node(self, msg, *args, **kwargs):
"""Do not warn on external images."""
if not msg.startswith('nonlocal image URI found:'):
_warn_node_old(self, msg, *args, **kwargs)
|
python
|
{
"resource": ""
}
|
q276151
|
connect_receivers
|
test
|
def connect_receivers():
"""Connect receivers to signals."""
request_created.connect(send_email_validation)
request_confirmed.connect(send_confirmed_notifications)
request_rejected.connect(send_reject_notification)
# Order is important:
request_accepted.connect(create_secret_link)
request_accepted.connect(send_accept_notification)
|
python
|
{
"resource": ""
}
|
q276152
|
create_secret_link
|
test
|
def create_secret_link(request, message=None, expires_at=None):
"""Receiver for request-accepted signal."""
pid, record = get_record(request.recid)
if not record:
raise RecordNotFound(request.recid)
description = render_template(
"zenodo_accessrequests/link_description.tpl",
request=request,
record=record,
pid=pid,
expires_at=expires_at,
message=message,
)
request.create_secret_link(
record["title"],
description=description,
expires_at=expires_at
)
|
python
|
{
"resource": ""
}
|
q276153
|
send_accept_notification
|
test
|
def send_accept_notification(request, message=None, expires_at=None):
"""Receiver for request-accepted signal to send email notification."""
pid, record = get_record(request.recid)
_send_notification(
request.sender_email,
_("Access request accepted"),
"zenodo_accessrequests/emails/accepted.tpl",
request=request,
record=record,
pid=pid,
record_link=request.link.get_absolute_url('invenio_records_ui.recid'),
message=message,
expires_at=expires_at,
)
|
python
|
{
"resource": ""
}
|
q276154
|
send_confirmed_notifications
|
test
|
def send_confirmed_notifications(request):
"""Receiver for request-confirmed signal to send email notification."""
pid, record = get_record(request.recid)
if record is None:
current_app.logger.error("Cannot retrieve record %s. Emails not sent"
% request.recid)
return
title = _("Access request: %(record)s", record=record["title"])
_send_notification(
request.receiver.email,
title,
"zenodo_accessrequests/emails/new_request.tpl",
request=request,
record=record,
pid=pid,
)
_send_notification(
request.sender_email,
title,
"zenodo_accessrequests/emails/confirmation.tpl",
request=request,
record=record,
pid=pid,
)
|
python
|
{
"resource": ""
}
|
q276155
|
send_email_validation
|
test
|
def send_email_validation(request):
"""Receiver for request-created signal to send email notification."""
token = EmailConfirmationSerializer().create_token(
request.id, dict(email=request.sender_email)
)
pid, record = get_record(request.recid)
_send_notification(
request.sender_email,
_("Access request verification"),
"zenodo_accessrequests/emails/validate_email.tpl",
request=request,
record=record,
pid=pid,
days=timedelta(
seconds=current_app.config["ACCESSREQUESTS_CONFIRMLINK_EXPIRES_IN"]
).days,
confirm_link=url_for(
"invenio_records_ui.recid_access_request_email_confirm",
pid_value=request.recid,
token=token,
_external=True,
)
)
|
python
|
{
"resource": ""
}
|
q276156
|
send_reject_notification
|
test
|
def send_reject_notification(request, message=None):
"""Receiver for request-rejected signal to send email notification."""
pid, record = get_record(request.recid)
_send_notification(
request.sender_email,
_("Access request rejected"),
"zenodo_accessrequests/emails/rejected.tpl",
request=request,
record=record,
pid=pid,
message=message,
)
|
python
|
{
"resource": ""
}
|
q276157
|
_send_notification
|
test
|
def _send_notification(to, subject, template, **ctx):
"""Render a template and send as email."""
msg = Message(
subject,
sender=current_app.config.get('SUPPORT_EMAIL'),
recipients=[to]
)
msg.body = render_template(template, **ctx)
send_email.delay(msg.__dict__)
|
python
|
{
"resource": ""
}
|
q276158
|
SecretLink.create
|
test
|
def create(cls, title, owner, extra_data, description="", expires_at=None):
"""Create a new secret link."""
if isinstance(expires_at, date):
expires_at = datetime.combine(expires_at, datetime.min.time())
with db.session.begin_nested():
obj = cls(
owner=owner,
title=title,
description=description,
expires_at=expires_at,
token='',
)
db.session.add(obj)
with db.session.begin_nested():
# Create token (dependent on obj.id and recid)
obj.token = SecretLinkFactory.create_token(
obj.id, extra_data, expires_at=expires_at
).decode('utf8')
link_created.send(obj)
return obj
|
python
|
{
"resource": ""
}
|
q276159
|
SecretLink.validate_token
|
test
|
def validate_token(cls, token, expected_data):
"""Validate a secret link token.
Only queries the database if token is valid to determine that the token
has not been revoked.
"""
data = SecretLinkFactory.validate_token(
token, expected_data=expected_data
)
if data:
link = cls.query.get(data['id'])
if link and link.is_valid():
return True
return False
|
python
|
{
"resource": ""
}
|
q276160
|
SecretLink.revoke
|
test
|
def revoke(self):
"""Revoken a secret link."""
if self.revoked_at is None:
with db.session.begin_nested():
self.revoked_at = datetime.utcnow()
link_revoked.send(self)
return True
return False
|
python
|
{
"resource": ""
}
|
q276161
|
AccessRequest.create
|
test
|
def create(cls, recid=None, receiver=None, sender_full_name=None,
sender_email=None, justification=None, sender=None):
"""Create a new access request.
:param recid: Record id (required).
:param receiver: User object of receiver (required).
:param sender_full_name: Full name of sender (required).
:param sender_email: Email address of sender (required).
:param justification: Justification message (required).
:param sender: User object of sender (optional).
"""
sender_user_id = None if sender is None else sender.id
assert recid
assert receiver
assert sender_full_name
assert sender_email
assert justification
# Determine status
status = RequestStatus.EMAIL_VALIDATION
if sender and sender.confirmed_at:
status = RequestStatus.PENDING
with db.session.begin_nested():
# Create object
obj = cls(
status=status,
recid=recid,
receiver_user_id=receiver.id,
sender_user_id=sender_user_id,
sender_full_name=sender_full_name,
sender_email=sender_email,
justification=justification
)
db.session.add(obj)
# Send signal
if obj.status == RequestStatus.EMAIL_VALIDATION:
request_created.send(obj)
else:
request_confirmed.send(obj)
return obj
|
python
|
{
"resource": ""
}
|
q276162
|
AccessRequest.get_by_receiver
|
test
|
def get_by_receiver(cls, request_id, user):
"""Get access request for a specific receiver."""
return cls.query.filter_by(
id=request_id,
receiver_user_id=user.id
).first()
|
python
|
{
"resource": ""
}
|
q276163
|
AccessRequest.confirm_email
|
test
|
def confirm_email(self):
"""Confirm that senders email is valid."""
with db.session.begin_nested():
if self.status != RequestStatus.EMAIL_VALIDATION:
raise InvalidRequestStateError(RequestStatus.EMAIL_VALIDATION)
self.status = RequestStatus.PENDING
request_confirmed.send(self)
|
python
|
{
"resource": ""
}
|
q276164
|
AccessRequest.accept
|
test
|
def accept(self, message=None, expires_at=None):
"""Accept request."""
with db.session.begin_nested():
if self.status != RequestStatus.PENDING:
raise InvalidRequestStateError(RequestStatus.PENDING)
self.status = RequestStatus.ACCEPTED
request_accepted.send(self, message=message, expires_at=expires_at)
|
python
|
{
"resource": ""
}
|
q276165
|
AccessRequest.reject
|
test
|
def reject(self, message=None):
"""Reject request."""
with db.session.begin_nested():
if self.status != RequestStatus.PENDING:
raise InvalidRequestStateError(RequestStatus.PENDING)
self.status = RequestStatus.REJECTED
request_rejected.send(self, message=message)
|
python
|
{
"resource": ""
}
|
q276166
|
AccessRequest.create_secret_link
|
test
|
def create_secret_link(self, title, description=None, expires_at=None):
"""Create a secret link from request."""
self.link = SecretLink.create(
title,
self.receiver,
extra_data=dict(recid=self.recid),
description=description,
expires_at=expires_at,
)
return self.link
|
python
|
{
"resource": ""
}
|
q276167
|
NistBeaconCrypto.get_hash
|
test
|
def get_hash(
cls,
version: str,
frequency: int,
timestamp: int,
seed_value: str,
prev_output: str,
status_code: str,
) -> SHA512Hash:
"""
Given required properties from a NistBeaconValue,
compute the SHA512Hash object.
:param version: NistBeaconValue.version
:param frequency: NistBeaconValue.frequency
:param timestamp: NistBeaconValue.timestamp
:param seed_value: NistBeaconValue.seed_value
:param prev_output: NistBeaconValue.previous_output_value
:param status_code: NistBeaconValue.status_code
:return: SHA512 Hash for NistBeaconValue signature verification
"""
return SHA512.new(
version.encode() +
struct.pack(
'>1I1Q64s64s1I',
frequency,
timestamp,
binascii.a2b_hex(seed_value),
binascii.a2b_hex(prev_output),
int(status_code),
)
)
|
python
|
{
"resource": ""
}
|
q276168
|
NistBeaconCrypto.verify
|
test
|
def verify(
cls,
timestamp: int,
message_hash: SHA512Hash,
signature: bytes,
) -> bool:
"""
Verify a given NIST message hash and signature for a beacon value.
:param timestamp: The timestamp of the record being verified.
:param message_hash:
The hash that was carried out over the message.
This is an object belonging to the `Crypto.Hash` module.
:param signature: The signature that needs to be validated.
:return: True if verification is correct. False otherwise.
"""
# Determine verifier type to use based on timestamp.
if timestamp < 1496176860:
verifier = cls._VERIFIER_20130905
elif timestamp < 1502202360:
verifier = None
else:
verifier = cls._VERIFIER_20170808
# If a verifier exists to handle this problem, use it directly.
# Else, we cannot verify the record and must mark it invalid.
if verifier:
result = verifier.verify(
message_hash,
signature,
)
else:
result = False
# Some verifiers return int 1/0 rather than a bool; normalize it
if isinstance(result, int):
result = (result == 1)
return result
|
python
|
{
"resource": ""
}
|
q276169
|
is_embargoed
|
test
|
def is_embargoed(record):
"""Template filter to check if a record is embargoed."""
return record.get('access_right') == 'embargoed' and \
record.get('embargo_date') and \
record.get('embargo_date') > datetime.utcnow().date()
|
python
|
{
"resource": ""
}
|
q276170
|
access_request
|
test
|
def access_request(pid, record, template, **kwargs):
"""Create an access request."""
recid = int(pid.pid_value)
datastore = LocalProxy(
lambda: current_app.extensions['security'].datastore)
# Record must be in restricted access mode.
if record.get('access_right') != 'restricted' or \
not record.get('access_conditions'):
abort(404)
# Record must have an owner and the owner must still exist.
owners = record.get('owners', [])
record_owners = [datastore.find_user(id=owner_id) for owner_id in owners]
if not record_owners:
abort(404)
sender = None
initialdata = dict()
# Prepare initial form data
if current_user.is_authenticated:
sender = current_user
initialdata['email'] = current_user.email
if current_user.profile:
initialdata['full_name'] = current_user.profile.full_name
# Normal form validation
form = AccessRequestForm(formdata=request.form, **initialdata)
if form.validate_on_submit():
accreq = AccessRequest.create(
recid=recid,
receiver=record_owners[0],
sender_full_name=form.data['full_name'],
sender_email=form.data['email'],
justification=form.data['justification'],
sender=sender
)
db.session.commit()
if accreq.status == RequestStatus.EMAIL_VALIDATION:
flash(_(
"Email confirmation needed: We have sent you an email to "
"verify your address. Please check the email and follow the "
"instructions to complete the access request."),
category='info')
else:
flash(_("Access request submitted."), category='info')
return redirect(url_for('invenio_records_ui.recid', pid_value=recid))
return render_template(
template,
pid=pid,
record=record,
form=form,
owners=record_owners,
)
|
python
|
{
"resource": ""
}
|
q276171
|
confirm
|
test
|
def confirm(pid, record, template, **kwargs):
"""Confirm email address."""
recid = int(pid.pid_value)
token = request.view_args['token']
# Validate token
data = EmailConfirmationSerializer.compat_validate_token(token)
if data is None:
flash(_("Invalid confirmation link."), category='danger')
return redirect(url_for("invenio_records_ui.recid", pid_value=recid))
# Validate request exists.
r = AccessRequest.query.get(data['id'])
if not r:
abort(404)
# Confirm email address.
if r.status != RequestStatus.EMAIL_VALIDATION:
abort(404)
r.confirm_email()
db.session.commit()
flash(_("Email validated and access request submitted."), category='info')
return redirect(url_for("invenio_records_ui.recid", pid_value=recid))
|
python
|
{
"resource": ""
}
|
q276172
|
SSHClient._get_endpoint
|
test
|
def _get_endpoint(self):
""" Creates a generic endpoint connection that doesn't finish
"""
return SSHCommandClientEndpoint.newConnection(
reactor, b'/bin/cat', self.username, self.hostname,
port=self.port, keys=self.keys, password=self.password,
knownHosts=self.knownHosts)
|
python
|
{
"resource": ""
}
|
q276173
|
Ordering.reverse
|
test
|
def reverse(self, col):
"""Get reverse direction of ordering."""
if col in self.options:
if self.is_selected(col):
return col if not self.asc else '-{0}'.format(col)
else:
return col
return None
|
python
|
{
"resource": ""
}
|
q276174
|
Ordering.selected
|
test
|
def selected(self):
"""Get column which is being order by."""
if self._selected:
return self._selected if self.asc else \
"-{0}".format(self._selected)
return None
|
python
|
{
"resource": ""
}
|
q276175
|
QueryOrdering.items
|
test
|
def items(self):
"""Get query with correct ordering."""
if self.asc is not None:
if self._selected and self.asc:
return self.query.order_by(self._selected)
elif self._selected and not self.asc:
return self.query.order_by(desc(self._selected))
return self.query
|
python
|
{
"resource": ""
}
|
q276176
|
FileVersionInfo.get_version
|
test
|
def get_version(self) -> str:
"""
Open the file referenced in this object, and scrape the version.
:return:
The version as a string, an empty string if there is no match
to the magic_line, or any file exception messages encountered.
"""
try:
with open(self.file_path, 'r') as f:
lines = f.readlines()
except Exception as e:
return str(e)
result = ''
for line in lines:
if self.magic_line in line:
start = len(self.magic_line)
end = len(line) - self.strip_end_chars
result = line[start:end]
break
return result
|
python
|
{
"resource": ""
}
|
q276177
|
FileVersionInfo.set_version
|
test
|
def set_version(self, new_version: str):
"""
Set the version for this given file.
:param new_version: The new version string to set.
"""
try:
with open(self.file_path, 'r') as f:
lines = f.readlines()
except Exception as e:
print(str(e))
return
for idx, line in enumerate(lines):
if self.magic_line in line:
start = len(self.magic_line)
end = len(line) - self.strip_end_chars
start_str = line[0:start]
end_str = line[end:]
lines[idx] = start_str + new_version + end_str
try:
with open(self.file_path, 'w') as f:
f.writelines(lines)
except Exception as e:
print(str(e))
return
|
python
|
{
"resource": ""
}
|
q276178
|
Source._init_ssh
|
test
|
def _init_ssh(self):
""" Configure SSH client options
"""
self.ssh_host = self.config.get('ssh_host', self.hostname)
self.known_hosts = self.config.get('ssh_knownhosts_file',
self.tensor.config.get('ssh_knownhosts_file', None))
self.ssh_keyfile = self.config.get('ssh_keyfile',
self.tensor.config.get('ssh_keyfile', None))
self.ssh_key = self.config.get('ssh_key',
self.tensor.config.get('ssh_key', None))
# Not sure why you'd bother but maybe you've got a weird policy
self.ssh_keypass = self.config.get('ssh_keypass',
self.tensor.config.get('ssh_keypass', None))
self.ssh_user = self.config.get('ssh_username',
self.tensor.config.get('ssh_username', None))
self.ssh_password = self.config.get('ssh_password',
self.tensor.config.get('ssh_password', None))
self.ssh_port = self.config.get('ssh_port',
self.tensor.config.get('ssh_port', 22))
# Verify config to see if we're good to go
if not (self.ssh_key or self.ssh_keyfile or self.ssh_password):
raise Exception("To use SSH you must specify *one* of ssh_key,"
" ssh_keyfile or ssh_password for this source"
" check or globally")
if not self.ssh_user:
raise Exception("ssh_username must be set")
self.ssh_keydb = []
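# Key connections by host/user/port/credentials so sources sharing a
# target reuse one SSH client from the cache.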
cHash = hashlib.sha1(':'.join((
self.ssh_host, self.ssh_user, str(self.ssh_port),
str(self.ssh_password), str(self.ssh_key),
str(self.ssh_keyfile)
)).encode()).hexdigest()
if cHash in self.tensor.hostConnectorCache:
self.ssh_client = self.tensor.hostConnectorCache.get(cHash)
self.ssh_connector = False
else:
self.ssh_connector = True
self.ssh_client = ssh.SSHClient(self.ssh_host, self.ssh_user,
self.ssh_port, password=self.ssh_password,
knownhosts=self.known_hosts)
if self.ssh_keyfile:
self.ssh_client.addKeyFile(self.ssh_keyfile, self.ssh_keypass)
if self.ssh_key:
self.ssh_client.addKeyString(self.ssh_key, self.ssh_keypass)
self.tensor.hostConnectorCache[cHash] = self.ssh_client
|
python
|
{
"resource": ""
}
|
q276179
|
Source.startTimer
|
test
|
def startTimer(self):
"""Starts the timer for this source"""
self.td = self.t.start(self.inter)
if self.use_ssh and self.ssh_connector:
self.ssh_client.connect()
|
python
|
{
"resource": ""
}
|
q276180
|
Source.tick
|
test
|
def tick(self):
"""Called for every timer tick. Calls self.get which can be a deferred
and passes that result back to the queueBack method
Returns a deferred"""
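# In sync mode, skip this tick while a previous fetch is still in flight.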
if self.sync:
if self.running:
defer.returnValue(None)
self.running = True
try:
event = yield self._get()
if event:
self.queueBack(event)
except Exception as e:
log.msg("[%s] Unhandled error: %s" % (self.service, e))
self.running = False
|
python
|
{
"resource": ""
}
|
q276181
|
index
|
test
|
def index():
"""List pending access requests and shared links."""
query = request.args.get('query', '')
order = request.args.get('sort', '-created')
try:
page = int(request.args.get('page', 1))
per_page = int(request.args.get('per_page', 20))
except (TypeError, ValueError):
abort(404)
# Delete form
form = DeleteForm(request.form)
if form.validate_on_submit():
link = SecretLink.query_by_owner(current_user).filter_by(
id=form.link.data).first()
if link and link.revoke():
flash(_("Shared link revoked."), category='success')
db.session.commit()
# Links
links = SecretLink.query_by_owner(current_user).filter(
SecretLink.revoked_at.is_(None)
)
# Querying
if query:
lquery = "%{0}%".format(query)
links = links.filter(
SecretLink.title.like(lquery) | SecretLink.description.like(lquery)
)
# Ordering
ordering = QueryOrdering(links, ['title', 'created', 'expires_at'], order)
links = ordering.items()
# Pending access requests
requests = AccessRequest.query_by_receiver(current_user).filter_by(
status=RequestStatus.PENDING).order_by('created')
return render_template(
"zenodo_accessrequests/settings/index.html",
links_pagination=links.paginate(page, per_page=per_page),
requests=requests,
query=query,
order=ordering,
get_record=get_record,
form=DeleteForm(),
)
|
python
|
{
"resource": ""
}
|
q276182
|
RiemannTCP.createClient
|
test
|
def createClient(self):
"""Create a TCP connection to Riemann with automatic reconnection
"""
server = self.config.get('server', 'localhost')
port = self.config.get('port', 5555)
failover = self.config.get('failover', False)
self.factory = riemann.RiemannClientFactory(server, failover=failover)
if failover:
initial = random.choice(server)
else:
initial = server
log.msg('Connecting to Riemann on %s:%s' % (initial, port))
if self.tls:
if SSL:
self.connector = reactor.connectSSL(initial, port, self.factory,
ClientTLSContext(self.key, self.cert))
else:
log.msg('[FATAL] SSL support not available!' \
' Please install PyOpenSSL. Exiting now')
reactor.stop()
else:
self.connector = reactor.connectTCP(initial, port, self.factory)
d = defer.Deferred()
def cb():
# Wait until we have a useful proto object
if hasattr(self.factory, 'proto') and self.factory.proto:
self.t.start(self.inter)
d.callback(None)
else:
reactor.callLater(0.01, cb)
cb()
return d
|
python
|
{
"resource": ""
}
|
q276183
|
RiemannTCP.stop
|
test
|
def stop(self):
"""Stop this client.
"""
self.t.stop()
self.factory.stopTrying()
self.connector.disconnect()
|
python
|
{
"resource": ""
}
|
q276184
|
RiemannTCP.emptyQueue
|
test
|
def emptyQueue(self):
"""Remove all or self.queueDepth events from the queue
"""
if self.events:
if self.queueDepth and (len(self.events) > self.queueDepth):
# Remove maximum of self.queueDepth items from queue
events = self.events[:self.queueDepth]
self.events = self.events[self.queueDepth:]
else:
events = self.events
self.events = []
if self.allow_nan:
self.factory.proto.sendEvents(events)
else:
self.factory.proto.sendEvents([e for e in events if e.metric is not None])
|
python
|
{
"resource": ""
}
|
q276185
|
RiemannTCP.eventsReceived
|
test
|
def eventsReceived(self, events):
"""Receives a list of events and transmits them to Riemann
Arguments:
events -- list of `tensor.objects.Event`
"""
# Make sure queue isn't oversized
if (self.maxsize < 1) or (len(self.events) < self.maxsize):
self.events.extend(events)
|
python
|
{
"resource": ""
}
|
q276186
|
RiemannUDP.createClient
|
test
|
def createClient(self):
"""Create a UDP connection to Riemann"""
server = self.config.get('server', '127.0.0.1')
port = self.config.get('port', 5555)
def connect(ip):
self.protocol = riemann.RiemannUDP(ip, port)
self.endpoint = reactor.listenUDP(0, self.protocol)
d = reactor.resolve(server)
d.addCallback(connect)
return d
|
python
|
{
"resource": ""
}
|
q276187
|
ElasticSearch.createClient
|
test
|
def createClient(self):
"""Sets up HTTP connector and starts queue timer
"""
server = self.config.get('server', 'localhost')
port = int(self.config.get('port', 9200))
self.client = elasticsearch.ElasticSearch(self.url, self.user,
self.password, self.index)
self.t.start(self.inter)
|
python
|
{
"resource": ""
}
|
q276188
|
RiemannProtobufMixin.encodeEvent
|
test
|
def encodeEvent(self, event):
"""Adapts an Event object to a Riemann protobuf event Event"""
pbevent = proto_pb2.Event(
time=int(event.time),
state=event.state,
service=event.service,
host=event.hostname,
description=event.description,
tags=event.tags,
ttl=event.ttl,
)
if event.metric is not None:
# I have no idea what I'm doing
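# Riemann's protobuf stores ints in metric_sint64 and floats in metric_d;
# metric_f is set as well, presumably for compatibility with older clients.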
if isinstance(event.metric, int):
pbevent.metric_sint64 = event.metric
pbevent.metric_f = float(event.metric)
else:
pbevent.metric_d = float(event.metric)
pbevent.metric_f = float(event.metric)
if event.attributes is not None:
for key, value in event.attributes.items():
attribute = pbevent.attributes.add()
attribute.key, attribute.value = key, value
return pbevent
|
python
|
{
"resource": ""
}
|
q276189
|
RiemannProtobufMixin.encodeMessage
|
test
|
def encodeMessage(self, events):
"""Encode a list of Tensor events with protobuf"""
message = proto_pb2.Msg(
events=[self.encodeEvent(e) for e in events if e._type=='riemann']
)
return message.SerializeToString()
|
python
|
{
"resource": ""
}
|
q276190
|
RiemannProtobufMixin.decodeMessage
|
test
|
def decodeMessage(self, data):
"""Decode a protobuf message into a list of Tensor events"""
message = proto_pb2.Msg()
message.ParseFromString(data)
return message
|
python
|
{
"resource": ""
}
|
q276191
|
RiemannProtobufMixin.sendEvents
|
test
|
def sendEvents(self, events):
"""Send a Tensor Event to Riemann"""
self.pressure += 1
self.sendString(self.encodeMessage(events))
|
python
|
{
"resource": ""
}
|
q276192
|
generate
|
test
|
def generate(ctx, url, *args, **kwargs):
"""
Generate preview for URL.
"""
file_previews = ctx.obj['file_previews']
options = {}
metadata = kwargs['metadata']
width = kwargs['width']
height = kwargs['height']
output_format = kwargs['format']
if metadata:
options['metadata'] = metadata.split(',')
if width:
options.setdefault('size', {})
options['size']['width'] = width
if height:
options.setdefault('size', {})
options['size']['height'] = height
if output_format:
options['format'] = output_format
results = file_previews.generate(url, **options)
click.echo(results)
|
python
|
{
"resource": ""
}
|
q276193
|
retrieve
|
test
|
def retrieve(ctx, preview_id, *args, **kwargs):
"""
Retrieve preview results for ID.
"""
file_previews = ctx.obj['file_previews']
results = file_previews.retrieve(preview_id)
click.echo(results)
|
python
|
{
"resource": ""
}
|
q276194
|
Worker.r_q_send
|
test
|
def r_q_send(self, msg_dict):
"""Send message dicts through r_q, and throw explicit errors for
pickle problems"""
# Check whether msg_dict can be pickled...
no_pickle_keys = self.invalid_dict_pickle_keys(msg_dict)
if no_pickle_keys == []:
self.r_q.put(msg_dict)
else:
## Explicit pickle error handling
hash_func = md5()
hash_func.update(str(msg_dict).encode()) # md5 requires bytes in Python 3
dict_hash = str(hash_func.hexdigest())[-7:] # Last 7 digits of hash
linesep = os.linesep
sys.stderr.write(
"{0} {1}r_q_send({2}) Can't pickle this dict:{3} '''{7}{4} {5}{7}{6}''' {7}".format(
datetime.now(),
Style.BRIGHT,
dict_hash,
Style.RESET_ALL,
Fore.MAGENTA,
msg_dict,
Style.RESET_ALL,
linesep,
)
)
## Verbose list of the offending key(s) / object attrs
## Send all output to stderr...
err_frag1 = (
Style.BRIGHT
+ " r_q_send({0}) Offending dict keys:".format(dict_hash)
+ Style.RESET_ALL
)
err_frag2 = Fore.YELLOW + " {0}".format(no_pickle_keys) + Style.RESET_ALL
err_frag3 = "{0}".format(linesep)
sys.stderr.write(err_frag1 + err_frag2 + err_frag3)
for key in sorted(no_pickle_keys):
sys.stderr.write(
" msg_dict['{0}']: {1}'{2}'{3}{4}".format(
key,
Fore.MAGENTA,
repr(msg_dict.get(key)),
Style.RESET_ALL,
linesep,
)
)
if isinstance(msg_dict.get(key), object):
thisobj = msg_dict.get(key)
no_pickle_attrs = self.invalid_obj_pickle_attrs(thisobj)
err_frag1 = (
Style.BRIGHT
+ " r_q_send({0}) Offending attrs:".format(dict_hash)
+ Style.RESET_ALL
)
err_frag2 = (
Fore.YELLOW + " {0}".format(no_pickle_attrs) + Style.RESET_ALL
)
err_frag3 = "{0}".format(linesep)
sys.stderr.write(err_frag1 + err_frag2 + err_frag3)
for attr in no_pickle_attrs:
sys.stderr.write(
" msg_dict['{0}'].{1}: {2}'{3}'{4}{5}".format(
key,
attr,
Fore.RED,
repr(getattr(thisobj, attr)),
Style.RESET_ALL,
linesep,
)
)
sys.stderr.write(
" {0}r_q_send({1}) keys (no problems):{2}{3}".format(
Style.BRIGHT, dict_hash, Style.RESET_ALL, linesep
)
)
for key in sorted(set(msg_dict.keys()).difference(no_pickle_keys)):
sys.stderr.write(
" msg_dict['{0}']: {1}{2}{3}{4}".format(
key,
Fore.GREEN,
repr(msg_dict.get(key)),
Style.RESET_ALL,
linesep,
)
)
|
python
|
{
"resource": ""
}
|
q276195
|
Worker.message_loop
|
test
|
def message_loop(self, t_q, r_q):
"""Loop through messages and execute tasks"""
t_msg = {}
while t_msg.get("state", "") != "__DIE__":
try:
t_msg = t_q.get(True, self.cycle_sleep) # Poll blocking
self.task = t_msg.get("task", "") # __DIE__ has no task
if self.task != "":
self.task.task_start = time.time() # Start the timer
# Send ACK to the controller who requested work on this task
self.r_q_send(
{"w_id": self.w_id, "task": self.task, "state": "__ACK__"}
)
# Update the sleep time with latest recommendations
self.cycle_sleep = self.task.worker_loop_delay
# Assign the result of task.run() to task.result
self.task.result = self.task.run()
self.task.task_stop = time.time() # Seconds since epoch
self.r_q_send(
{"w_id": self.w_id, "task": self.task, "state": "__FINISHED__"}
) # Ack work finished
self.task = None
except Empty:
pass
except Full:
time.sleep(0.1)
## Disable extraneous error handling...
except:
if self.task is not None:
self.task.task_stop = time.time() # Seconds since epoch
# Handle all other errors here...
tb_str = "".join(tb.format_exception(*(sys.exc_info())))
self.r_q_send(
{
"w_id": self.w_id,
"task": self.task,
"error": tb_str,
"state": "__ERROR__",
}
)
return
|
python
|
{
"resource": ""
}
|
q276196
|
TaskMgrStats.log_time
|
test
|
def log_time(self):
"""Return True if it's time to log"""
return self.hot_loop and self.time_delta >= self.log_interval
|
python
|
{
"resource": ""
}
|
q276197
|
SASLStateMachine.response
|
test
|
def response(self, payload):
"""
Send a response to the previously received challenge, with the given
`payload`. The payload is encoded using base64 and transmitted to the
server.
Return the next state of the state machine as tuple (see
:class:`SASLStateMachine` for details).
"""
if self._state == SASLState.SUCCESS_SIMULATE_CHALLENGE:
if payload != b"":
# XXX: either our mechanism is buggy or the server
# sent SASLState.SUCCESS before all challenge-response
# messages defined by the mechanism were sent
self._state = SASLState.FAILURE
raise SASLFailure(
None,
"protocol violation: mechanism did not"
" respond with an empty response to a"
" challenge with final data – this suggests"
" a protocol-violating early success from the server."
)
self._state = SASLState.SUCCESS
return SASLState.SUCCESS, None
if self._state != SASLState.CHALLENGE:
raise RuntimeError(
"no challenge has been made or negotiation failed")
try:
next_state, payload = yield from self.interface.respond(payload)
except SASLFailure:
self._state = SASLState.FAILURE
raise
next_state = SASLState.from_reply(next_state)
# unfold the (SASLState.SUCCESS, payload) to a sequence of
# (SASLState.CHALLENGE, payload), (SASLState.SUCCESS, None) for the SASLMethod
# to allow uniform treatment of both cases
if next_state == SASLState.SUCCESS and payload is not None:
self._state = SASLState.SUCCESS_SIMULATE_CHALLENGE
return SASLState.CHALLENGE, payload
self._state = next_state
return next_state, payload
|
python
|
{
"resource": ""
}
|
q276198
|
SASLStateMachine.abort
|
test
|
def abort(self):
"""
Abort an initiated SASL authentication process. The expected result
state is ``failure``.
"""
if self._state == SASLState.INITIAL:
raise RuntimeError("SASL authentication hasn't started yet")
if self._state == SASLState.SUCCESS_SIMULATE_CHALLENGE:
raise RuntimeError("SASL message exchange already over")
try:
return (yield from self.interface.abort())
finally:
self._state = SASLState.FAILURE
|
python
|
{
"resource": ""
}
|
q276199
|
_saslprep_do_mapping
|
test
|
def _saslprep_do_mapping(chars):
"""
Perform the stringprep mapping step of SASLprep. Operates in-place on a
list of unicode characters provided in `chars`.
"""
i = 0
while i < len(chars):
c = chars[i]
if stringprep.in_table_c12(c):
chars[i] = "\u0020"
elif stringprep.in_table_b1(c):
del chars[i]
continue
i += 1
|
python
|
{
"resource": ""
}
|