_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 31
13.1k
| language
stringclasses 1
value | meta_information
dict |
---|---|---|---|---|---|
q276100
|
always_iterable
|
test
|
def always_iterable(item):
"""
Given an object, always return an iterable. If the item is not
already iterable, return a tuple containing only the item. If item is
None, an empty iterable is returned.
>>> always_iterable([1,2,3])
<list_iterator...>
>>> always_iterable('foo')
<tuple_iterator...>
>>> always_iterable(None)
<tuple_iterator...>
>>> always_iterable(range(10))
<range_iterator...>
>>> def _test_func(): yield "I'm iterable"
>>> print(next(always_iterable(_test_func())))
|
python
|
{
"resource": ""
}
|
q276101
|
suppress_exceptions
|
test
|
def suppress_exceptions(callables, *exceptions):
"""
Call each callable in callables, suppressing any exceptions supplied. If
no exception classes are supplied, all Exceptions will be suppressed.
>>> import functools
>>> c1 = functools.partial(int, 'a')
>>> c2 = functools.partial(int, '10')
>>> list(suppress_exceptions((c1, c2)))
[10]
>>> list(suppress_exceptions((c1, c2), KeyError))
|
python
|
{
"resource": ""
}
|
q276102
|
duplicates
|
test
|
def duplicates(*iterables, **kwargs):
"""
Yield duplicate items from any number of sorted iterables of items
>>> items_a = [1, 2, 3]
>>> items_b = [0, 3, 4, 5, 6]
>>> list(duplicates(items_a, items_b))
[(3, 3)]
It won't behave as you expect if the iterables aren't ordered
>>> items_b.append(1)
>>> list(duplicates(items_a, items_b))
[(3, 3)]
>>> list(duplicates(items_a, sorted(items_b)))
[(1, 1), (3, 3)]
This function is most interesting when it's operating on a key
of more complex objects.
>>> items_a = [dict(email='[email protected]', id=1)]
>>> items_b = [dict(email='[email protected]', id=2), dict(email='other')]
>>> dupe, = duplicates(items_a, items_b, key=operator.itemgetter('email'))
>>> dupe[0]['email'] ==
|
python
|
{
"resource": ""
}
|
q276103
|
assert_ordered
|
test
|
def assert_ordered(iterable, key=lambda x: x, comp=operator.le):
"""
Assert that for all items in the iterable, they're in order based on comp
>>> list(assert_ordered(range(5)))
[0, 1, 2, 3, 4]
>>> list(assert_ordered(range(5), comp=operator.ge))
Traceback (most recent call last):
...
AssertionError: 0 < 1
>>> list(assert_ordered(range(5, 0, -1), key=operator.neg))
[5, 4, 3, 2, 1]
"""
err_tmpl = (
"{pair[0]} > {pair[1]}" if comp is operator.le else
|
python
|
{
"resource": ""
}
|
q276104
|
_swap_on_miss
|
test
|
def _swap_on_miss(partition_result):
"""
Given a partition_dict result, if the partition missed, swap
the before and after.
"""
|
python
|
{
"resource": ""
}
|
q276105
|
partition_dict
|
test
|
def partition_dict(items, key):
"""
Given an ordered dictionary of items and a key in that dict,
return an ordered dict of items before, the keyed item, and
an ordered dict of items after.
>>> od = collections.OrderedDict(zip(range(5), 'abcde'))
>>> before, item, after = partition_dict(od, 3)
>>> before
OrderedDict([(0, 'a'), (1, 'b'), (2, 'c')])
>>> item
'd'
>>> after
OrderedDict([(4, 'e')])
Like string.partition, if the key is not found in the items,
the before will contain all items, item will be None, and
after will be an empty iterable.
>>>
|
python
|
{
"resource": ""
}
|
q276106
|
GroupbySaved.get_first_n_queues
|
test
|
def get_first_n_queues(self, n):
"""
Run through the sequence until n queues are created and return
them. If fewer are created, return those plus empty iterables to
|
python
|
{
"resource": ""
}
|
q276107
|
Reusable.reset
|
test
|
def reset(self):
"""
Resets the iterator to the start.
Any remaining values in the current iteration are discarded.
"""
|
python
|
{
"resource": ""
}
|
q276108
|
parse_as_var
|
test
|
def parse_as_var(parser, token):
"""
Parse the remainder of the token, to find a "as varname" statement.
:param parser: The "parser" object that ``@register.tag`` provides.
:type parser: :class:`~django.template.Parser`
:param token: The "token" object that ``@register.tag`` provides.
:type token: :class:`~django.template.Token` or splitted bits
"""
if isinstance(token, Token):
bits =
|
python
|
{
"resource": ""
}
|
q276109
|
template_tag
|
test
|
def template_tag(library, name):
"""
Decorator to register class tags
:param library: The template tag library, typically instantiated as ``register = Library()``.
:type library: :class:`~django.template.Library`
:param name: The name of the template tag
:type name: str
Example:
.. code-block:: python
@template_tag(register, 'my_tag')
class MyTag(BaseNode):
pass
"""
def _inner(cls):
if hasattr(cls, 'parse'):
compile_function = cls.parse
|
python
|
{
"resource": ""
}
|
q276110
|
PublicKeychain.descendant
|
test
|
def descendant(self, chain_path):
""" A descendant is a child many steps down.
"""
public_child = self.hdkeychain
chain_step_bytes = 4
max_bits_per_step = 2**31
chain_steps = [
int(chain_path[i:i+chain_step_bytes*2], 16) % max_bits_per_step
for
|
python
|
{
"resource": ""
}
|
q276111
|
SQLiteSchemaExtractor.fetch_sqlite_master
|
test
|
def fetch_sqlite_master(self):
"""
Get sqlite_master table information as a list of dictionaries.
:return: sqlite_master table information.
:rtype: list
:Sample Code:
.. code:: python
from sqliteschema import SQLiteSchemaExtractor
print(json.dumps(SQLiteSchemaExtractor("sample.sqlite").fetch_sqlite_master(), indent=4))
:Output:
.. code-block:: json
[
{
"tbl_name": "sample_table",
"sql": "CREATE TABLE 'sample_table' ('a' INTEGER, 'b' REAL, 'c' TEXT, 'd' REAL, 'e' TEXT)",
"type": "table",
"name": "sample_table",
"rootpage": 2
},
{
"tbl_name": "sample_table",
"sql": "CREATE INDEX sample_table_a_index ON sample_table('a')",
"type": "index",
"name": "sample_table_a_index",
"rootpage": 3
}
]
"""
sqlite_master_record_list
|
python
|
{
"resource": ""
}
|
q276112
|
object_iter
|
test
|
def object_iter(obj, parent=None, parent_key=None, idx=None,
siblings=None):
"""Yields each node of object graph in postorder."""
obj_node = Node(value=obj, parent=parent, parent_key=parent_key,
siblings=siblings, idx=idx)
|
python
|
{
"resource": ""
}
|
q276113
|
select
|
test
|
def select(selector, obj):
"""Appy selector to obj and return matching nodes.
If only one node is found, return it, otherwise return a list of matches.
Returns False on syntax error. None if no results found.
"""
parser = Parser(obj)
try:
|
python
|
{
"resource": ""
}
|
q276114
|
Parser.parse
|
test
|
def parse(self, selector):
"""Accept a list of tokens. Returns matched nodes of self.obj."""
log.debug(self.obj)
tokens = lex(selector)
if self.peek(tokens, 'operator') == '*':
self.match(tokens, 'operator')
|
python
|
{
"resource": ""
}
|
q276115
|
Parser.selector_production
|
test
|
def selector_production(self, tokens):
"""Production for a full selector."""
validators = []
# the following productions should return predicate functions.
if self.peek(tokens, 'type'):
type_ = self.match(tokens, 'type')
validators.append(self.type_production(type_))
if self.peek(tokens, 'identifier'):
key = self.match(tokens, 'identifier')
validators.append(self.key_production(key))
if self.peek(tokens, 'pclass'):
pclass = self.match(tokens, 'pclass')
validators.append(self.pclass_production(pclass))
if self.peek(tokens, 'nth_func'):
nth_func = self.match(tokens, 'nth_func')
validators.append(self.nth_child_production(nth_func, tokens))
if self.peek(tokens, 'pclass_func'):
pclass_func = self.match(tokens, 'pclass_func')
validators.append(self.pclass_func_production(pclass_func, tokens))
if not len(validators):
raise SelectorSyntaxError('no selector recognized.')
# apply validators from a selector expression to self.obj
results = self._match_nodes(validators, self.obj)
if self.peek(tokens, 'operator'):
operator = self.match(tokens, 'operator')
rvals = self.selector_production(tokens)
|
python
|
{
"resource": ""
}
|
q276116
|
Parser.parents
|
test
|
def parents(self, lhs, rhs):
"""Find nodes in rhs which have parents in lhs."""
|
python
|
{
"resource": ""
}
|
q276117
|
Parser.ancestors
|
test
|
def ancestors(self, lhs, rhs):
"""Return nodes from rhs which have ancestors in lhs."""
def _search(node):
if node in lhs:
return True
if
|
python
|
{
"resource": ""
}
|
q276118
|
Parser.siblings
|
test
|
def siblings(self, lhs, rhs):
"""Find nodes in rhs having common parents in lhs."""
parents = [node.parent for node in lhs]
|
python
|
{
"resource": ""
}
|
q276119
|
Parser.nth_child_production
|
test
|
def nth_child_production(self, lexeme, tokens):
"""Parse args and pass them to pclass_func_validator."""
args = self.match(tokens, 'expr')
pat = self.nth_child_pat.match(args)
if pat.group(5):
a = 2
b = 1 if pat.group(5) == 'odd' else 0
elif pat.group(6):
a = 0
b = int(pat.group(6))
else:
sign = pat.group(1) if pat.group(1) else '+'
coef = pat.group(2) if pat.group(2) else '1'
|
python
|
{
"resource": ""
}
|
q276120
|
Parser._match_nodes
|
test
|
def _match_nodes(self, validators, obj):
"""Apply each validator in validators to each node in obj.
Return each node in obj which matches all validators.
"""
results = []
for node in object_iter(obj):
|
python
|
{
"resource": ""
}
|
q276121
|
ping
|
test
|
def ping(dst, count, inter=0.2, maxwait=1000, size=64):
"""Sends ICMP echo requests to destination `dst` `count` times.
Returns a deferred which fires when responses are finished.
"""
def _then(result, p):
p.stopListening()
return result
|
python
|
{
"resource": ""
}
|
q276122
|
HTTPRequest.getBody
|
test
|
def getBody(self, url, method='GET', headers={}, data=None, socket=None):
"""Make an HTTP request and return the body
"""
if not 'User-Agent' in headers:
headers['User-Agent']
|
python
|
{
"resource": ""
}
|
q276123
|
PersistentCache.expire
|
test
|
def expire(self, age):
"""Expire any items in the cache older than `age` seconds"""
now = time.time()
cache = self._acquire_cache()
expired = [k for k, v in cache.items() if (now - v[0]) > age]
for k in expired:
|
python
|
{
"resource": ""
}
|
q276124
|
PersistentCache.set
|
test
|
def set(self, k, v):
"""Set a key `k` to value `v`"""
|
python
|
{
"resource": ""
}
|
q276125
|
PersistentCache.get
|
test
|
def get(self, k):
"""Returns key contents, and modify time"""
if self._changed():
|
python
|
{
"resource": ""
}
|
q276126
|
PersistentCache.contains
|
test
|
def contains(self, k):
"""Return True if key `k` exists"""
if self._changed():
|
python
|
{
"resource": ""
}
|
q276127
|
NistBeacon.chain_check
|
test
|
def chain_check(cls, timestamp: int) -> bool:
"""
Given a record timestamp, verify the chain integrity.
:param timestamp: UNIX time / POSIX time / Epoch time
:return: 'True' if the timestamp fits the chain. 'False' otherwise.
"""
# Creation is messy.
# You want genius, you get madness; two sides of the same coin.
# ... I'm sure this can be cleaned up. However, let's test it first.
record = cls.get_record(timestamp)
if isinstance(record, NistBeaconValue) is False:
# Don't you dare try to play me
return False
prev_record = cls.get_previous(record.timestamp)
next_record = cls.get_next(record.timestamp)
if prev_record is None and next_record is None:
# Uh, how did you manage to do this?
# I'm not even mad, that's amazing.
return False
if (
isinstance(prev_record, NistBeaconValue) and
isinstance(next_record, NistBeaconValue)
):
|
python
|
{
"resource": ""
}
|
q276128
|
NistBeaconValue.from_json
|
test
|
def from_json(cls, input_json: str) -> 'NistBeaconValue':
"""
Convert a string of JSON which represents a NIST randomness beacon
value into a 'NistBeaconValue' object.
:param input_json: JSON to build a 'Nist RandomnessBeaconValue' from
:return: A 'NistBeaconValue' object, 'None' otherwise
"""
try:
data_dict = json.loads(input_json)
except ValueError:
return None
# Our required values are "must haves". This makes it simple
# to verify we loaded everything out of JSON correctly.
required_values = {
cls._KEY_FREQUENCY: None,
cls._KEY_OUTPUT_VALUE: None,
cls._KEY_PREVIOUS_OUTPUT_VALUE: None,
cls._KEY_SEED_VALUE: None,
cls._KEY_SIGNATURE_VALUE: None,
cls._KEY_STATUS_CODE: None,
cls._KEY_TIMESTAMP: None,
cls._KEY_VERSION: None,
}
for key in required_values:
if key in data_dict:
required_values[key] = data_dict[key]
# Confirm that the required values are set, and not
|
python
|
{
"resource": ""
}
|
q276129
|
NistBeaconValue.from_xml
|
test
|
def from_xml(cls, input_xml: str) -> 'NistBeaconValue':
"""
Convert a string of XML which represents a NIST Randomness Beacon value
into a 'NistBeaconValue' object.
:param input_xml: XML to build a 'NistBeaconValue' from
:return: A 'NistBeaconValue' object, 'None' otherwise
"""
invalid_result = None
understood_namespaces = {
'nist-0.1': 'http://beacon.nist.gov/record/0.1/',
}
# Our required values are "must haves". This makes it simple
# to verify we loaded everything out of XML correctly.
required_values = {
cls._KEY_FREQUENCY: None,
cls._KEY_OUTPUT_VALUE: None,
cls._KEY_PREVIOUS_OUTPUT_VALUE: None,
cls._KEY_SEED_VALUE: None,
cls._KEY_SIGNATURE_VALUE: None,
cls._KEY_STATUS_CODE: None,
cls._KEY_TIMESTAMP: None,
cls._KEY_VERSION: None,
}
# First attempt to load the xml, return 'None' on ParseError
try:
tree = ElementTree.ElementTree(ElementTree.fromstring(input_xml))
except ElementTree.ParseError:
return invalid_result
# Using the required values, let's load the xml values in
for key in required_values:
discovered_element = tree.find(
"{0}:{1}".format('nist-0.1', key),
namespaces=understood_namespaces,
)
|
python
|
{
"resource": ""
}
|
q276130
|
MinifiedJsTemplateResponse.rendered_content
|
test
|
def rendered_content(self):
"""Returns a 'minified' version of the javascript content"""
template = self.resolve_template(self.template_name)
if django.VERSION[1] < 8:
if template.name.endswith('.min'):
return super(MinifiedJsTemplateResponse, self).rendered_content
else:
if template.template.name.endswith('.min'):
|
python
|
{
"resource": ""
}
|
q276131
|
LogFollower.get_fn
|
test
|
def get_fn(self, fn, max_lines=None):
"""Passes each parsed log line to `fn`
This is a better idea than storing a giant log file in memory
"""
stat = os.stat(self.logfile)
if (stat.st_ino == self.lastInode) and (stat.st_size == self.lastSize):
# Nothing new
return []
# Handle rollover and rotations vaguely
if (stat.st_ino != self.lastInode) or (stat.st_size < self.lastSize):
|
python
|
{
"resource": ""
}
|
q276132
|
LogFollower.get
|
test
|
def get(self, max_lines=None):
"""Returns a big list of all log lines since the last run
|
python
|
{
"resource": ""
}
|
q276133
|
TokenMixin.validate_token
|
test
|
def validate_token(self, token, expected_data=None):
"""Validate secret link token.
:param token: Token value.
:param expected_data: A dictionary of key/values that must be present
in the data part of the token (i.e. included via ``extra_data`` in
``create_token``).
"""
try:
# Load token and remove random data.
data = self.load_token(token)
# Compare expected
|
python
|
{
"resource": ""
}
|
q276134
|
EncryptedTokenMixIn.engine
|
test
|
def engine(self):
"""Get cryptographic engine."""
if not hasattr(self, '_engine'):
from cryptography.fernet import Fernet
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
|
python
|
{
"resource": ""
}
|
q276135
|
EmailConfirmationSerializer.compat_validate_token
|
test
|
def compat_validate_token(cls, *args, **kwargs):
"""Multiple algorithm-compatible token validation."""
data = None
for algorithm in SUPPORTED_DIGEST_ALGORITHMS:
|
python
|
{
"resource": ""
}
|
q276136
|
SecretLinkFactory.create_token
|
test
|
def create_token(cls, obj_id, data, expires_at=None):
"""Create the secret link token."""
if expires_at:
s = TimedSecretLinkSerializer(expires_at=expires_at)
|
python
|
{
"resource": ""
}
|
q276137
|
Counter32
|
test
|
def Counter32(a, b, delta):
"""32bit counter aggregator with wrapping
"""
if b < a:
c =
|
python
|
{
"resource": ""
}
|
q276138
|
Counter64
|
test
|
def Counter64(a, b, delta):
"""64bit counter aggregator with wrapping
"""
if b < a:
c = 18446744073709551615
|
python
|
{
"resource": ""
}
|
q276139
|
average_duration
|
test
|
def average_duration(total_duration, visits):
""" Method to calculate and format an average duration safely """
|
python
|
{
"resource": ""
}
|
q276140
|
TensorService.setupOutputs
|
test
|
def setupOutputs(self, config):
"""Setup output processors"""
if self.proto == 'tcp':
defaultOutput = {
'output': 'tensor.outputs.riemann.RiemannTCP',
'server': self.server,
'port': self.port
}
else:
defaultOutput = {
'output': 'tensor.outputs.riemann.RiemannUDP',
'server': self.server,
'port': self.port
}
outputs = config.get('outputs', [defaultOutput])
for output in outputs:
if not ('debug' in output):
output['debug'] = self.debug
cl = output['output'].split('.')[-1] # class
path = '.'.join(output['output'].split('.')[:-1]) # import path
# Import the module and construct the output object
outputObj = getattr(
|
python
|
{
"resource": ""
}
|
q276141
|
TensorService.setupSources
|
test
|
def setupSources(self, config):
"""Sets up source objects from the given config"""
sources = config.get('sources', [])
for source in sources:
|
python
|
{
"resource": ""
}
|
q276142
|
TensorService.sendEvent
|
test
|
def sendEvent(self, source, events):
"""Callback that all event sources call when they have a new event
or list of events
"""
if isinstance(events, list):
self.eventCounter += len(events)
else:
self.eventCounter += 1
events = [events]
queue = self._aggregateQueue(events)
if queue:
if (source in self.critical)
|
python
|
{
"resource": ""
}
|
q276143
|
TensorService.sourceWatchdog
|
test
|
def sourceWatchdog(self):
"""Watchdog timer function.
Recreates sources which have not generated events in 10*interval if
they have watchdog set to true in their configuration
"""
for i, source in enumerate(self.sources):
if not source.config.get('watchdog', False):
continue
sn = repr(source)
last = self.lastEvents.get(source, None)
if last:
try:
if last < (time.time()-(source.inter*10)):
log.msg("Trying to restart stale source %s: %ss" % (
sn, int(time.time() - last)
))
s = self.sources.pop(i)
try:
|
python
|
{
"resource": ""
}
|
q276144
|
ApacheLogParser._parse_format
|
test
|
def _parse_format(self, format):
"""
Converts the input format to a regular
expression, as well as extracting fields
Raises an exception if it couldn't compile
the generated regex.
"""
format = format.strip()
format = re.sub('[ \t]+',' ',format)
subpatterns = []
findquotes = re.compile(r'^\\"')
findreferreragent = re.compile('Referer|User-Agent')
findpercent = re.compile('^%.*t$')
lstripquotes = re.compile(r'^\\"')
rstripquotes = re.compile(r'\\"$')
header = re.compile(r'.*%\{([^\}]+)\}i')
for element in format.split(' '):
hasquotes = 0
if findquotes.search(element): hasquotes = 1
if hasquotes:
element = lstripquotes.sub('', element)
element = rstripquotes.sub('', element)
head = header.match(element)
if head:
self._names.append(head.groups()[0].lower())
self._types.append(str)
else:
self._names.append(self.alias(element))
|
python
|
{
"resource": ""
}
|
q276145
|
ApacheLogParser.parse
|
test
|
def parse(self, line):
"""
Parses a single line from the log file and returns
a dictionary of it's contents.
Raises and exception if it couldn't parse the line
"""
line = line.strip()
match = self._regex.match(line)
if match:
data = {}
for i, e in enumerate(match.groups()):
if e == "-":
|
python
|
{
"resource": ""
}
|
q276146
|
validate_expires_at
|
test
|
def validate_expires_at(form, field):
"""Validate that date is in the future."""
if form.accept.data:
if not field.data or datetime.utcnow().date() >= field.data:
raise validators.StopValidation(_(
|
python
|
{
"resource": ""
}
|
q276147
|
ApprovalForm.validate_message
|
test
|
def validate_message(form, field):
"""Validate message."""
if form.reject.data and not field.data.strip():
|
python
|
{
"resource": ""
}
|
q276148
|
verify_token
|
test
|
def verify_token():
"""Verify token and save in session if it's valid."""
try:
from .models import SecretLink
token = request.args['token']
# if the token is valid
if token and SecretLink.validate_token(token, {}):
|
python
|
{
"resource": ""
}
|
q276149
|
Device.name
|
test
|
def name(self):
""" Return a basic meaningful name based on device type """
if (
self.device_type and
|
python
|
{
"resource": ""
}
|
q276150
|
_warn_node
|
test
|
def _warn_node(self, msg, *args, **kwargs):
"""Do not warn on external images."""
if not msg.startswith('nonlocal image URI
|
python
|
{
"resource": ""
}
|
q276151
|
connect_receivers
|
test
|
def connect_receivers():
"""Connect receivers to signals."""
request_created.connect(send_email_validation)
request_confirmed.connect(send_confirmed_notifications)
request_rejected.connect(send_reject_notification)
|
python
|
{
"resource": ""
}
|
q276152
|
create_secret_link
|
test
|
def create_secret_link(request, message=None, expires_at=None):
"""Receiver for request-accepted signal."""
pid, record = get_record(request.recid)
if not record:
raise RecordNotFound(request.recid)
description = render_template(
"zenodo_accessrequests/link_description.tpl",
request=request,
record=record,
|
python
|
{
"resource": ""
}
|
q276153
|
send_accept_notification
|
test
|
def send_accept_notification(request, message=None, expires_at=None):
"""Receiver for request-accepted signal to send email notification."""
pid, record = get_record(request.recid)
_send_notification(
request.sender_email,
_("Access request accepted"),
"zenodo_accessrequests/emails/accepted.tpl",
request=request,
|
python
|
{
"resource": ""
}
|
q276154
|
send_confirmed_notifications
|
test
|
def send_confirmed_notifications(request):
"""Receiver for request-confirmed signal to send email notification."""
pid, record = get_record(request.recid)
if record is None:
current_app.logger.error("Cannot retrieve record %s. Emails not sent"
% request.recid)
|
python
|
{
"resource": ""
}
|
q276155
|
send_email_validation
|
test
|
def send_email_validation(request):
"""Receiver for request-created signal to send email notification."""
token = EmailConfirmationSerializer().create_token(
request.id, dict(email=request.sender_email)
)
pid, record = get_record(request.recid)
_send_notification(
request.sender_email,
|
python
|
{
"resource": ""
}
|
q276156
|
send_reject_notification
|
test
|
def send_reject_notification(request, message=None):
"""Receiver for request-rejected signal to send email notification."""
pid, record = get_record(request.recid)
|
python
|
{
"resource": ""
}
|
q276157
|
_send_notification
|
test
|
def _send_notification(to, subject, template, **ctx):
"""Render a template and send as email."""
msg = Message(
subject,
sender=current_app.config.get('SUPPORT_EMAIL'),
|
python
|
{
"resource": ""
}
|
q276158
|
SecretLink.create
|
test
|
def create(cls, title, owner, extra_data, description="", expires_at=None):
"""Create a new secret link."""
if isinstance(expires_at, date):
expires_at = datetime.combine(expires_at, datetime.min.time())
with db.session.begin_nested():
obj = cls(
owner=owner,
title=title,
description=description,
expires_at=expires_at,
token='',
|
python
|
{
"resource": ""
}
|
q276159
|
SecretLink.validate_token
|
test
|
def validate_token(cls, token, expected_data):
"""Validate a secret link token.
Only queries the database if token is valid to determine that the token
has not been revoked.
"""
data = SecretLinkFactory.validate_token(
token, expected_data=expected_data
|
python
|
{
"resource": ""
}
|
q276160
|
SecretLink.revoke
|
test
|
def revoke(self):
"""Revoken a secret link."""
if self.revoked_at is None:
with db.session.begin_nested():
self.revoked_at
|
python
|
{
"resource": ""
}
|
q276161
|
AccessRequest.create
|
test
|
def create(cls, recid=None, receiver=None, sender_full_name=None,
sender_email=None, justification=None, sender=None):
"""Create a new access request.
:param recid: Record id (required).
:param receiver: User object of receiver (required).
:param sender_full_name: Full name of sender (required).
:param sender_email: Email address of sender (required).
:param justification: Justification message (required).
:param sender: User object of sender (optional).
"""
sender_user_id = None if sender is None else sender.id
assert recid
assert receiver
assert sender_full_name
assert sender_email
assert justification
# Determine status
status = RequestStatus.EMAIL_VALIDATION
if sender and sender.confirmed_at:
status = RequestStatus.PENDING
with db.session.begin_nested():
# Create object
obj = cls(
|
python
|
{
"resource": ""
}
|
q276162
|
AccessRequest.get_by_receiver
|
test
|
def get_by_receiver(cls, request_id, user):
"""Get access request for a specific receiver."""
return cls.query.filter_by(
|
python
|
{
"resource": ""
}
|
q276163
|
AccessRequest.confirm_email
|
test
|
def confirm_email(self):
"""Confirm that senders email is valid."""
with db.session.begin_nested():
if self.status != RequestStatus.EMAIL_VALIDATION:
|
python
|
{
"resource": ""
}
|
q276164
|
AccessRequest.accept
|
test
|
def accept(self, message=None, expires_at=None):
"""Accept request."""
with db.session.begin_nested():
if self.status != RequestStatus.PENDING:
|
python
|
{
"resource": ""
}
|
q276165
|
AccessRequest.reject
|
test
|
def reject(self, message=None):
"""Reject request."""
with db.session.begin_nested():
if self.status != RequestStatus.PENDING:
raise
|
python
|
{
"resource": ""
}
|
q276166
|
AccessRequest.create_secret_link
|
test
|
def create_secret_link(self, title, description=None, expires_at=None):
"""Create a secret link from request."""
self.link = SecretLink.create(
title,
self.receiver,
|
python
|
{
"resource": ""
}
|
q276167
|
NistBeaconCrypto.get_hash
|
test
|
def get_hash(
cls,
version: str,
frequency: int,
timestamp: int,
seed_value: str,
prev_output: str,
status_code: str,
) -> SHA512Hash:
"""
Given required properties from a NistBeaconValue,
compute the SHA512Hash object.
:param version: NistBeaconValue.version
:param frequency: NistBeaconValue.frequency
|
python
|
{
"resource": ""
}
|
q276168
|
NistBeaconCrypto.verify
|
test
|
def verify(
cls,
timestamp: int,
message_hash: SHA512Hash,
signature: bytes,
) -> bool:
"""
Verify a given NIST message hash and signature for a beacon value.
:param timestamp: The timestamp of the record being verified.
:param message_hash:
The hash that was carried out over the message.
This is an object belonging to the `Crypto.Hash` module.
:param signature: The signature that needs to be validated.
:return: True if verification is correct. False otherwise.
|
python
|
{
"resource": ""
}
|
q276169
|
is_embargoed
|
test
|
def is_embargoed(record):
"""Template filter to check if a record is embargoed."""
return record.get('access_right') == 'embargoed' and \
|
python
|
{
"resource": ""
}
|
q276170
|
access_request
|
test
|
def access_request(pid, record, template, **kwargs):
"""Create an access request."""
recid = int(pid.pid_value)
datastore = LocalProxy(
lambda: current_app.extensions['security'].datastore)
# Record must be in restricted access mode.
if record.get('access_right') != 'restricted' or \
not record.get('access_conditions'):
abort(404)
# Record must have an owner and owner must still exists.
owners = record.get('owners', [])
record_owners = [datastore.find_user(id=owner_id) for owner_id in owners]
if not record_owners:
abort(404)
sender = None
initialdata = dict()
# Prepare initial form data
if current_user.is_authenticated:
sender = current_user
initialdata['email'] = current_user.email
if current_user.profile:
initialdata['full_name'] = current_user.profile.full_name
# Normal form validation
form = AccessRequestForm(formdata=request.form, **initialdata)
if form.validate_on_submit():
accreq = AccessRequest.create(
recid=recid,
receiver=record_owners[0],
sender_full_name=form.data['full_name'],
sender_email=form.data['email'],
justification=form.data['justification'],
sender=sender
|
python
|
{
"resource": ""
}
|
q276171
|
confirm
|
test
|
def confirm(pid, record, template, **kwargs):
"""Confirm email address."""
recid = int(pid.pid_value)
token = request.view_args['token']
# Validate token
data = EmailConfirmationSerializer.compat_validate_token(token)
if data is None:
flash(_("Invalid confirmation link."), category='danger')
return redirect(url_for("invenio_records_ui.recid", pid_value=recid))
# Validate request exists.
r = AccessRequest.query.get(data['id'])
if not r:
|
python
|
{
"resource": ""
}
|
q276172
|
SSHClient._get_endpoint
|
test
|
def _get_endpoint(self):
""" Creates a generic endpoint connection that doesn't finish
"""
return SSHCommandClientEndpoint.newConnection(
reactor, b'/bin/cat', self.username, self.hostname,
|
python
|
{
"resource": ""
}
|
q276173
|
Ordering.reverse
|
test
|
def reverse(self, col):
"""Get reverse direction of ordering."""
if col in self.options:
|
python
|
{
"resource": ""
}
|
q276174
|
Ordering.selected
|
test
|
def selected(self):
"""Get column which is being order by."""
if self._selected:
return self._selected if self.asc else \
|
python
|
{
"resource": ""
}
|
q276175
|
QueryOrdering.items
|
test
|
def items(self):
"""Get query with correct ordering."""
if self.asc is not None:
if self._selected and self.asc:
|
python
|
{
"resource": ""
}
|
q276176
|
FileVersionInfo.get_version
|
test
|
def get_version(self) -> str:
"""
Open the file referenced in this object, and scrape the version.
:return:
The version as a string, an empty string if there is no match
to the magic_line, or any file exception messages encountered.
"""
try:
f = open(self.file_path, 'r')
lines = f.readlines()
f.close()
except Exception as e:
|
python
|
{
"resource": ""
}
|
q276177
|
FileVersionInfo.set_version
|
test
|
def set_version(self, new_version: str):
"""
Set the version for this given file.
:param new_version: The new version string to set.
"""
try:
f = open(self.file_path, 'r')
lines = f.readlines()
f.close()
except Exception as e:
print(str(e))
return
for idx, line in enumerate(lines):
if self.magic_line in line:
start = len(self.magic_line)
end = len(line) - self.strip_end_chars
|
python
|
{
"resource": ""
}
|
q276178
|
Source._init_ssh
|
test
|
def _init_ssh(self):
""" Configure SSH client options
"""
self.ssh_host = self.config.get('ssh_host', self.hostname)
self.known_hosts = self.config.get('ssh_knownhosts_file',
self.tensor.config.get('ssh_knownhosts_file', None))
self.ssh_keyfile = self.config.get('ssh_keyfile',
self.tensor.config.get('ssh_keyfile', None))
self.ssh_key = self.config.get('ssh_key',
self.tensor.config.get('ssh_key', None))
# Not sure why you'd bother but maybe you've got a weird policy
self.ssh_keypass = self.config.get('ssh_keypass',
self.tensor.config.get('ssh_keypass', None))
self.ssh_user = self.config.get('ssh_username',
self.tensor.config.get('ssh_username', None))
self.ssh_password = self.config.get('ssh_password',
self.tensor.config.get('ssh_password', None))
self.ssh_port = self.config.get('ssh_port',
self.tensor.config.get('ssh_port', 22))
|
python
|
{
"resource": ""
}
|
q276179
|
Source.startTimer
|
test
|
def startTimer(self):
"""Starts the timer for this source"""
self.td = self.t.start(self.inter)
|
python
|
{
"resource": ""
}
|
q276180
|
Source.tick
|
test
|
def tick(self):
"""Called for every timer tick. Calls self.get which can be a deferred
and passes that result back to the queueBack method
Returns a deferred"""
if self.sync:
if self.running:
defer.returnValue(None)
self.running = True
try:
event = yield self._get()
|
python
|
{
"resource": ""
}
|
q276181
|
index
|
test
|
def index():
    """List pending access requests and shared links."""
    # Search / sort / pagination parameters from the query string.
    query = request.args.get('query', '')
    order = request.args.get('sort', '-created')
    try:
        page = int(request.args.get('page', 1))
        per_page = int(request.args.get('per_page', 20))
    except (TypeError, ValueError):
        # Non-numeric pagination parameters -> 404.
        abort(404)
    # Delete form
    form = DeleteForm(request.form)
    if form.validate_on_submit():
        # NOTE(review): `link` is None when the id does not belong to the
        # current user; `link.revoke()` would then raise -- confirm the
        # form validation guarantees ownership.
        link = SecretLink.query_by_owner(current_user).filter_by(
            id=form.link.data).first()
        if link.revoke():
            flash(_("Shared link revoked."), category='success')
        db.session.commit()
    # Links
    links = SecretLink.query_by_owner(current_user).filter(
        SecretLink.revoked_at.is_(None)
    )
    # Querying
    if query:
        # NOTE(review): snippet is truncated below; the filter expression
        # and the remainder of the view are not visible in this chunk.
        lquery = "%{0}%".format(query)
        links = links.filter(
|
python
|
{
"resource": ""
}
|
q276182
|
RiemannTCP.createClient
|
test
|
def createClient(self):
    """Create a TCP connection to Riemann with automatic reconnection
    """
    server = self.config.get('server', 'localhost')
    port = self.config.get('port', 5555)
    failover = self.config.get('failover', False)
    self.factory = riemann.RiemannClientFactory(server, failover=failover)
    # With failover enabled `server` is expected to be a list; pick a
    # random member for the initial connection attempt.
    if failover:
        initial = random.choice(server)
    else:
        initial = server
    log.msg('Connecting to Riemann on %s:%s' % (initial, port))
    if self.tls:
        if SSL:
            self.connector = reactor.connectSSL(initial, port, self.factory,
                ClientTLSContext(self.key, self.cert))
        else:
            # NOTE(review): snippet is truncated after this message; the
            # exit path and the plain-TCP branch are not visible here.
            log.msg('[FATAL] SSL support not available!' \
                ' Please install PyOpenSSL. Exiting now')
|
python
|
{
"resource": ""
}
|
q276183
|
RiemannTCP.stop
|
test
|
def stop(self):
    """Stop this client.

    Cancels the underlying queue-processing timer.
    """
    timer = self.t
    timer.stop()
|
python
|
{
"resource": ""
}
|
q276184
|
RiemannTCP.emptyQueue
|
test
|
def emptyQueue(self):
    """Remove all or self.queueDepth events from the queue
    """
    if self.events:
        if self.queueDepth and (len(self.events) > self.queueDepth):
            # Remove maximum of self.queueDepth items from queue
            events = self.events[:self.queueDepth]
            self.events = self.events[self.queueDepth:]
        else:
            # Queue fits within the depth limit: drain it entirely.
            # NOTE(review): snippet is truncated below; the code that
            # resets self.events and transmits `events` is not visible.
            events = self.events
|
python
|
{
"resource": ""
}
|
q276185
|
RiemannTCP.eventsReceived
|
test
|
# NOTE(review): this snippet is truncated mid-docstring in this chunk;
# the body that queues/transmits the events is not visible here.
def eventsReceived(self, events):
    """Receives a list of events and transmits them to Riemann
    Arguments:
    events -- list
|
python
|
{
"resource": ""
}
|
q276186
|
RiemannUDP.createClient
|
test
|
def createClient(self):
    """Create a UDP connection to Riemann"""
    server = self.config.get('server', '127.0.0.1')
    port = self.config.get('port', 5555)
    def connect(ip):
        # Callback invoked with the resolved IP address.
        # NOTE(review): snippet is truncated here; the hostname resolution
        # that calls connect() and the reactor wiring are not visible.
        self.protocol = riemann.RiemannUDP(ip, port)
|
python
|
{
"resource": ""
}
|
q276187
|
ElasticSearch.createClient
|
test
|
def createClient(self):
    """Sets up HTTP connector and starts queue timer
    """
    # NOTE(review): snippet is truncated after these config lookups; the
    # connector construction and the timer start are not visible here.
    server = self.config.get('server', 'localhost')
    port = int(self.config.get('port', 9200))
|
python
|
{
"resource": ""
}
|
q276188
|
RiemannProtobufMixin.encodeEvent
|
test
|
def encodeEvent(self, event):
    """Adapts an Event object to a Riemann protobuf event Event"""
    pbevent = proto_pb2.Event(
        time=int(event.time),
        state=event.state,
        service=event.service,
        host=event.hostname,
        description=event.description,
        tags=event.tags,
        ttl=event.ttl,
    )
    # The Riemann protobuf schema has a distinct metric field per numeric
    # type; choose based on the Python type of the metric.
    if event.metric is not None:
        # I have no idea what I'm doing
        if isinstance(event.metric, int):
            # NOTE(review): snippet is truncated here; the float branch
            # and the return statement are not visible in this chunk.
            pbevent.metric_sint64 = event.metric
|
python
|
{
"resource": ""
}
|
q276189
|
RiemannProtobufMixin.encodeMessage
|
test
|
# NOTE(review): this snippet is truncated mid-expression in this chunk;
# the tail of the comprehension and the serialization are not visible.
def encodeMessage(self, events):
    """Encode a list of Tensor events with protobuf"""
    message = proto_pb2.Msg(
        events=[self.encodeEvent(e) for e in
|
python
|
{
"resource": ""
}
|
q276190
|
RiemannProtobufMixin.decodeMessage
|
test
|
# NOTE(review): this snippet is truncated mid-docstring in this chunk;
# the decoding body is not visible here.
def decodeMessage(self, data):
    """Decode a protobuf message into a list of
|
python
|
{
"resource": ""
}
|
q276191
|
RiemannProtobufMixin.sendEvents
|
test
|
def sendEvents(self, events):
    """Send a Tensor Event to Riemann"""
    # Track outstanding writes (back-pressure counter); presumably
    # decremented when the write completes -- confirm in full file.
    # NOTE(review): snippet is truncated below this line.
    self.pressure += 1
|
python
|
{
"resource": ""
}
|
q276192
|
generate
|
test
|
def generate(ctx, url, *args, **kwargs):
    """
    Generate preview for URL.
    """
    # Client object stashed on the click context by the CLI entry point.
    file_previews = ctx.obj['file_previews']
    options = {}
    metadata = kwargs['metadata']
    width = kwargs['width']
    height = kwargs['height']
    output_format = kwargs['format']
    # Only include options the user actually supplied on the command line.
    if metadata:
        # Comma-separated list of metadata fields to extract.
        options['metadata'] = metadata.split(',')
    if width:
        # NOTE(review): snippet is truncated here; the height/format
        # handling and the actual generate call are not visible.
        options.setdefault('size', {})
|
python
|
{
"resource": ""
}
|
q276193
|
retrieve
|
test
|
# NOTE(review): this snippet is truncated mid-docstring in this chunk;
# the retrieval body is not visible here.
def retrieve(ctx, preview_id, *args, **kwargs):
    """
    Retrieve preview results for ID.
|
python
|
{
"resource": ""
}
|
q276194
|
Worker.r_q_send
|
test
|
def r_q_send(self, msg_dict):
    """Send message dicts through r_q, and throw explicit errors for
    pickle problems"""
    # Check whether msg_dict can be pickled...
    no_pickle_keys = self.invalid_dict_pickle_keys(msg_dict)
    if no_pickle_keys == []:
        self.r_q.put(msg_dict)
    else:
        ## Explicit pickle error handling
        # NOTE(review): md5().update(str(...)) requires bytes on
        # Python 3 -- this path looks Python-2 era; confirm target version.
        hash_func = md5()
        hash_func.update(str(msg_dict))
        # Short hash used to correlate the two stderr messages below.
        dict_hash = str(hash_func.hexdigest())[-7:] # Last 7 digits of hash
        linesep = os.linesep
        sys.stderr.write(
            "{0} {1}r_q_send({2}) Can't pickle this dict:{3} '''{7}{4} {5}{7}{6}''' {7}".format(
                datetime.now(),
                Style.BRIGHT,
                dict_hash,
                Style.RESET_ALL,
                Fore.MAGENTA,
                msg_dict,
                Style.RESET_ALL,
                linesep,
            )
        )
        ## Verbose list of the offending key(s) / object attrs
        ## Send all output to stderr...
        err_frag1 = (
            Style.BRIGHT
            + " r_q_send({0}) Offending dict keys:".format(dict_hash)
            + Style.RESET_ALL
        )
        err_frag2 = Fore.YELLOW + " {0}".format(no_pickle_keys) + Style.RESET_ALL
        err_frag3 = "{0}".format(linesep)
        sys.stderr.write(err_frag1 + err_frag2 + err_frag3)
        # NOTE(review): snippet is truncated here; the per-key diagnostic
        # loop body is not visible in this chunk.
        for key in sorted(no_pickle_keys):
|
python
|
{
"resource": ""
}
|
q276195
|
Worker.message_loop
|
test
|
def message_loop(self, t_q, r_q):
    """Loop through messages and execute tasks"""
    # Keep pulling task messages until the controller sends __DIE__.
    t_msg = {}
    while t_msg.get("state", "") != "__DIE__":
        try:
            t_msg = t_q.get(True, self.cycle_sleep) # Poll blocking
            self.task = t_msg.get("task", "") # __DIE__ has no task
            if self.task != "":
                self.task.task_start = time.time() # Start the timer
                # Send ACK to the controller who requested work on this task
                self.r_q_send(
                    {"w_id": self.w_id, "task": self.task, "state": "__ACK__"}
                )
                # Update the sleep time with latest recommendations
                # NOTE(review): snippet is truncated here; task execution
                # and the except handlers are not visible in this chunk.
                self.cycle_sleep = self.task.worker_loop_delay
|
python
|
{
"resource": ""
}
|
q276196
|
TaskMgrStats.log_time
|
test
|
def log_time(self):
    """Return True if it's time to log"""
    # NOTE(review): the body is truncated out of this chunk; the interval
    # comparison that implements the check is not visible here.
|
python
|
{
"resource": ""
}
|
q276197
|
SASLStateMachine.response
|
test
|
def response(self, payload):
    """
    Send a response to the previously received challenge, with the given
    `payload`. The payload is encoded using base64 and transmitted to the
    server.
    Return the next state of the state machine as tuple (see
    :class:`SASLStateMachine` for details).
    """
    # The server reported success while the mechanism still expected a
    # challenge; only an empty final response is legal in that state.
    if self._state == SASLState.SUCCESS_SIMULATE_CHALLENGE:
        if payload != b"":
            # XXX: either our mechanism is buggy or the server
            # sent SASLState.SUCCESS before all challenge-response
            # messages defined by the mechanism were sent
            self._state = SASLState.FAILURE
            raise SASLFailure(
                None,
                "protocol violation: mechanism did not"
                " respond with an empty response to a"
                " challenge with final data – this suggests"
                " a protocol-violating early success from the server."
            )
        self._state = SASLState.SUCCESS
        return SASLState.SUCCESS, None
    # NOTE(review): snippet is truncated mid-condition here; the state
    # guard and the actual transmission are not visible in this chunk.
    if self._state !=
|
python
|
{
"resource": ""
}
|
q276198
|
SASLStateMachine.abort
|
test
|
# NOTE(review): this snippet is truncated inside a string literal; the
# abort transmission logic is not visible in this chunk.
def abort(self):
    """
    Abort an initiated SASL authentication process. The expected result
    state is ``failure``.
    """
    if self._state == SASLState.INITIAL:
        raise RuntimeError("SASL authentication hasn't
|
python
|
{
"resource": ""
}
|
q276199
|
_saslprep_do_mapping
|
test
|
def _saslprep_do_mapping(chars):
    """
    Perform the stringprep mapping step of SASLprep. Operates in-place on a
    list of unicode characters provided in `chars`.
    """
    # Index-based loop because the list length may change as characters
    # are mapped or removed in place.
    i = 0
    while i < len(chars):
        # NOTE(review): snippet is truncated here; the mapping table
        # lookups and the in-place edits are not visible in this chunk.
        c = chars[i]
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.