id
int32 0
252k
| repo
stringlengths 7
55
| path
stringlengths 4
127
| func_name
stringlengths 1
88
| original_string
stringlengths 75
19.8k
| language
stringclasses 1
value | code
stringlengths 75
19.8k
| code_tokens
sequence | docstring
stringlengths 3
17.3k
| docstring_tokens
sequence | sha
stringlengths 40
40
| url
stringlengths 87
242
|
---|---|---|---|---|---|---|---|---|---|---|---|
250,000 | limpyd/redis-limpyd-extensions | limpyd_extensions/related.py | RelatedCollectionForList.lrem | def lrem(self, *values):
"""
Do a "lrem" call with self.instance as parameter for each value. Values
must be primary keys of the related model.
The "count" argument of the final call will be 0 to remove all the
matching values.
"""
self._reverse_call(lambda related_field, value: related_field.lrem(0, value), *values) | python | def lrem(self, *values):
"""
Do a "lrem" call with self.instance as parameter for each value. Values
must be primary keys of the related model.
The "count" argument of the final call will be 0 to remove all the
matching values.
"""
self._reverse_call(lambda related_field, value: related_field.lrem(0, value), *values) | [
"def",
"lrem",
"(",
"self",
",",
"*",
"values",
")",
":",
"self",
".",
"_reverse_call",
"(",
"lambda",
"related_field",
",",
"value",
":",
"related_field",
".",
"lrem",
"(",
"0",
",",
"value",
")",
",",
"*",
"values",
")"
] | Do a "lrem" call with self.instance as parameter for each value. Values
must be primary keys of the related model.
The "count" argument of the final call will be 0 to remove all the
matching values. | [
"Do",
"a",
"lrem",
"call",
"with",
"self",
".",
"instance",
"as",
"parameter",
"for",
"each",
"value",
".",
"Values",
"must",
"be",
"primary",
"keys",
"of",
"the",
"related",
"model",
".",
"The",
"count",
"argument",
"of",
"the",
"final",
"call",
"will",
"be",
"0",
"to",
"remove",
"all",
"the",
"matching",
"values",
"."
] | 13f34e39efd2f802761457da30ab2a4213b63934 | https://github.com/limpyd/redis-limpyd-extensions/blob/13f34e39efd2f802761457da30ab2a4213b63934/limpyd_extensions/related.py#L123-L130 |
250,001 | rerb/django-fortune | fortune/views.py | PackViewSet.loaded | def loaded(self, request, *args, **kwargs):
"""Return a list of loaded Packs.
"""
serializer = self.get_serializer(list(Pack.objects.all()),
many=True)
return Response(serializer.data) | python | def loaded(self, request, *args, **kwargs):
"""Return a list of loaded Packs.
"""
serializer = self.get_serializer(list(Pack.objects.all()),
many=True)
return Response(serializer.data) | [
"def",
"loaded",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"serializer",
"=",
"self",
".",
"get_serializer",
"(",
"list",
"(",
"Pack",
".",
"objects",
".",
"all",
"(",
")",
")",
",",
"many",
"=",
"True",
")",
"return",
"Response",
"(",
"serializer",
".",
"data",
")"
] | Return a list of loaded Packs. | [
"Return",
"a",
"list",
"of",
"loaded",
"Packs",
"."
] | f84d34f616ecabd4fab8351ad7d3062cc9d6b127 | https://github.com/rerb/django-fortune/blob/f84d34f616ecabd4fab8351ad7d3062cc9d6b127/fortune/views.py#L30-L35 |
250,002 | pjuren/pyokit | src/pyokit/io/indexedFile.py | IndexedFile.indexed_file | def indexed_file(self, f):
"""
Setter for information about the file this object indexes.
:param f: a tuple of (filename, handle), either (or both) of which can be
None. If the handle is None, but filename is provided, then
handle is created from the filename. If both handle and filename
are None, or they don't match the previous values indexed by this
object, any current data in this index is cleared. If either are
not None, we require the iterator and the hash function for this
object to already be set.
"""
filename, handle = f
if handle is None and filename is not None:
handle = open(filename)
if (handle is None and filename is None) or \
(filename != self._indexed_filename) or \
(handle != self._indexed_file_handle):
self.index = {}
if ((handle is not None or filename is not None) and
(self.record_iterator is None or self.record_hash_function is None)):
raise IndexError("Setting index file failed; reason: iterator "
"(self.record_iterator) or hash function "
"(self.record_hash_function) have to be set first")
self._indexed_filename = filename
self._indexed_file_handle = handle | python | def indexed_file(self, f):
"""
Setter for information about the file this object indexes.
:param f: a tuple of (filename, handle), either (or both) of which can be
None. If the handle is None, but filename is provided, then
handle is created from the filename. If both handle and filename
are None, or they don't match the previous values indexed by this
object, any current data in this index is cleared. If either are
not None, we require the iterator and the hash function for this
object to already be set.
"""
filename, handle = f
if handle is None and filename is not None:
handle = open(filename)
if (handle is None and filename is None) or \
(filename != self._indexed_filename) or \
(handle != self._indexed_file_handle):
self.index = {}
if ((handle is not None or filename is not None) and
(self.record_iterator is None or self.record_hash_function is None)):
raise IndexError("Setting index file failed; reason: iterator "
"(self.record_iterator) or hash function "
"(self.record_hash_function) have to be set first")
self._indexed_filename = filename
self._indexed_file_handle = handle | [
"def",
"indexed_file",
"(",
"self",
",",
"f",
")",
":",
"filename",
",",
"handle",
"=",
"f",
"if",
"handle",
"is",
"None",
"and",
"filename",
"is",
"not",
"None",
":",
"handle",
"=",
"open",
"(",
"filename",
")",
"if",
"(",
"handle",
"is",
"None",
"and",
"filename",
"is",
"None",
")",
"or",
"(",
"filename",
"!=",
"self",
".",
"_indexed_filename",
")",
"or",
"(",
"handle",
"!=",
"self",
".",
"_indexed_file_handle",
")",
":",
"self",
".",
"index",
"=",
"{",
"}",
"if",
"(",
"(",
"handle",
"is",
"not",
"None",
"or",
"filename",
"is",
"not",
"None",
")",
"and",
"(",
"self",
".",
"record_iterator",
"is",
"None",
"or",
"self",
".",
"record_hash_function",
"is",
"None",
")",
")",
":",
"raise",
"IndexError",
"(",
"\"Setting index file failed; reason: iterator \"",
"\"(self.record_iterator) or hash function \"",
"\"(self.record_hash_function) have to be set first\"",
")",
"self",
".",
"_indexed_filename",
"=",
"filename",
"self",
".",
"_indexed_file_handle",
"=",
"handle"
] | Setter for information about the file this object indexes.
:param f: a tuple of (filename, handle), either (or both) of which can be
None. If the handle is None, but filename is provided, then
handle is created from the filename. If both handle and filename
are None, or they don't match the previous values indexed by this
object, any current data in this index is cleared. If either are
not None, we require the iterator and the hash function for this
object to already be set. | [
"Setter",
"for",
"information",
"about",
"the",
"file",
"this",
"object",
"indexes",
"."
] | fddae123b5d817daa39496183f19c000d9c3791f | https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/indexedFile.py#L122-L147 |
250,003 | pjuren/pyokit | src/pyokit/io/indexedFile.py | IndexedFile.write_index | def write_index(self, fh, to_str_func=str, generate=True, verbose=False):
"""
Write this index to a file.
Only the index dictionary itself is stored, no informatiom about the
indexed file, or the open filehandle is retained. The Output format is
just a tab-separated file, one record per line. The last column is the
file location for the record and all columns before that are collectively
considered to be the hash key for that record (which is probably only 1
column, but this allows us to permit tabs in hash keys).
:param fh: either a string filename or a stream-like object to
write to.
:param to_str_func: a function to convert hash values to strings. We'll
just use str() if this isn't provided.
:param generate: build the full index from the indexed file if it
hasn't already been built. This is the default, and
almost certainly what you want, otherwise just the
part of the index already constructed is written
(which might be nothing...)
:param verbose: if True, output progress messages to stderr.
"""
try:
handle = open(fh, "w")
except TypeError:
# okay, not a filename, try to treat it as a stream to write to.
handle = fh
if generate:
self.__build_index(verbose=verbose)
for key in self._index:
handle.write(to_str_func(key) + "\t" + str(self._index[key]) + "\n") | python | def write_index(self, fh, to_str_func=str, generate=True, verbose=False):
"""
Write this index to a file.
Only the index dictionary itself is stored, no informatiom about the
indexed file, or the open filehandle is retained. The Output format is
just a tab-separated file, one record per line. The last column is the
file location for the record and all columns before that are collectively
considered to be the hash key for that record (which is probably only 1
column, but this allows us to permit tabs in hash keys).
:param fh: either a string filename or a stream-like object to
write to.
:param to_str_func: a function to convert hash values to strings. We'll
just use str() if this isn't provided.
:param generate: build the full index from the indexed file if it
hasn't already been built. This is the default, and
almost certainly what you want, otherwise just the
part of the index already constructed is written
(which might be nothing...)
:param verbose: if True, output progress messages to stderr.
"""
try:
handle = open(fh, "w")
except TypeError:
# okay, not a filename, try to treat it as a stream to write to.
handle = fh
if generate:
self.__build_index(verbose=verbose)
for key in self._index:
handle.write(to_str_func(key) + "\t" + str(self._index[key]) + "\n") | [
"def",
"write_index",
"(",
"self",
",",
"fh",
",",
"to_str_func",
"=",
"str",
",",
"generate",
"=",
"True",
",",
"verbose",
"=",
"False",
")",
":",
"try",
":",
"handle",
"=",
"open",
"(",
"fh",
",",
"\"w\"",
")",
"except",
"TypeError",
":",
"# okay, not a filename, try to treat it as a stream to write to.",
"handle",
"=",
"fh",
"if",
"generate",
":",
"self",
".",
"__build_index",
"(",
"verbose",
"=",
"verbose",
")",
"for",
"key",
"in",
"self",
".",
"_index",
":",
"handle",
".",
"write",
"(",
"to_str_func",
"(",
"key",
")",
"+",
"\"\\t\"",
"+",
"str",
"(",
"self",
".",
"_index",
"[",
"key",
"]",
")",
"+",
"\"\\n\"",
")"
] | Write this index to a file.
Only the index dictionary itself is stored, no informatiom about the
indexed file, or the open filehandle is retained. The Output format is
just a tab-separated file, one record per line. The last column is the
file location for the record and all columns before that are collectively
considered to be the hash key for that record (which is probably only 1
column, but this allows us to permit tabs in hash keys).
:param fh: either a string filename or a stream-like object to
write to.
:param to_str_func: a function to convert hash values to strings. We'll
just use str() if this isn't provided.
:param generate: build the full index from the indexed file if it
hasn't already been built. This is the default, and
almost certainly what you want, otherwise just the
part of the index already constructed is written
(which might be nothing...)
:param verbose: if True, output progress messages to stderr. | [
"Write",
"this",
"index",
"to",
"a",
"file",
"."
] | fddae123b5d817daa39496183f19c000d9c3791f | https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/io/indexedFile.py#L248-L278 |
250,004 | eallik/spinoff | spinoff/actor/uri.py | Uri.steps | def steps(self):
"""Returns an iterable containing the steps to this `Uri` from the root `Uri`, including the root `Uri`."""
def _iter(uri, acc):
acc.appendleft(uri.name if uri.name else '')
return _iter(uri.parent, acc) if uri.parent else acc
return _iter(self, acc=deque()) | python | def steps(self):
"""Returns an iterable containing the steps to this `Uri` from the root `Uri`, including the root `Uri`."""
def _iter(uri, acc):
acc.appendleft(uri.name if uri.name else '')
return _iter(uri.parent, acc) if uri.parent else acc
return _iter(self, acc=deque()) | [
"def",
"steps",
"(",
"self",
")",
":",
"def",
"_iter",
"(",
"uri",
",",
"acc",
")",
":",
"acc",
".",
"appendleft",
"(",
"uri",
".",
"name",
"if",
"uri",
".",
"name",
"else",
"''",
")",
"return",
"_iter",
"(",
"uri",
".",
"parent",
",",
"acc",
")",
"if",
"uri",
".",
"parent",
"else",
"acc",
"return",
"_iter",
"(",
"self",
",",
"acc",
"=",
"deque",
"(",
")",
")"
] | Returns an iterable containing the steps to this `Uri` from the root `Uri`, including the root `Uri`. | [
"Returns",
"an",
"iterable",
"containing",
"the",
"steps",
"to",
"this",
"Uri",
"from",
"the",
"root",
"Uri",
"including",
"the",
"root",
"Uri",
"."
] | 06b00d6b86c7422c9cb8f9a4b2915906e92b7d52 | https://github.com/eallik/spinoff/blob/06b00d6b86c7422c9cb8f9a4b2915906e92b7d52/spinoff/actor/uri.py#L51-L56 |
250,005 | eallik/spinoff | spinoff/actor/uri.py | Uri.parse | def parse(cls, addr):
"""Parses a new `Uri` instance from a string representation of a URI.
>>> u1 = Uri.parse('/foo/bar')
>>> u1.node, u1.steps, u1.path, u1.name
(None, ['', 'foo', 'bar'], '/foo/bar', 'bar')
>>> u2 = Uri.parse('somenode:123/foo/bar')
>>> u2.node, u1.steps, u2.path, ur2.name
('somenode:123', ['', 'foo', 'bar'], '/foo/bar', 'bar')
>>> u1 = Uri.parse('foo/bar')
>>> u1.node, u1.steps, u1.path, u1.name
(None, ['foo', 'bar'], 'foo/bar', 'bar')
"""
if addr.endswith('/'):
raise ValueError("Uris must not end in '/'") # pragma: no cover
parts = addr.split('/')
if ':' in parts[0]:
node, parts[0] = parts[0], ''
else:
node = None
ret = None # Uri(name=None, parent=None, node=node) if node else None
for step in parts:
ret = Uri(name=step, parent=ret, node=node)
node = None # only set the node on the root Uri
return ret | python | def parse(cls, addr):
"""Parses a new `Uri` instance from a string representation of a URI.
>>> u1 = Uri.parse('/foo/bar')
>>> u1.node, u1.steps, u1.path, u1.name
(None, ['', 'foo', 'bar'], '/foo/bar', 'bar')
>>> u2 = Uri.parse('somenode:123/foo/bar')
>>> u2.node, u1.steps, u2.path, ur2.name
('somenode:123', ['', 'foo', 'bar'], '/foo/bar', 'bar')
>>> u1 = Uri.parse('foo/bar')
>>> u1.node, u1.steps, u1.path, u1.name
(None, ['foo', 'bar'], 'foo/bar', 'bar')
"""
if addr.endswith('/'):
raise ValueError("Uris must not end in '/'") # pragma: no cover
parts = addr.split('/')
if ':' in parts[0]:
node, parts[0] = parts[0], ''
else:
node = None
ret = None # Uri(name=None, parent=None, node=node) if node else None
for step in parts:
ret = Uri(name=step, parent=ret, node=node)
node = None # only set the node on the root Uri
return ret | [
"def",
"parse",
"(",
"cls",
",",
"addr",
")",
":",
"if",
"addr",
".",
"endswith",
"(",
"'/'",
")",
":",
"raise",
"ValueError",
"(",
"\"Uris must not end in '/'\"",
")",
"# pragma: no cover",
"parts",
"=",
"addr",
".",
"split",
"(",
"'/'",
")",
"if",
"':'",
"in",
"parts",
"[",
"0",
"]",
":",
"node",
",",
"parts",
"[",
"0",
"]",
"=",
"parts",
"[",
"0",
"]",
",",
"''",
"else",
":",
"node",
"=",
"None",
"ret",
"=",
"None",
"# Uri(name=None, parent=None, node=node) if node else None",
"for",
"step",
"in",
"parts",
":",
"ret",
"=",
"Uri",
"(",
"name",
"=",
"step",
",",
"parent",
"=",
"ret",
",",
"node",
"=",
"node",
")",
"node",
"=",
"None",
"# only set the node on the root Uri",
"return",
"ret"
] | Parses a new `Uri` instance from a string representation of a URI.
>>> u1 = Uri.parse('/foo/bar')
>>> u1.node, u1.steps, u1.path, u1.name
(None, ['', 'foo', 'bar'], '/foo/bar', 'bar')
>>> u2 = Uri.parse('somenode:123/foo/bar')
>>> u2.node, u1.steps, u2.path, ur2.name
('somenode:123', ['', 'foo', 'bar'], '/foo/bar', 'bar')
>>> u1 = Uri.parse('foo/bar')
>>> u1.node, u1.steps, u1.path, u1.name
(None, ['foo', 'bar'], 'foo/bar', 'bar') | [
"Parses",
"a",
"new",
"Uri",
"instance",
"from",
"a",
"string",
"representation",
"of",
"a",
"URI",
"."
] | 06b00d6b86c7422c9cb8f9a4b2915906e92b7d52 | https://github.com/eallik/spinoff/blob/06b00d6b86c7422c9cb8f9a4b2915906e92b7d52/spinoff/actor/uri.py#L69-L95 |
250,006 | benzrf/parthial | parthial/context.py | Environment.new_scope | def new_scope(self, new_scope={}):
"""Add a new innermost scope for the duration of the with block.
Args:
new_scope (dict-like): The scope to add.
"""
old_scopes, self.scopes = self.scopes, self.scopes.new_child(new_scope)
yield
self.scopes = old_scopes | python | def new_scope(self, new_scope={}):
"""Add a new innermost scope for the duration of the with block.
Args:
new_scope (dict-like): The scope to add.
"""
old_scopes, self.scopes = self.scopes, self.scopes.new_child(new_scope)
yield
self.scopes = old_scopes | [
"def",
"new_scope",
"(",
"self",
",",
"new_scope",
"=",
"{",
"}",
")",
":",
"old_scopes",
",",
"self",
".",
"scopes",
"=",
"self",
".",
"scopes",
",",
"self",
".",
"scopes",
".",
"new_child",
"(",
"new_scope",
")",
"yield",
"self",
".",
"scopes",
"=",
"old_scopes"
] | Add a new innermost scope for the duration of the with block.
Args:
new_scope (dict-like): The scope to add. | [
"Add",
"a",
"new",
"innermost",
"scope",
"for",
"the",
"duration",
"of",
"the",
"with",
"block",
"."
] | ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f | https://github.com/benzrf/parthial/blob/ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f/parthial/context.py#L51-L59 |
250,007 | benzrf/parthial | parthial/context.py | Environment.new | def new(self, val):
"""Add a new value to me.
Args:
val (LispVal): The value to be added.
Returns:
LispVal: The added value.
Raises:
~parthial.errs.LimitationError: If I already contain the maximum
number of elements.
"""
if len(self.things) >= self.max_things:
raise LimitationError('too many things')
self.things.add(val)
return val | python | def new(self, val):
"""Add a new value to me.
Args:
val (LispVal): The value to be added.
Returns:
LispVal: The added value.
Raises:
~parthial.errs.LimitationError: If I already contain the maximum
number of elements.
"""
if len(self.things) >= self.max_things:
raise LimitationError('too many things')
self.things.add(val)
return val | [
"def",
"new",
"(",
"self",
",",
"val",
")",
":",
"if",
"len",
"(",
"self",
".",
"things",
")",
">=",
"self",
".",
"max_things",
":",
"raise",
"LimitationError",
"(",
"'too many things'",
")",
"self",
".",
"things",
".",
"add",
"(",
"val",
")",
"return",
"val"
] | Add a new value to me.
Args:
val (LispVal): The value to be added.
Returns:
LispVal: The added value.
Raises:
~parthial.errs.LimitationError: If I already contain the maximum
number of elements. | [
"Add",
"a",
"new",
"value",
"to",
"me",
"."
] | ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f | https://github.com/benzrf/parthial/blob/ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f/parthial/context.py#L61-L77 |
250,008 | benzrf/parthial | parthial/context.py | Environment.rec_new | def rec_new(self, val):
"""Recursively add a new value and its children to me.
Args:
val (LispVal): The value to be added.
Returns:
LispVal: The added value.
"""
if val not in self.things:
for child in val.children():
self.rec_new(child)
self.new(val)
return val | python | def rec_new(self, val):
"""Recursively add a new value and its children to me.
Args:
val (LispVal): The value to be added.
Returns:
LispVal: The added value.
"""
if val not in self.things:
for child in val.children():
self.rec_new(child)
self.new(val)
return val | [
"def",
"rec_new",
"(",
"self",
",",
"val",
")",
":",
"if",
"val",
"not",
"in",
"self",
".",
"things",
":",
"for",
"child",
"in",
"val",
".",
"children",
"(",
")",
":",
"self",
".",
"rec_new",
"(",
"child",
")",
"self",
".",
"new",
"(",
"val",
")",
"return",
"val"
] | Recursively add a new value and its children to me.
Args:
val (LispVal): The value to be added.
Returns:
LispVal: The added value. | [
"Recursively",
"add",
"a",
"new",
"value",
"and",
"its",
"children",
"to",
"me",
"."
] | ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f | https://github.com/benzrf/parthial/blob/ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f/parthial/context.py#L79-L92 |
250,009 | benzrf/parthial | parthial/context.py | Environment.add_rec_new | def add_rec_new(self, k, val):
"""Recursively add a new value and its children to me, and assign a
variable to it.
Args:
k (str): The name of the variable to assign.
val (LispVal): The value to be added and assigned.
Returns:
LispVal: The added value.
"""
self.rec_new(val)
self[k] = val
return val | python | def add_rec_new(self, k, val):
"""Recursively add a new value and its children to me, and assign a
variable to it.
Args:
k (str): The name of the variable to assign.
val (LispVal): The value to be added and assigned.
Returns:
LispVal: The added value.
"""
self.rec_new(val)
self[k] = val
return val | [
"def",
"add_rec_new",
"(",
"self",
",",
"k",
",",
"val",
")",
":",
"self",
".",
"rec_new",
"(",
"val",
")",
"self",
"[",
"k",
"]",
"=",
"val",
"return",
"val"
] | Recursively add a new value and its children to me, and assign a
variable to it.
Args:
k (str): The name of the variable to assign.
val (LispVal): The value to be added and assigned.
Returns:
LispVal: The added value. | [
"Recursively",
"add",
"a",
"new",
"value",
"and",
"its",
"children",
"to",
"me",
"and",
"assign",
"a",
"variable",
"to",
"it",
"."
] | ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f | https://github.com/benzrf/parthial/blob/ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f/parthial/context.py#L94-L107 |
250,010 | benzrf/parthial | parthial/context.py | Context.eval | def eval(self, expr):
"""Evaluate an expression.
This does **not** add its argument (or its result) as an element of me!
That is the responsibility of the code that created the object. This
means that you need to :meth:`Environment.rec_new` any expression you
get from user input before evaluating it.
This, and any wrappers around it, are the **only** entry points to
expression evaluation you should call from ordinary code (i.e., code
that isn't part of a extension).
Args:
expr (LispVal): The expression to evaluate.
Returns:
LispVal: The result of evaluating the expression.
Raises:
~parthial.errs.LimitationError: If evaluating the expression would
require more nesting, more time, or the allocation of more
values than is permissible.
"""
if self.depth >= self.max_depth:
raise LimitationError('too much nesting')
if self.steps >= self.max_steps:
raise LimitationError('too many steps')
self.depth += 1
self.steps += 1
res = expr.eval(self)
self.depth -= 1
return res | python | def eval(self, expr):
"""Evaluate an expression.
This does **not** add its argument (or its result) as an element of me!
That is the responsibility of the code that created the object. This
means that you need to :meth:`Environment.rec_new` any expression you
get from user input before evaluating it.
This, and any wrappers around it, are the **only** entry points to
expression evaluation you should call from ordinary code (i.e., code
that isn't part of a extension).
Args:
expr (LispVal): The expression to evaluate.
Returns:
LispVal: The result of evaluating the expression.
Raises:
~parthial.errs.LimitationError: If evaluating the expression would
require more nesting, more time, or the allocation of more
values than is permissible.
"""
if self.depth >= self.max_depth:
raise LimitationError('too much nesting')
if self.steps >= self.max_steps:
raise LimitationError('too many steps')
self.depth += 1
self.steps += 1
res = expr.eval(self)
self.depth -= 1
return res | [
"def",
"eval",
"(",
"self",
",",
"expr",
")",
":",
"if",
"self",
".",
"depth",
">=",
"self",
".",
"max_depth",
":",
"raise",
"LimitationError",
"(",
"'too much nesting'",
")",
"if",
"self",
".",
"steps",
">=",
"self",
".",
"max_steps",
":",
"raise",
"LimitationError",
"(",
"'too many steps'",
")",
"self",
".",
"depth",
"+=",
"1",
"self",
".",
"steps",
"+=",
"1",
"res",
"=",
"expr",
".",
"eval",
"(",
"self",
")",
"self",
".",
"depth",
"-=",
"1",
"return",
"res"
] | Evaluate an expression.
This does **not** add its argument (or its result) as an element of me!
That is the responsibility of the code that created the object. This
means that you need to :meth:`Environment.rec_new` any expression you
get from user input before evaluating it.
This, and any wrappers around it, are the **only** entry points to
expression evaluation you should call from ordinary code (i.e., code
that isn't part of a extension).
Args:
expr (LispVal): The expression to evaluate.
Returns:
LispVal: The result of evaluating the expression.
Raises:
~parthial.errs.LimitationError: If evaluating the expression would
require more nesting, more time, or the allocation of more
values than is permissible. | [
"Evaluate",
"an",
"expression",
"."
] | ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f | https://github.com/benzrf/parthial/blob/ab1e316aec87ed34dda0ec0e145fe0c8cc8e907f/parthial/context.py#L212-L243 |
250,011 | brbsix/subsystem | subsystem/subsystem.py | error | def error(*args):
"""Display error message via stderr or GUI."""
if sys.stdin.isatty():
print('ERROR:', *args, file=sys.stderr)
else:
notify_error(*args) | python | def error(*args):
"""Display error message via stderr or GUI."""
if sys.stdin.isatty():
print('ERROR:', *args, file=sys.stderr)
else:
notify_error(*args) | [
"def",
"error",
"(",
"*",
"args",
")",
":",
"if",
"sys",
".",
"stdin",
".",
"isatty",
"(",
")",
":",
"print",
"(",
"'ERROR:'",
",",
"*",
"args",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"else",
":",
"notify_error",
"(",
"*",
"args",
")"
] | Display error message via stderr or GUI. | [
"Display",
"error",
"message",
"via",
"stderr",
"or",
"GUI",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L168-L173 |
250,012 | brbsix/subsystem | subsystem/subsystem.py | have | def have(cmd):
"""Determine whether supplied argument is a command on the PATH."""
try:
# Python 3.3+ only
from shutil import which
except ImportError:
def which(cmd):
"""
Given a command, return the path which conforms to the given mode
on the PATH, or None if there is no such file.
"""
def _access_check(path):
"""
Check that a given file can be accessed with the correct mode.
Additionally check that `path` is not a directory.
"""
return (os.path.exists(path) and os.access(
path, os.F_OK | os.X_OK) and not os.path.isdir(path))
# If we're given a path with a directory part, look it up directly
# rather than referring to PATH directories. This includes checking
# relative to the current directory, e.g. ./script
if os.path.dirname(cmd):
if _access_check(cmd):
return cmd
return None
paths = os.environ.get('PATH', os.defpath.lstrip(':')).split(':')
seen = set()
for path in paths:
if path not in seen:
seen.add(path)
name = os.path.join(path, cmd)
if _access_check(name):
return name
return None
return which(cmd) is not None | python | def have(cmd):
"""Determine whether supplied argument is a command on the PATH."""
try:
# Python 3.3+ only
from shutil import which
except ImportError:
def which(cmd):
"""
Given a command, return the path which conforms to the given mode
on the PATH, or None if there is no such file.
"""
def _access_check(path):
"""
Check that a given file can be accessed with the correct mode.
Additionally check that `path` is not a directory.
"""
return (os.path.exists(path) and os.access(
path, os.F_OK | os.X_OK) and not os.path.isdir(path))
# If we're given a path with a directory part, look it up directly
# rather than referring to PATH directories. This includes checking
# relative to the current directory, e.g. ./script
if os.path.dirname(cmd):
if _access_check(cmd):
return cmd
return None
paths = os.environ.get('PATH', os.defpath.lstrip(':')).split(':')
seen = set()
for path in paths:
if path not in seen:
seen.add(path)
name = os.path.join(path, cmd)
if _access_check(name):
return name
return None
return which(cmd) is not None | [
"def",
"have",
"(",
"cmd",
")",
":",
"try",
":",
"# Python 3.3+ only",
"from",
"shutil",
"import",
"which",
"except",
"ImportError",
":",
"def",
"which",
"(",
"cmd",
")",
":",
"\"\"\"\n Given a command, return the path which conforms to the given mode\n on the PATH, or None if there is no such file.\n \"\"\"",
"def",
"_access_check",
"(",
"path",
")",
":",
"\"\"\"\n Check that a given file can be accessed with the correct mode.\n Additionally check that `path` is not a directory.\n \"\"\"",
"return",
"(",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
"and",
"os",
".",
"access",
"(",
"path",
",",
"os",
".",
"F_OK",
"|",
"os",
".",
"X_OK",
")",
"and",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
")",
"# If we're given a path with a directory part, look it up directly",
"# rather than referring to PATH directories. This includes checking",
"# relative to the current directory, e.g. ./script",
"if",
"os",
".",
"path",
".",
"dirname",
"(",
"cmd",
")",
":",
"if",
"_access_check",
"(",
"cmd",
")",
":",
"return",
"cmd",
"return",
"None",
"paths",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'PATH'",
",",
"os",
".",
"defpath",
".",
"lstrip",
"(",
"':'",
")",
")",
".",
"split",
"(",
"':'",
")",
"seen",
"=",
"set",
"(",
")",
"for",
"path",
"in",
"paths",
":",
"if",
"path",
"not",
"in",
"seen",
":",
"seen",
".",
"add",
"(",
"path",
")",
"name",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"cmd",
")",
"if",
"_access_check",
"(",
"name",
")",
":",
"return",
"name",
"return",
"None",
"return",
"which",
"(",
"cmd",
")",
"is",
"not",
"None"
] | Determine whether supplied argument is a command on the PATH. | [
"Determine",
"whether",
"supplied",
"argument",
"is",
"a",
"command",
"on",
"the",
"PATH",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L198-L238 |
250,013 | brbsix/subsystem | subsystem/subsystem.py | multithreader | def multithreader(args, paths):
"""Execute multiple processes at once."""
def shellprocess(path):
"""Return a ready-to-use subprocess."""
import subprocess
return subprocess.Popen(args + [path],
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL)
processes = [shellprocess(path) for path in paths]
for process in processes:
process.wait() | python | def multithreader(args, paths):
"""Execute multiple processes at once."""
def shellprocess(path):
"""Return a ready-to-use subprocess."""
import subprocess
return subprocess.Popen(args + [path],
stderr=subprocess.DEVNULL,
stdout=subprocess.DEVNULL)
processes = [shellprocess(path) for path in paths]
for process in processes:
process.wait() | [
"def",
"multithreader",
"(",
"args",
",",
"paths",
")",
":",
"def",
"shellprocess",
"(",
"path",
")",
":",
"\"\"\"Return a ready-to-use subprocess.\"\"\"",
"import",
"subprocess",
"return",
"subprocess",
".",
"Popen",
"(",
"args",
"+",
"[",
"path",
"]",
",",
"stderr",
"=",
"subprocess",
".",
"DEVNULL",
",",
"stdout",
"=",
"subprocess",
".",
"DEVNULL",
")",
"processes",
"=",
"[",
"shellprocess",
"(",
"path",
")",
"for",
"path",
"in",
"paths",
"]",
"for",
"process",
"in",
"processes",
":",
"process",
".",
"wait",
"(",
")"
] | Execute multiple processes at once. | [
"Execute",
"multiple",
"processes",
"at",
"once",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L276-L289 |
250,014 | brbsix/subsystem | subsystem/subsystem.py | prompt_gui | def prompt_gui(path):
"""Prompt for a new filename via GUI."""
import subprocess
filepath, extension = os.path.splitext(path)
basename = os.path.basename(filepath)
dirname = os.path.dirname(filepath)
retry_text = 'Sorry, please try again...'
icon = 'video-x-generic'
# detect and configure dialog program
if have('yad'):
args = ['yad',
'--borders=5',
'--entry',
'--entry-label=Filename:',
'--entry-text=' + basename,
'--title=Batch Tool',
'--window-icon=' + icon]
retry_args = args + ['--text=<b>' + retry_text + '</b>',
'--text-align=center']
elif have('zenity'):
base = ['zenity',
'--entry',
'--entry-text=' + basename,
'--title=Batch Tool',
'--window-icon=info']
args = base + ['--text=Filename:']
retry_args = base + ['--text=' + retry_text]
else:
fatal('Please install yad (or zenity)')
# display filename prompt
try:
new_basename = subprocess.check_output(
args, universal_newlines=True).strip()
except subprocess.CalledProcessError:
sys.exit(1)
# retry prompt if new filename already exists
while os.path.exists(os.path.join(dirname, new_basename + extension)) and \
new_basename != basename:
try:
new_basename = subprocess.check_output(
retry_args, universal_newlines=True).strip()
except subprocess.CalledProcessError:
sys.exit(1)
if new_basename == '':
new_basename = basename
return os.path.join(dirname, new_basename + extension) | python | def prompt_gui(path):
"""Prompt for a new filename via GUI."""
import subprocess
filepath, extension = os.path.splitext(path)
basename = os.path.basename(filepath)
dirname = os.path.dirname(filepath)
retry_text = 'Sorry, please try again...'
icon = 'video-x-generic'
# detect and configure dialog program
if have('yad'):
args = ['yad',
'--borders=5',
'--entry',
'--entry-label=Filename:',
'--entry-text=' + basename,
'--title=Batch Tool',
'--window-icon=' + icon]
retry_args = args + ['--text=<b>' + retry_text + '</b>',
'--text-align=center']
elif have('zenity'):
base = ['zenity',
'--entry',
'--entry-text=' + basename,
'--title=Batch Tool',
'--window-icon=info']
args = base + ['--text=Filename:']
retry_args = base + ['--text=' + retry_text]
else:
fatal('Please install yad (or zenity)')
# display filename prompt
try:
new_basename = subprocess.check_output(
args, universal_newlines=True).strip()
except subprocess.CalledProcessError:
sys.exit(1)
# retry prompt if new filename already exists
while os.path.exists(os.path.join(dirname, new_basename + extension)) and \
new_basename != basename:
try:
new_basename = subprocess.check_output(
retry_args, universal_newlines=True).strip()
except subprocess.CalledProcessError:
sys.exit(1)
if new_basename == '':
new_basename = basename
return os.path.join(dirname, new_basename + extension) | [
"def",
"prompt_gui",
"(",
"path",
")",
":",
"import",
"subprocess",
"filepath",
",",
"extension",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"path",
")",
"basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filepath",
")",
"dirname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"filepath",
")",
"retry_text",
"=",
"'Sorry, please try again...'",
"icon",
"=",
"'video-x-generic'",
"# detect and configure dialog program",
"if",
"have",
"(",
"'yad'",
")",
":",
"args",
"=",
"[",
"'yad'",
",",
"'--borders=5'",
",",
"'--entry'",
",",
"'--entry-label=Filename:'",
",",
"'--entry-text='",
"+",
"basename",
",",
"'--title=Batch Tool'",
",",
"'--window-icon='",
"+",
"icon",
"]",
"retry_args",
"=",
"args",
"+",
"[",
"'--text=<b>'",
"+",
"retry_text",
"+",
"'</b>'",
",",
"'--text-align=center'",
"]",
"elif",
"have",
"(",
"'zenity'",
")",
":",
"base",
"=",
"[",
"'zenity'",
",",
"'--entry'",
",",
"'--entry-text='",
"+",
"basename",
",",
"'--title=Batch Tool'",
",",
"'--window-icon=info'",
"]",
"args",
"=",
"base",
"+",
"[",
"'--text=Filename:'",
"]",
"retry_args",
"=",
"base",
"+",
"[",
"'--text='",
"+",
"retry_text",
"]",
"else",
":",
"fatal",
"(",
"'Please install yad (or zenity)'",
")",
"# display filename prompt",
"try",
":",
"new_basename",
"=",
"subprocess",
".",
"check_output",
"(",
"args",
",",
"universal_newlines",
"=",
"True",
")",
".",
"strip",
"(",
")",
"except",
"subprocess",
".",
"CalledProcessError",
":",
"sys",
".",
"exit",
"(",
"1",
")",
"# retry prompt if new filename already exists",
"while",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"new_basename",
"+",
"extension",
")",
")",
"and",
"new_basename",
"!=",
"basename",
":",
"try",
":",
"new_basename",
"=",
"subprocess",
".",
"check_output",
"(",
"retry_args",
",",
"universal_newlines",
"=",
"True",
")",
".",
"strip",
"(",
")",
"except",
"subprocess",
".",
"CalledProcessError",
":",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"new_basename",
"==",
"''",
":",
"new_basename",
"=",
"basename",
"return",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"new_basename",
"+",
"extension",
")"
] | Prompt for a new filename via GUI. | [
"Prompt",
"for",
"a",
"new",
"filename",
"via",
"GUI",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L386-L441 |
250,015 | brbsix/subsystem | subsystem/subsystem.py | prompt_terminal | def prompt_terminal(path):
"""Prompt for a new filename via terminal."""
def rlinput(prompt_msg, prefill=''):
"""
One line is read from standard input. Display `prompt_msg` on
standard error. `prefill` is placed into the editing buffer
before editing begins.
"""
import readline
readline.set_startup_hook(lambda: readline.insert_text(prefill))
try:
return input(prompt_msg)
finally:
readline.set_startup_hook()
filepath, extension = os.path.splitext(path)
basename = os.path.basename(filepath)
dirname = os.path.dirname(filepath)
# display filename prompt
new_basename = rlinput('Filename: ', basename)
# retry prompt if new filename already exists
while os.path.exists(os.path.join(dirname, new_basename + extension)) and \
new_basename != basename:
new_basename = rlinput('Sorry, please try again... Filename: ',
basename)
if new_basename == '':
new_basename = basename
return os.path.join(dirname, new_basename + extension) | python | def prompt_terminal(path):
"""Prompt for a new filename via terminal."""
def rlinput(prompt_msg, prefill=''):
"""
One line is read from standard input. Display `prompt_msg` on
standard error. `prefill` is placed into the editing buffer
before editing begins.
"""
import readline
readline.set_startup_hook(lambda: readline.insert_text(prefill))
try:
return input(prompt_msg)
finally:
readline.set_startup_hook()
filepath, extension = os.path.splitext(path)
basename = os.path.basename(filepath)
dirname = os.path.dirname(filepath)
# display filename prompt
new_basename = rlinput('Filename: ', basename)
# retry prompt if new filename already exists
while os.path.exists(os.path.join(dirname, new_basename + extension)) and \
new_basename != basename:
new_basename = rlinput('Sorry, please try again... Filename: ',
basename)
if new_basename == '':
new_basename = basename
return os.path.join(dirname, new_basename + extension) | [
"def",
"prompt_terminal",
"(",
"path",
")",
":",
"def",
"rlinput",
"(",
"prompt_msg",
",",
"prefill",
"=",
"''",
")",
":",
"\"\"\"\n One line is read from standard input. Display `prompt_msg` on\n standard error. `prefill` is placed into the editing buffer\n before editing begins.\n \"\"\"",
"import",
"readline",
"readline",
".",
"set_startup_hook",
"(",
"lambda",
":",
"readline",
".",
"insert_text",
"(",
"prefill",
")",
")",
"try",
":",
"return",
"input",
"(",
"prompt_msg",
")",
"finally",
":",
"readline",
".",
"set_startup_hook",
"(",
")",
"filepath",
",",
"extension",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"path",
")",
"basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filepath",
")",
"dirname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"filepath",
")",
"# display filename prompt",
"new_basename",
"=",
"rlinput",
"(",
"'Filename: '",
",",
"basename",
")",
"# retry prompt if new filename already exists",
"while",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"new_basename",
"+",
"extension",
")",
")",
"and",
"new_basename",
"!=",
"basename",
":",
"new_basename",
"=",
"rlinput",
"(",
"'Sorry, please try again... Filename: '",
",",
"basename",
")",
"if",
"new_basename",
"==",
"''",
":",
"new_basename",
"=",
"basename",
"return",
"os",
".",
"path",
".",
"join",
"(",
"dirname",
",",
"new_basename",
"+",
"extension",
")"
] | Prompt for a new filename via terminal. | [
"Prompt",
"for",
"a",
"new",
"filename",
"via",
"terminal",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L444-L476 |
250,016 | brbsix/subsystem | subsystem/subsystem.py | rename | def rename(path):
"""Rename a file if necessary."""
new_path = prompt(path)
if path != new_path:
try:
from shutil import move
except ImportError:
from os import rename as move
move(path, new_path)
return new_path | python | def rename(path):
"""Rename a file if necessary."""
new_path = prompt(path)
if path != new_path:
try:
from shutil import move
except ImportError:
from os import rename as move
move(path, new_path)
return new_path | [
"def",
"rename",
"(",
"path",
")",
":",
"new_path",
"=",
"prompt",
"(",
"path",
")",
"if",
"path",
"!=",
"new_path",
":",
"try",
":",
"from",
"shutil",
"import",
"move",
"except",
"ImportError",
":",
"from",
"os",
"import",
"rename",
"as",
"move",
"move",
"(",
"path",
",",
"new_path",
")",
"return",
"new_path"
] | Rename a file if necessary. | [
"Rename",
"a",
"file",
"if",
"necessary",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L479-L492 |
250,017 | brbsix/subsystem | subsystem/subsystem.py | scan | def scan(subtitles):
"""Remove advertising from subtitles."""
from importlib.util import find_spec
try:
import subnuker
except ImportError:
fatal('Unable to scan subtitles. Please install subnuker.')
# check whether aeidon is available
aeidon = find_spec('aeidon') is not None
if sys.stdin.isatty():
# launch subnuker from the existing terminal
args = (['--aeidon'] if aeidon else []) + \
['--gui', '--regex'] + subtitles
subnuker.main(args)
else:
# launch subnuker from a new terminal
args = (['--aeidon'] if aeidon else []) + \
['--gui', '--regex']
execute(Config.TERMINAL,
'--execute',
'subnuker',
*args + subtitles) | python | def scan(subtitles):
"""Remove advertising from subtitles."""
from importlib.util import find_spec
try:
import subnuker
except ImportError:
fatal('Unable to scan subtitles. Please install subnuker.')
# check whether aeidon is available
aeidon = find_spec('aeidon') is not None
if sys.stdin.isatty():
# launch subnuker from the existing terminal
args = (['--aeidon'] if aeidon else []) + \
['--gui', '--regex'] + subtitles
subnuker.main(args)
else:
# launch subnuker from a new terminal
args = (['--aeidon'] if aeidon else []) + \
['--gui', '--regex']
execute(Config.TERMINAL,
'--execute',
'subnuker',
*args + subtitles) | [
"def",
"scan",
"(",
"subtitles",
")",
":",
"from",
"importlib",
".",
"util",
"import",
"find_spec",
"try",
":",
"import",
"subnuker",
"except",
"ImportError",
":",
"fatal",
"(",
"'Unable to scan subtitles. Please install subnuker.'",
")",
"# check whether aeidon is available",
"aeidon",
"=",
"find_spec",
"(",
"'aeidon'",
")",
"is",
"not",
"None",
"if",
"sys",
".",
"stdin",
".",
"isatty",
"(",
")",
":",
"# launch subnuker from the existing terminal",
"args",
"=",
"(",
"[",
"'--aeidon'",
"]",
"if",
"aeidon",
"else",
"[",
"]",
")",
"+",
"[",
"'--gui'",
",",
"'--regex'",
"]",
"+",
"subtitles",
"subnuker",
".",
"main",
"(",
"args",
")",
"else",
":",
"# launch subnuker from a new terminal",
"args",
"=",
"(",
"[",
"'--aeidon'",
"]",
"if",
"aeidon",
"else",
"[",
"]",
")",
"+",
"[",
"'--gui'",
",",
"'--regex'",
"]",
"execute",
"(",
"Config",
".",
"TERMINAL",
",",
"'--execute'",
",",
"'subnuker'",
",",
"*",
"args",
"+",
"subtitles",
")"
] | Remove advertising from subtitles. | [
"Remove",
"advertising",
"from",
"subtitles",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L495-L520 |
250,018 | brbsix/subsystem | subsystem/subsystem.py | Downloader.getavailable | def getavailable(self):
"""Return a list of subtitle downloaders available."""
from importlib import import_module
available = []
for script in self.SCRIPTS:
if have(script):
available.append(script)
for module in self.MODULES:
try:
import_module(module)
available.append(module)
except ImportError:
pass
return sorted(available) | python | def getavailable(self):
"""Return a list of subtitle downloaders available."""
from importlib import import_module
available = []
for script in self.SCRIPTS:
if have(script):
available.append(script)
for module in self.MODULES:
try:
import_module(module)
available.append(module)
except ImportError:
pass
return sorted(available) | [
"def",
"getavailable",
"(",
"self",
")",
":",
"from",
"importlib",
"import",
"import_module",
"available",
"=",
"[",
"]",
"for",
"script",
"in",
"self",
".",
"SCRIPTS",
":",
"if",
"have",
"(",
"script",
")",
":",
"available",
".",
"append",
"(",
"script",
")",
"for",
"module",
"in",
"self",
".",
"MODULES",
":",
"try",
":",
"import_module",
"(",
"module",
")",
"available",
".",
"append",
"(",
"module",
")",
"except",
"ImportError",
":",
"pass",
"return",
"sorted",
"(",
"available",
")"
] | Return a list of subtitle downloaders available. | [
"Return",
"a",
"list",
"of",
"subtitle",
"downloaders",
"available",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L59-L77 |
250,019 | brbsix/subsystem | subsystem/subsystem.py | Downloader.getdefault | def getdefault(self):
"""Return an available default downloader."""
if not self.available:
error('No supported downloaders available')
print('\nPlease install one of the following:', file=sys.stderr)
print(self.SUPPORTED, file=sys.stderr)
sys.exit(1)
default = Config.DOWNLOADER_DEFAULT
if default in self.available:
return default
else:
alternative = self.available[0]
warning('Default downloader {!r} not available, using {!r} instead'
.format(Config.DOWNLOADER_DEFAULT, alternative))
return alternative | python | def getdefault(self):
"""Return an available default downloader."""
if not self.available:
error('No supported downloaders available')
print('\nPlease install one of the following:', file=sys.stderr)
print(self.SUPPORTED, file=sys.stderr)
sys.exit(1)
default = Config.DOWNLOADER_DEFAULT
if default in self.available:
return default
else:
alternative = self.available[0]
warning('Default downloader {!r} not available, using {!r} instead'
.format(Config.DOWNLOADER_DEFAULT, alternative))
return alternative | [
"def",
"getdefault",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"available",
":",
"error",
"(",
"'No supported downloaders available'",
")",
"print",
"(",
"'\\nPlease install one of the following:'",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"print",
"(",
"self",
".",
"SUPPORTED",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"default",
"=",
"Config",
".",
"DOWNLOADER_DEFAULT",
"if",
"default",
"in",
"self",
".",
"available",
":",
"return",
"default",
"else",
":",
"alternative",
"=",
"self",
".",
"available",
"[",
"0",
"]",
"warning",
"(",
"'Default downloader {!r} not available, using {!r} instead'",
".",
"format",
"(",
"Config",
".",
"DOWNLOADER_DEFAULT",
",",
"alternative",
")",
")",
"return",
"alternative"
] | Return an available default downloader. | [
"Return",
"an",
"available",
"default",
"downloader",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L79-L96 |
250,020 | brbsix/subsystem | subsystem/subsystem.py | Downloader.download | def download(self, paths, tool, language):
"""Download subtitles via a number of tools."""
if tool not in self.available:
fatal('{!r} is not installed'.format(tool))
try:
from . import plugins
downloader = plugins.__getattribute__(tool)
except AttributeError:
fatal('{!r} is not a supported download tool'.format(tool))
try:
if downloader.__code__.co_argcount is 2:
downloader(paths, language)
elif downloader.__code__.co_argcount is 1:
downloader(paths)
except: # pylint: disable=bare-except
if not check_connectivity():
error('Internet connectivity appears to be disabled')
else:
error('{!r} experienced an unknown error'.format(tool)) | python | def download(self, paths, tool, language):
"""Download subtitles via a number of tools."""
if tool not in self.available:
fatal('{!r} is not installed'.format(tool))
try:
from . import plugins
downloader = plugins.__getattribute__(tool)
except AttributeError:
fatal('{!r} is not a supported download tool'.format(tool))
try:
if downloader.__code__.co_argcount is 2:
downloader(paths, language)
elif downloader.__code__.co_argcount is 1:
downloader(paths)
except: # pylint: disable=bare-except
if not check_connectivity():
error('Internet connectivity appears to be disabled')
else:
error('{!r} experienced an unknown error'.format(tool)) | [
"def",
"download",
"(",
"self",
",",
"paths",
",",
"tool",
",",
"language",
")",
":",
"if",
"tool",
"not",
"in",
"self",
".",
"available",
":",
"fatal",
"(",
"'{!r} is not installed'",
".",
"format",
"(",
"tool",
")",
")",
"try",
":",
"from",
".",
"import",
"plugins",
"downloader",
"=",
"plugins",
".",
"__getattribute__",
"(",
"tool",
")",
"except",
"AttributeError",
":",
"fatal",
"(",
"'{!r} is not a supported download tool'",
".",
"format",
"(",
"tool",
")",
")",
"try",
":",
"if",
"downloader",
".",
"__code__",
".",
"co_argcount",
"is",
"2",
":",
"downloader",
"(",
"paths",
",",
"language",
")",
"elif",
"downloader",
".",
"__code__",
".",
"co_argcount",
"is",
"1",
":",
"downloader",
"(",
"paths",
")",
"except",
":",
"# pylint: disable=bare-except",
"if",
"not",
"check_connectivity",
"(",
")",
":",
"error",
"(",
"'Internet connectivity appears to be disabled'",
")",
"else",
":",
"error",
"(",
"'{!r} experienced an unknown error'",
".",
"format",
"(",
"tool",
")",
")"
] | Download subtitles via a number of tools. | [
"Download",
"subtitles",
"via",
"a",
"number",
"of",
"tools",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L98-L119 |
250,021 | brbsix/subsystem | subsystem/subsystem.py | Downloader.epilog | def epilog(self):
"""Return text formatted for the usage description's epilog."""
bold = '\033[1m'
end = '\033[0m'
available = self.available.copy()
index = available.index(Config.DOWNLOADER_DEFAULT)
available[index] = bold + '(' + available[index] + ')' + end
formatted = ' | '.join(available)
return 'Downloaders available: ' + formatted | python | def epilog(self):
"""Return text formatted for the usage description's epilog."""
bold = '\033[1m'
end = '\033[0m'
available = self.available.copy()
index = available.index(Config.DOWNLOADER_DEFAULT)
available[index] = bold + '(' + available[index] + ')' + end
formatted = ' | '.join(available)
return 'Downloaders available: ' + formatted | [
"def",
"epilog",
"(",
"self",
")",
":",
"bold",
"=",
"'\\033[1m'",
"end",
"=",
"'\\033[0m'",
"available",
"=",
"self",
".",
"available",
".",
"copy",
"(",
")",
"index",
"=",
"available",
".",
"index",
"(",
"Config",
".",
"DOWNLOADER_DEFAULT",
")",
"available",
"[",
"index",
"]",
"=",
"bold",
"+",
"'('",
"+",
"available",
"[",
"index",
"]",
"+",
"')'",
"+",
"end",
"formatted",
"=",
"' | '",
".",
"join",
"(",
"available",
")",
"return",
"'Downloaders available: '",
"+",
"formatted"
] | Return text formatted for the usage description's epilog. | [
"Return",
"text",
"formatted",
"for",
"the",
"usage",
"description",
"s",
"epilog",
"."
] | 57705bc20d71ceaed9e22e21246265d717e98eb8 | https://github.com/brbsix/subsystem/blob/57705bc20d71ceaed9e22e21246265d717e98eb8/subsystem/subsystem.py#L121-L130 |
250,022 | alfred82santa/aio-service-client | service_client/mocks.py | MockManager.patch_mock_desc | def patch_mock_desc(self, patch, *args, **kwarg):
"""
Context manager or decorator in order to patch a mock definition of service
endpoint in a test.
:param patch: Dictionary in order to update endpoint's mock definition
:type patch: dict
:param service_name: Name of service where you want to use mock. If None it will be used
as soon as possible.
:type service_name: str
:param endpoint: Endpoint where you want to use mock. If None it will be used
as soon as possible.
:type endpoint: str
:param offset: Times it must be ignored before use. Default 0. Only positive integers.
:type offset: int
:param limit: Times it could be used. Default 1. 0 means no limit. Only positive integers.
:type limit: int
:return: PatchMockDescDefinition
"""
return PatchMockDescDefinition(patch, self, *args, **kwarg) | python | def patch_mock_desc(self, patch, *args, **kwarg):
"""
Context manager or decorator in order to patch a mock definition of service
endpoint in a test.
:param patch: Dictionary in order to update endpoint's mock definition
:type patch: dict
:param service_name: Name of service where you want to use mock. If None it will be used
as soon as possible.
:type service_name: str
:param endpoint: Endpoint where you want to use mock. If None it will be used
as soon as possible.
:type endpoint: str
:param offset: Times it must be ignored before use. Default 0. Only positive integers.
:type offset: int
:param limit: Times it could be used. Default 1. 0 means no limit. Only positive integers.
:type limit: int
:return: PatchMockDescDefinition
"""
return PatchMockDescDefinition(patch, self, *args, **kwarg) | [
"def",
"patch_mock_desc",
"(",
"self",
",",
"patch",
",",
"*",
"args",
",",
"*",
"*",
"kwarg",
")",
":",
"return",
"PatchMockDescDefinition",
"(",
"patch",
",",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwarg",
")"
] | Context manager or decorator in order to patch a mock definition of service
endpoint in a test.
:param patch: Dictionary in order to update endpoint's mock definition
:type patch: dict
:param service_name: Name of service where you want to use mock. If None it will be used
as soon as possible.
:type service_name: str
:param endpoint: Endpoint where you want to use mock. If None it will be used
as soon as possible.
:type endpoint: str
:param offset: Times it must be ignored before use. Default 0. Only positive integers.
:type offset: int
:param limit: Times it could be used. Default 1. 0 means no limit. Only positive integers.
:type limit: int
:return: PatchMockDescDefinition | [
"Context",
"manager",
"or",
"decorator",
"in",
"order",
"to",
"patch",
"a",
"mock",
"definition",
"of",
"service",
"endpoint",
"in",
"a",
"test",
"."
] | dd9ad49e23067b22178534915aa23ba24f6ff39b | https://github.com/alfred82santa/aio-service-client/blob/dd9ad49e23067b22178534915aa23ba24f6ff39b/service_client/mocks.py#L60-L80 |
250,023 | alfred82santa/aio-service-client | service_client/mocks.py | MockManager.use_mock | def use_mock(self, mock, *args, **kwarg):
"""
Context manager or decorator in order to use a coroutine as mock of service
endpoint in a test.
:param mock: Coroutine to use as mock. It should behave like :meth:`~ClientSession.request`.
:type mock: coroutine
:param service_name: Name of service where you want to use mock. If None it will be used
as soon as possible.
:type service_name: str
:param endpoint: Endpoint where you want to use mock. If None it will be used
as soon as possible.
:type endpoint: str
:param offset: Times it must be ignored before use. Default 0. Only positive integers.
:type offset: int
:param limit: Times it could be used. Default 1. 0 means no limit. Only positive integers.
:type limit: int
:return: UseMockDefinition
"""
return UseMockDefinition(mock, self, *args, **kwarg) | python | def use_mock(self, mock, *args, **kwarg):
"""
Context manager or decorator in order to use a coroutine as mock of service
endpoint in a test.
:param mock: Coroutine to use as mock. It should behave like :meth:`~ClientSession.request`.
:type mock: coroutine
:param service_name: Name of service where you want to use mock. If None it will be used
as soon as possible.
:type service_name: str
:param endpoint: Endpoint where you want to use mock. If None it will be used
as soon as possible.
:type endpoint: str
:param offset: Times it must be ignored before use. Default 0. Only positive integers.
:type offset: int
:param limit: Times it could be used. Default 1. 0 means no limit. Only positive integers.
:type limit: int
:return: UseMockDefinition
"""
return UseMockDefinition(mock, self, *args, **kwarg) | [
"def",
"use_mock",
"(",
"self",
",",
"mock",
",",
"*",
"args",
",",
"*",
"*",
"kwarg",
")",
":",
"return",
"UseMockDefinition",
"(",
"mock",
",",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwarg",
")"
] | Context manager or decorator in order to use a coroutine as mock of service
endpoint in a test.
:param mock: Coroutine to use as mock. It should behave like :meth:`~ClientSession.request`.
:type mock: coroutine
:param service_name: Name of service where you want to use mock. If None it will be used
as soon as possible.
:type service_name: str
:param endpoint: Endpoint where you want to use mock. If None it will be used
as soon as possible.
:type endpoint: str
:param offset: Times it must be ignored before use. Default 0. Only positive integers.
:type offset: int
:param limit: Times it could be used. Default 1. 0 means no limit. Only positive integers.
:type limit: int
:return: UseMockDefinition | [
"Context",
"manager",
"or",
"decorator",
"in",
"order",
"to",
"use",
"a",
"coroutine",
"as",
"mock",
"of",
"service",
"endpoint",
"in",
"a",
"test",
"."
] | dd9ad49e23067b22178534915aa23ba24f6ff39b | https://github.com/alfred82santa/aio-service-client/blob/dd9ad49e23067b22178534915aa23ba24f6ff39b/service_client/mocks.py#L82-L101 |
250,024 | ploneintranet/ploneintranet.workspace | src/ploneintranet/workspace/setuphandlers.py | post_install | def post_install(context):
"""
- sets an acl user group to hold all intranet users
- setup the dynamic groups plugin
- sets the addable types for the ploneintranet policy
"""
marker = 'ploneintranet-workspace.marker'
if context.readDataFile(marker) is None:
return
portal = api.portal.get()
# Set up a group to hold all intranet users
if api.group.get(groupname=INTRANET_USERS_GROUP_ID) is None:
api.group.create(groupname=INTRANET_USERS_GROUP_ID)
# All users have Reader role on portal root
api.group.grant_roles(groupname=INTRANET_USERS_GROUP_ID,
roles=['Reader', ],
obj=portal)
# Set up dynamic groups plugin to put all users into the above group
pas = api.portal.get_tool('acl_users')
if DYNAMIC_GROUPS_PLUGIN_ID not in pas.objectIds():
addDynamicGroupsPlugin(
pas,
DYNAMIC_GROUPS_PLUGIN_ID,
"ploneintranet.workspace Dynamic Groups"
)
plugin = pas[DYNAMIC_GROUPS_PLUGIN_ID]
plugin.addGroup(
group_id=INTRANET_USERS_GROUP_ID,
predicate='python: True',
title='All Intranet Users',
description='',
active=True,
)
# activate the plugin (all interfaces)
activatePluginInterfaces(portal, DYNAMIC_GROUPS_PLUGIN_ID)
# deactivate the enumerate groups interface for collective.workspace
activatePluginInterfaces(portal, 'workspace_groups',
disable=['IGroupEnumerationPlugin'])
# Set up the ploneintranet policy for all addable types
default_types = []
types = api.portal.get_tool('portal_types')
for type_info in types.listTypeInfo():
if type_info.global_allow:
default_types.append(type_info.getId())
if default_types:
pwftool = api.portal.get_tool('portal_placeful_workflow')
policy = pwftool['ploneintranet_policy']
policy.setChainForPortalTypes(default_types, ('(Default)',)) | python | def post_install(context):
"""
- sets an acl user group to hold all intranet users
- setup the dynamic groups plugin
- sets the addable types for the ploneintranet policy
"""
marker = 'ploneintranet-workspace.marker'
if context.readDataFile(marker) is None:
return
portal = api.portal.get()
# Set up a group to hold all intranet users
if api.group.get(groupname=INTRANET_USERS_GROUP_ID) is None:
api.group.create(groupname=INTRANET_USERS_GROUP_ID)
# All users have Reader role on portal root
api.group.grant_roles(groupname=INTRANET_USERS_GROUP_ID,
roles=['Reader', ],
obj=portal)
# Set up dynamic groups plugin to put all users into the above group
pas = api.portal.get_tool('acl_users')
if DYNAMIC_GROUPS_PLUGIN_ID not in pas.objectIds():
addDynamicGroupsPlugin(
pas,
DYNAMIC_GROUPS_PLUGIN_ID,
"ploneintranet.workspace Dynamic Groups"
)
plugin = pas[DYNAMIC_GROUPS_PLUGIN_ID]
plugin.addGroup(
group_id=INTRANET_USERS_GROUP_ID,
predicate='python: True',
title='All Intranet Users',
description='',
active=True,
)
# activate the plugin (all interfaces)
activatePluginInterfaces(portal, DYNAMIC_GROUPS_PLUGIN_ID)
# deactivate the enumerate groups interface for collective.workspace
activatePluginInterfaces(portal, 'workspace_groups',
disable=['IGroupEnumerationPlugin'])
# Set up the ploneintranet policy for all addable types
default_types = []
types = api.portal.get_tool('portal_types')
for type_info in types.listTypeInfo():
if type_info.global_allow:
default_types.append(type_info.getId())
if default_types:
pwftool = api.portal.get_tool('portal_placeful_workflow')
policy = pwftool['ploneintranet_policy']
policy.setChainForPortalTypes(default_types, ('(Default)',)) | [
"def",
"post_install",
"(",
"context",
")",
":",
"marker",
"=",
"'ploneintranet-workspace.marker'",
"if",
"context",
".",
"readDataFile",
"(",
"marker",
")",
"is",
"None",
":",
"return",
"portal",
"=",
"api",
".",
"portal",
".",
"get",
"(",
")",
"# Set up a group to hold all intranet users",
"if",
"api",
".",
"group",
".",
"get",
"(",
"groupname",
"=",
"INTRANET_USERS_GROUP_ID",
")",
"is",
"None",
":",
"api",
".",
"group",
".",
"create",
"(",
"groupname",
"=",
"INTRANET_USERS_GROUP_ID",
")",
"# All users have Reader role on portal root",
"api",
".",
"group",
".",
"grant_roles",
"(",
"groupname",
"=",
"INTRANET_USERS_GROUP_ID",
",",
"roles",
"=",
"[",
"'Reader'",
",",
"]",
",",
"obj",
"=",
"portal",
")",
"# Set up dynamic groups plugin to put all users into the above group",
"pas",
"=",
"api",
".",
"portal",
".",
"get_tool",
"(",
"'acl_users'",
")",
"if",
"DYNAMIC_GROUPS_PLUGIN_ID",
"not",
"in",
"pas",
".",
"objectIds",
"(",
")",
":",
"addDynamicGroupsPlugin",
"(",
"pas",
",",
"DYNAMIC_GROUPS_PLUGIN_ID",
",",
"\"ploneintranet.workspace Dynamic Groups\"",
")",
"plugin",
"=",
"pas",
"[",
"DYNAMIC_GROUPS_PLUGIN_ID",
"]",
"plugin",
".",
"addGroup",
"(",
"group_id",
"=",
"INTRANET_USERS_GROUP_ID",
",",
"predicate",
"=",
"'python: True'",
",",
"title",
"=",
"'All Intranet Users'",
",",
"description",
"=",
"''",
",",
"active",
"=",
"True",
",",
")",
"# activate the plugin (all interfaces)",
"activatePluginInterfaces",
"(",
"portal",
",",
"DYNAMIC_GROUPS_PLUGIN_ID",
")",
"# deactivate the enumerate groups interface for collective.workspace",
"activatePluginInterfaces",
"(",
"portal",
",",
"'workspace_groups'",
",",
"disable",
"=",
"[",
"'IGroupEnumerationPlugin'",
"]",
")",
"# Set up the ploneintranet policy for all addable types",
"default_types",
"=",
"[",
"]",
"types",
"=",
"api",
".",
"portal",
".",
"get_tool",
"(",
"'portal_types'",
")",
"for",
"type_info",
"in",
"types",
".",
"listTypeInfo",
"(",
")",
":",
"if",
"type_info",
".",
"global_allow",
":",
"default_types",
".",
"append",
"(",
"type_info",
".",
"getId",
"(",
")",
")",
"if",
"default_types",
":",
"pwftool",
"=",
"api",
".",
"portal",
".",
"get_tool",
"(",
"'portal_placeful_workflow'",
")",
"policy",
"=",
"pwftool",
"[",
"'ploneintranet_policy'",
"]",
"policy",
".",
"setChainForPortalTypes",
"(",
"default_types",
",",
"(",
"'(Default)'",
",",
")",
")"
] | - sets an acl user group to hold all intranet users
- setup the dynamic groups plugin
- sets the addable types for the ploneintranet policy | [
"-",
"sets",
"an",
"acl",
"user",
"group",
"to",
"hold",
"all",
"intranet",
"users",
"-",
"setup",
"the",
"dynamic",
"groups",
"plugin",
"-",
"sets",
"the",
"addable",
"types",
"for",
"the",
"ploneintranet",
"policy"
] | a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba | https://github.com/ploneintranet/ploneintranet.workspace/blob/a4fc7a5c61f9c6d4d4ad25478ff5250f342ffbba/src/ploneintranet/workspace/setuphandlers.py#L10-L63 |
250,025 | kodexlab/reliure | reliure/types.py | GenericType.serialize | def serialize(self, value, **kwargs):
""" pre-serialize value """
if self._serialize is not None:
return self._serialize(value, **kwargs)
else:
return value | python | def serialize(self, value, **kwargs):
""" pre-serialize value """
if self._serialize is not None:
return self._serialize(value, **kwargs)
else:
return value | [
"def",
"serialize",
"(",
"self",
",",
"value",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"self",
".",
"_serialize",
"is",
"not",
"None",
":",
"return",
"self",
".",
"_serialize",
"(",
"value",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"value"
] | pre-serialize value | [
"pre",
"-",
"serialize",
"value"
] | 0450c7a9254c5c003162738458bbe0c49e777ba5 | https://github.com/kodexlab/reliure/blob/0450c7a9254c5c003162738458bbe0c49e777ba5/reliure/types.py#L132-L137 |
250,026 | abe-winter/pg13-py | pg13/stubredis.py | complete_message | def complete_message(buf):
"returns msg,buf_remaining or None,buf"
# todo: read dollar-length for strings; I dont think I can blindly trust newlines. learn about escaping
# note: all the length checks are +1 over what I need because I'm asking for *complete* lines.
lines=buf.split('\r\n')
if len(lines)<=1: return None,buf
nargs_raw=lines.pop(0)
assert nargs_raw[0]=='*'
nargs=int(nargs_raw[1:])
args=[]
while len(lines)>=2: # 2 because if there isn't at least a blank at the end, we're missing a terminator
if lines[0][0]=='+': args.append(lines.pop(0))
elif lines[0][0]==':': args.append(int(lines.pop(0)[1:]))
elif lines[0][0]=='$':
if len(lines)<3: return None,buf
slen,s=int(lines[0][1:]),lines[1]
if slen!=len(s): raise ValueError('length mismatch %s %r'%(slen,s)) # probably an escaping issue
lines=lines[2:]
args.append(s)
else: raise ValueError('expected initial code in %r'%lines)
if len(args)==nargs: return args,'\r\n'.join(lines)
else: return None,buf | python | def complete_message(buf):
"returns msg,buf_remaining or None,buf"
# todo: read dollar-length for strings; I dont think I can blindly trust newlines. learn about escaping
# note: all the length checks are +1 over what I need because I'm asking for *complete* lines.
lines=buf.split('\r\n')
if len(lines)<=1: return None,buf
nargs_raw=lines.pop(0)
assert nargs_raw[0]=='*'
nargs=int(nargs_raw[1:])
args=[]
while len(lines)>=2: # 2 because if there isn't at least a blank at the end, we're missing a terminator
if lines[0][0]=='+': args.append(lines.pop(0))
elif lines[0][0]==':': args.append(int(lines.pop(0)[1:]))
elif lines[0][0]=='$':
if len(lines)<3: return None,buf
slen,s=int(lines[0][1:]),lines[1]
if slen!=len(s): raise ValueError('length mismatch %s %r'%(slen,s)) # probably an escaping issue
lines=lines[2:]
args.append(s)
else: raise ValueError('expected initial code in %r'%lines)
if len(args)==nargs: return args,'\r\n'.join(lines)
else: return None,buf | [
"def",
"complete_message",
"(",
"buf",
")",
":",
"# todo: read dollar-length for strings; I dont think I can blindly trust newlines. learn about escaping\r",
"# note: all the length checks are +1 over what I need because I'm asking for *complete* lines.\r",
"lines",
"=",
"buf",
".",
"split",
"(",
"'\\r\\n'",
")",
"if",
"len",
"(",
"lines",
")",
"<=",
"1",
":",
"return",
"None",
",",
"buf",
"nargs_raw",
"=",
"lines",
".",
"pop",
"(",
"0",
")",
"assert",
"nargs_raw",
"[",
"0",
"]",
"==",
"'*'",
"nargs",
"=",
"int",
"(",
"nargs_raw",
"[",
"1",
":",
"]",
")",
"args",
"=",
"[",
"]",
"while",
"len",
"(",
"lines",
")",
">=",
"2",
":",
"# 2 because if there isn't at least a blank at the end, we're missing a terminator\r",
"if",
"lines",
"[",
"0",
"]",
"[",
"0",
"]",
"==",
"'+'",
":",
"args",
".",
"append",
"(",
"lines",
".",
"pop",
"(",
"0",
")",
")",
"elif",
"lines",
"[",
"0",
"]",
"[",
"0",
"]",
"==",
"':'",
":",
"args",
".",
"append",
"(",
"int",
"(",
"lines",
".",
"pop",
"(",
"0",
")",
"[",
"1",
":",
"]",
")",
")",
"elif",
"lines",
"[",
"0",
"]",
"[",
"0",
"]",
"==",
"'$'",
":",
"if",
"len",
"(",
"lines",
")",
"<",
"3",
":",
"return",
"None",
",",
"buf",
"slen",
",",
"s",
"=",
"int",
"(",
"lines",
"[",
"0",
"]",
"[",
"1",
":",
"]",
")",
",",
"lines",
"[",
"1",
"]",
"if",
"slen",
"!=",
"len",
"(",
"s",
")",
":",
"raise",
"ValueError",
"(",
"'length mismatch %s %r'",
"%",
"(",
"slen",
",",
"s",
")",
")",
"# probably an escaping issue\r",
"lines",
"=",
"lines",
"[",
"2",
":",
"]",
"args",
".",
"append",
"(",
"s",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'expected initial code in %r'",
"%",
"lines",
")",
"if",
"len",
"(",
"args",
")",
"==",
"nargs",
":",
"return",
"args",
",",
"'\\r\\n'",
".",
"join",
"(",
"lines",
")",
"else",
":",
"return",
"None",
",",
"buf"
] | returns msg,buf_remaining or None,buf | [
"returns",
"msg",
"buf_remaining",
"or",
"None",
"buf"
] | c78806f99f35541a8756987e86edca3438aa97f5 | https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/stubredis.py#L80-L101 |
250,027 | abe-winter/pg13-py | pg13/stubredis.py | RedisState.process_message | def process_message(self,msg,sock):
"serialize and deserialize"
command=msg[0]
try: f={'GET':self.get,'SET':self.set,'SUBSCRIBE':self.sub,'PUBLISH':self.pub,
'PING':self.ping,'GETSET':self.getset,'EXPIRE':self.expire,'DEL':self.delete}[command]
except KeyError: print msg; raise
args=msg[1:]
try: return f(sock,*args) if command in self.SOCK_COMMANDS else f(*args)
except Exception as e:
print e
print msg
return '-ERROR\r\n' | python | def process_message(self,msg,sock):
"serialize and deserialize"
command=msg[0]
try: f={'GET':self.get,'SET':self.set,'SUBSCRIBE':self.sub,'PUBLISH':self.pub,
'PING':self.ping,'GETSET':self.getset,'EXPIRE':self.expire,'DEL':self.delete}[command]
except KeyError: print msg; raise
args=msg[1:]
try: return f(sock,*args) if command in self.SOCK_COMMANDS else f(*args)
except Exception as e:
print e
print msg
return '-ERROR\r\n' | [
"def",
"process_message",
"(",
"self",
",",
"msg",
",",
"sock",
")",
":",
"command",
"=",
"msg",
"[",
"0",
"]",
"try",
":",
"f",
"=",
"{",
"'GET'",
":",
"self",
".",
"get",
",",
"'SET'",
":",
"self",
".",
"set",
",",
"'SUBSCRIBE'",
":",
"self",
".",
"sub",
",",
"'PUBLISH'",
":",
"self",
".",
"pub",
",",
"'PING'",
":",
"self",
".",
"ping",
",",
"'GETSET'",
":",
"self",
".",
"getset",
",",
"'EXPIRE'",
":",
"self",
".",
"expire",
",",
"'DEL'",
":",
"self",
".",
"delete",
"}",
"[",
"command",
"]",
"except",
"KeyError",
":",
"print",
"msg",
"raise",
"args",
"=",
"msg",
"[",
"1",
":",
"]",
"try",
":",
"return",
"f",
"(",
"sock",
",",
"*",
"args",
")",
"if",
"command",
"in",
"self",
".",
"SOCK_COMMANDS",
"else",
"f",
"(",
"*",
"args",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"e",
"print",
"msg",
"return",
"'-ERROR\\r\\n'"
] | serialize and deserialize | [
"serialize",
"and",
"deserialize"
] | c78806f99f35541a8756987e86edca3438aa97f5 | https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/stubredis.py#L22-L33 |
250,028 | cirruscluster/cirruscluster | cirruscluster/ext/ansible/playbook/__init__.py | PlayBook._load_playbook_from_file | def _load_playbook_from_file(self, path, vars={}):
'''
run top level error checking on playbooks and allow them to include other playbooks.
'''
playbook_data = utils.parse_yaml_from_file(path)
accumulated_plays = []
play_basedirs = []
if type(playbook_data) != list:
raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list")
basedir = os.path.dirname(path) or '.'
utils.plugins.push_basedir(basedir)
for play in playbook_data:
if type(play) != dict:
raise errors.AnsibleError("parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s" % play)
if 'include' in play:
tokens = shlex.split(play['include'])
items = ['']
for k in play.keys():
if not k.startswith("with_"):
# These are the keys allowed to be mixed with playbook includes
if k in ("include", "vars"):
continue
else:
raise errors.AnsibleError("parse error: playbook includes cannot be used with other directives: %s" % play)
plugin_name = k[5:]
if plugin_name not in utils.plugins.lookup_loader:
raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
terms = utils.template_ds(basedir, play[k], vars)
items = utils.plugins.lookup_loader.get(plugin_name, basedir=basedir, runner=None).run(terms, inject=vars)
for item in items:
incvars = vars.copy()
incvars['item'] = item
if 'vars' in play:
if isinstance(play['vars'], dict):
incvars.update(play['vars'])
elif isinstance(play['vars'], list):
for v in play['vars']:
incvars.update(v)
for t in tokens[1:]:
(k,v) = t.split("=", 1)
incvars[k] = utils.template_ds(basedir, v, incvars)
included_path = utils.path_dwim(basedir, tokens[0])
(plays, basedirs) = self._load_playbook_from_file(included_path, incvars)
for p in plays:
if 'vars' not in p:
p['vars'] = {}
if isinstance(p['vars'], dict):
p['vars'].update(incvars)
elif isinstance(p['vars'], list):
p['vars'].extend([dict(k=v) for k,v in incvars.iteritems()])
accumulated_plays.extend(plays)
play_basedirs.extend(basedirs)
else:
accumulated_plays.append(play)
play_basedirs.append(basedir)
return (accumulated_plays, play_basedirs) | python | def _load_playbook_from_file(self, path, vars={}):
'''
run top level error checking on playbooks and allow them to include other playbooks.
'''
playbook_data = utils.parse_yaml_from_file(path)
accumulated_plays = []
play_basedirs = []
if type(playbook_data) != list:
raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list")
basedir = os.path.dirname(path) or '.'
utils.plugins.push_basedir(basedir)
for play in playbook_data:
if type(play) != dict:
raise errors.AnsibleError("parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s" % play)
if 'include' in play:
tokens = shlex.split(play['include'])
items = ['']
for k in play.keys():
if not k.startswith("with_"):
# These are the keys allowed to be mixed with playbook includes
if k in ("include", "vars"):
continue
else:
raise errors.AnsibleError("parse error: playbook includes cannot be used with other directives: %s" % play)
plugin_name = k[5:]
if plugin_name not in utils.plugins.lookup_loader:
raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name))
terms = utils.template_ds(basedir, play[k], vars)
items = utils.plugins.lookup_loader.get(plugin_name, basedir=basedir, runner=None).run(terms, inject=vars)
for item in items:
incvars = vars.copy()
incvars['item'] = item
if 'vars' in play:
if isinstance(play['vars'], dict):
incvars.update(play['vars'])
elif isinstance(play['vars'], list):
for v in play['vars']:
incvars.update(v)
for t in tokens[1:]:
(k,v) = t.split("=", 1)
incvars[k] = utils.template_ds(basedir, v, incvars)
included_path = utils.path_dwim(basedir, tokens[0])
(plays, basedirs) = self._load_playbook_from_file(included_path, incvars)
for p in plays:
if 'vars' not in p:
p['vars'] = {}
if isinstance(p['vars'], dict):
p['vars'].update(incvars)
elif isinstance(p['vars'], list):
p['vars'].extend([dict(k=v) for k,v in incvars.iteritems()])
accumulated_plays.extend(plays)
play_basedirs.extend(basedirs)
else:
accumulated_plays.append(play)
play_basedirs.append(basedir)
return (accumulated_plays, play_basedirs) | [
"def",
"_load_playbook_from_file",
"(",
"self",
",",
"path",
",",
"vars",
"=",
"{",
"}",
")",
":",
"playbook_data",
"=",
"utils",
".",
"parse_yaml_from_file",
"(",
"path",
")",
"accumulated_plays",
"=",
"[",
"]",
"play_basedirs",
"=",
"[",
"]",
"if",
"type",
"(",
"playbook_data",
")",
"!=",
"list",
":",
"raise",
"errors",
".",
"AnsibleError",
"(",
"\"parse error: playbooks must be formatted as a YAML list\"",
")",
"basedir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"path",
")",
"or",
"'.'",
"utils",
".",
"plugins",
".",
"push_basedir",
"(",
"basedir",
")",
"for",
"play",
"in",
"playbook_data",
":",
"if",
"type",
"(",
"play",
")",
"!=",
"dict",
":",
"raise",
"errors",
".",
"AnsibleError",
"(",
"\"parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s\"",
"%",
"play",
")",
"if",
"'include'",
"in",
"play",
":",
"tokens",
"=",
"shlex",
".",
"split",
"(",
"play",
"[",
"'include'",
"]",
")",
"items",
"=",
"[",
"''",
"]",
"for",
"k",
"in",
"play",
".",
"keys",
"(",
")",
":",
"if",
"not",
"k",
".",
"startswith",
"(",
"\"with_\"",
")",
":",
"# These are the keys allowed to be mixed with playbook includes",
"if",
"k",
"in",
"(",
"\"include\"",
",",
"\"vars\"",
")",
":",
"continue",
"else",
":",
"raise",
"errors",
".",
"AnsibleError",
"(",
"\"parse error: playbook includes cannot be used with other directives: %s\"",
"%",
"play",
")",
"plugin_name",
"=",
"k",
"[",
"5",
":",
"]",
"if",
"plugin_name",
"not",
"in",
"utils",
".",
"plugins",
".",
"lookup_loader",
":",
"raise",
"errors",
".",
"AnsibleError",
"(",
"\"cannot find lookup plugin named %s for usage in with_%s\"",
"%",
"(",
"plugin_name",
",",
"plugin_name",
")",
")",
"terms",
"=",
"utils",
".",
"template_ds",
"(",
"basedir",
",",
"play",
"[",
"k",
"]",
",",
"vars",
")",
"items",
"=",
"utils",
".",
"plugins",
".",
"lookup_loader",
".",
"get",
"(",
"plugin_name",
",",
"basedir",
"=",
"basedir",
",",
"runner",
"=",
"None",
")",
".",
"run",
"(",
"terms",
",",
"inject",
"=",
"vars",
")",
"for",
"item",
"in",
"items",
":",
"incvars",
"=",
"vars",
".",
"copy",
"(",
")",
"incvars",
"[",
"'item'",
"]",
"=",
"item",
"if",
"'vars'",
"in",
"play",
":",
"if",
"isinstance",
"(",
"play",
"[",
"'vars'",
"]",
",",
"dict",
")",
":",
"incvars",
".",
"update",
"(",
"play",
"[",
"'vars'",
"]",
")",
"elif",
"isinstance",
"(",
"play",
"[",
"'vars'",
"]",
",",
"list",
")",
":",
"for",
"v",
"in",
"play",
"[",
"'vars'",
"]",
":",
"incvars",
".",
"update",
"(",
"v",
")",
"for",
"t",
"in",
"tokens",
"[",
"1",
":",
"]",
":",
"(",
"k",
",",
"v",
")",
"=",
"t",
".",
"split",
"(",
"\"=\"",
",",
"1",
")",
"incvars",
"[",
"k",
"]",
"=",
"utils",
".",
"template_ds",
"(",
"basedir",
",",
"v",
",",
"incvars",
")",
"included_path",
"=",
"utils",
".",
"path_dwim",
"(",
"basedir",
",",
"tokens",
"[",
"0",
"]",
")",
"(",
"plays",
",",
"basedirs",
")",
"=",
"self",
".",
"_load_playbook_from_file",
"(",
"included_path",
",",
"incvars",
")",
"for",
"p",
"in",
"plays",
":",
"if",
"'vars'",
"not",
"in",
"p",
":",
"p",
"[",
"'vars'",
"]",
"=",
"{",
"}",
"if",
"isinstance",
"(",
"p",
"[",
"'vars'",
"]",
",",
"dict",
")",
":",
"p",
"[",
"'vars'",
"]",
".",
"update",
"(",
"incvars",
")",
"elif",
"isinstance",
"(",
"p",
"[",
"'vars'",
"]",
",",
"list",
")",
":",
"p",
"[",
"'vars'",
"]",
".",
"extend",
"(",
"[",
"dict",
"(",
"k",
"=",
"v",
")",
"for",
"k",
",",
"v",
"in",
"incvars",
".",
"iteritems",
"(",
")",
"]",
")",
"accumulated_plays",
".",
"extend",
"(",
"plays",
")",
"play_basedirs",
".",
"extend",
"(",
"basedirs",
")",
"else",
":",
"accumulated_plays",
".",
"append",
"(",
"play",
")",
"play_basedirs",
".",
"append",
"(",
"basedir",
")",
"return",
"(",
"accumulated_plays",
",",
"play_basedirs",
")"
] | run top level error checking on playbooks and allow them to include other playbooks. | [
"run",
"top",
"level",
"error",
"checking",
"on",
"playbooks",
"and",
"allow",
"them",
"to",
"include",
"other",
"playbooks",
"."
] | 977409929dd81322d886425cdced10608117d5d7 | https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/playbook/__init__.py#L125-L186 |
250,029 | cirruscluster/cirruscluster | cirruscluster/ext/ansible/playbook/__init__.py | PlayBook.run | def run(self):
''' run all patterns in the playbook '''
plays = []
matched_tags_all = set()
unmatched_tags_all = set()
# loop through all patterns and run them
self.callbacks.on_start()
for (play_ds, play_basedir) in zip(self.playbook, self.play_basedirs):
play = Play(self, play_ds, play_basedir)
matched_tags, unmatched_tags = play.compare_tags(self.only_tags)
matched_tags_all = matched_tags_all | matched_tags
unmatched_tags_all = unmatched_tags_all | unmatched_tags
# if we have matched_tags, the play must be run.
# if the play contains no tasks, assume we just want to gather facts
if (len(matched_tags) > 0 or len(play.tasks()) == 0):
plays.append(play)
# if the playbook is invoked with --tags that don't exist at all in the playbooks
# then we need to raise an error so that the user can correct the arguments.
unknown_tags = set(self.only_tags) - (matched_tags_all | unmatched_tags_all)
unknown_tags.discard('all')
if len(unknown_tags) > 0:
unmatched_tags_all.discard('all')
msg = 'tag(s) not found in playbook: %s. possible values: %s'
unknown = ','.join(sorted(unknown_tags))
unmatched = ','.join(sorted(unmatched_tags_all))
raise errors.AnsibleError(msg % (unknown, unmatched))
for play in plays:
if not self._run_play(play):
break
# summarize the results
results = {}
for host in self.stats.processed.keys():
results[host] = self.stats.summarize(host)
return results | python | def run(self):
''' run all patterns in the playbook '''
plays = []
matched_tags_all = set()
unmatched_tags_all = set()
# loop through all patterns and run them
self.callbacks.on_start()
for (play_ds, play_basedir) in zip(self.playbook, self.play_basedirs):
play = Play(self, play_ds, play_basedir)
matched_tags, unmatched_tags = play.compare_tags(self.only_tags)
matched_tags_all = matched_tags_all | matched_tags
unmatched_tags_all = unmatched_tags_all | unmatched_tags
# if we have matched_tags, the play must be run.
# if the play contains no tasks, assume we just want to gather facts
if (len(matched_tags) > 0 or len(play.tasks()) == 0):
plays.append(play)
# if the playbook is invoked with --tags that don't exist at all in the playbooks
# then we need to raise an error so that the user can correct the arguments.
unknown_tags = set(self.only_tags) - (matched_tags_all | unmatched_tags_all)
unknown_tags.discard('all')
if len(unknown_tags) > 0:
unmatched_tags_all.discard('all')
msg = 'tag(s) not found in playbook: %s. possible values: %s'
unknown = ','.join(sorted(unknown_tags))
unmatched = ','.join(sorted(unmatched_tags_all))
raise errors.AnsibleError(msg % (unknown, unmatched))
for play in plays:
if not self._run_play(play):
break
# summarize the results
results = {}
for host in self.stats.processed.keys():
results[host] = self.stats.summarize(host)
return results | [
"def",
"run",
"(",
"self",
")",
":",
"plays",
"=",
"[",
"]",
"matched_tags_all",
"=",
"set",
"(",
")",
"unmatched_tags_all",
"=",
"set",
"(",
")",
"# loop through all patterns and run them",
"self",
".",
"callbacks",
".",
"on_start",
"(",
")",
"for",
"(",
"play_ds",
",",
"play_basedir",
")",
"in",
"zip",
"(",
"self",
".",
"playbook",
",",
"self",
".",
"play_basedirs",
")",
":",
"play",
"=",
"Play",
"(",
"self",
",",
"play_ds",
",",
"play_basedir",
")",
"matched_tags",
",",
"unmatched_tags",
"=",
"play",
".",
"compare_tags",
"(",
"self",
".",
"only_tags",
")",
"matched_tags_all",
"=",
"matched_tags_all",
"|",
"matched_tags",
"unmatched_tags_all",
"=",
"unmatched_tags_all",
"|",
"unmatched_tags",
"# if we have matched_tags, the play must be run.",
"# if the play contains no tasks, assume we just want to gather facts",
"if",
"(",
"len",
"(",
"matched_tags",
")",
">",
"0",
"or",
"len",
"(",
"play",
".",
"tasks",
"(",
")",
")",
"==",
"0",
")",
":",
"plays",
".",
"append",
"(",
"play",
")",
"# if the playbook is invoked with --tags that don't exist at all in the playbooks",
"# then we need to raise an error so that the user can correct the arguments.",
"unknown_tags",
"=",
"set",
"(",
"self",
".",
"only_tags",
")",
"-",
"(",
"matched_tags_all",
"|",
"unmatched_tags_all",
")",
"unknown_tags",
".",
"discard",
"(",
"'all'",
")",
"if",
"len",
"(",
"unknown_tags",
")",
">",
"0",
":",
"unmatched_tags_all",
".",
"discard",
"(",
"'all'",
")",
"msg",
"=",
"'tag(s) not found in playbook: %s. possible values: %s'",
"unknown",
"=",
"','",
".",
"join",
"(",
"sorted",
"(",
"unknown_tags",
")",
")",
"unmatched",
"=",
"','",
".",
"join",
"(",
"sorted",
"(",
"unmatched_tags_all",
")",
")",
"raise",
"errors",
".",
"AnsibleError",
"(",
"msg",
"%",
"(",
"unknown",
",",
"unmatched",
")",
")",
"for",
"play",
"in",
"plays",
":",
"if",
"not",
"self",
".",
"_run_play",
"(",
"play",
")",
":",
"break",
"# summarize the results",
"results",
"=",
"{",
"}",
"for",
"host",
"in",
"self",
".",
"stats",
".",
"processed",
".",
"keys",
"(",
")",
":",
"results",
"[",
"host",
"]",
"=",
"self",
".",
"stats",
".",
"summarize",
"(",
"host",
")",
"return",
"results"
] | run all patterns in the playbook | [
"run",
"all",
"patterns",
"in",
"the",
"playbook"
] | 977409929dd81322d886425cdced10608117d5d7 | https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/playbook/__init__.py#L190-L229 |
250,030 | cirruscluster/cirruscluster | cirruscluster/ext/ansible/playbook/__init__.py | PlayBook._async_poll | def _async_poll(self, poller, async_seconds, async_poll_interval):
''' launch an async job, if poll_interval is set, wait for completion '''
results = poller.wait(async_seconds, async_poll_interval)
# mark any hosts that are still listed as started as failed
# since these likely got killed by async_wrapper
for host in poller.hosts_to_poll:
reason = { 'failed' : 1, 'rc' : None, 'msg' : 'timed out' }
self.runner_callbacks.on_failed(host, reason)
results['contacted'][host] = reason
return results | python | def _async_poll(self, poller, async_seconds, async_poll_interval):
''' launch an async job, if poll_interval is set, wait for completion '''
results = poller.wait(async_seconds, async_poll_interval)
# mark any hosts that are still listed as started as failed
# since these likely got killed by async_wrapper
for host in poller.hosts_to_poll:
reason = { 'failed' : 1, 'rc' : None, 'msg' : 'timed out' }
self.runner_callbacks.on_failed(host, reason)
results['contacted'][host] = reason
return results | [
"def",
"_async_poll",
"(",
"self",
",",
"poller",
",",
"async_seconds",
",",
"async_poll_interval",
")",
":",
"results",
"=",
"poller",
".",
"wait",
"(",
"async_seconds",
",",
"async_poll_interval",
")",
"# mark any hosts that are still listed as started as failed",
"# since these likely got killed by async_wrapper",
"for",
"host",
"in",
"poller",
".",
"hosts_to_poll",
":",
"reason",
"=",
"{",
"'failed'",
":",
"1",
",",
"'rc'",
":",
"None",
",",
"'msg'",
":",
"'timed out'",
"}",
"self",
".",
"runner_callbacks",
".",
"on_failed",
"(",
"host",
",",
"reason",
")",
"results",
"[",
"'contacted'",
"]",
"[",
"host",
"]",
"=",
"reason",
"return",
"results"
] | launch an async job, if poll_interval is set, wait for completion | [
"launch",
"an",
"async",
"job",
"if",
"poll_interval",
"is",
"set",
"wait",
"for",
"completion"
] | 977409929dd81322d886425cdced10608117d5d7 | https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/playbook/__init__.py#L233-L245 |
250,031 | cirruscluster/cirruscluster | cirruscluster/ext/ansible/playbook/__init__.py | PlayBook._list_available_hosts | def _list_available_hosts(self, *args):
''' returns a list of hosts that haven't failed and aren't dark '''
return [ h for h in self.inventory.list_hosts(*args) if (h not in self.stats.failures) and (h not in self.stats.dark)] | python | def _list_available_hosts(self, *args):
''' returns a list of hosts that haven't failed and aren't dark '''
return [ h for h in self.inventory.list_hosts(*args) if (h not in self.stats.failures) and (h not in self.stats.dark)] | [
"def",
"_list_available_hosts",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"[",
"h",
"for",
"h",
"in",
"self",
".",
"inventory",
".",
"list_hosts",
"(",
"*",
"args",
")",
"if",
"(",
"h",
"not",
"in",
"self",
".",
"stats",
".",
"failures",
")",
"and",
"(",
"h",
"not",
"in",
"self",
".",
"stats",
".",
"dark",
")",
"]"
] | returns a list of hosts that haven't failed and aren't dark | [
"returns",
"a",
"list",
"of",
"hosts",
"that",
"haven",
"t",
"failed",
"and",
"aren",
"t",
"dark"
] | 977409929dd81322d886425cdced10608117d5d7 | https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/playbook/__init__.py#L249-L252 |
250,032 | cirruscluster/cirruscluster | cirruscluster/ext/ansible/playbook/__init__.py | PlayBook._run_task_internal | def _run_task_internal(self, task):
''' run a particular module step in a playbook '''
hosts = self._list_available_hosts()
self.inventory.restrict_to(hosts)
runner = cirruscluster.ext.ansible.runner.Runner(
pattern=task.play.hosts, inventory=self.inventory, module_name=task.module_name,
module_args=task.module_args, forks=self.forks,
remote_pass=self.remote_pass, module_path=self.module_path,
timeout=self.timeout, remote_user=task.play.remote_user,
remote_port=task.play.remote_port, module_vars=task.module_vars,
private_key_file=self.private_key_file,
private_key=self.private_key,
setup_cache=self.SETUP_CACHE, basedir=task.play.basedir,
conditional=task.only_if, callbacks=self.runner_callbacks,
sudo=task.sudo, sudo_user=task.sudo_user,
transport=task.transport, sudo_pass=task.sudo_pass, is_playbook=True
)
if task.async_seconds == 0:
results = runner.run()
else:
results, poller = runner.run_async(task.async_seconds)
self.stats.compute(results)
if task.async_poll_interval > 0:
# if not polling, playbook requested fire and forget, so don't poll
results = self._async_poll(poller, task.async_seconds, task.async_poll_interval)
contacted = results.get('contacted',{})
dark = results.get('dark', {})
self.inventory.lift_restriction()
if len(contacted.keys()) == 0 and len(dark.keys()) == 0:
return None
return results | python | def _run_task_internal(self, task):
''' run a particular module step in a playbook '''
hosts = self._list_available_hosts()
self.inventory.restrict_to(hosts)
runner = cirruscluster.ext.ansible.runner.Runner(
pattern=task.play.hosts, inventory=self.inventory, module_name=task.module_name,
module_args=task.module_args, forks=self.forks,
remote_pass=self.remote_pass, module_path=self.module_path,
timeout=self.timeout, remote_user=task.play.remote_user,
remote_port=task.play.remote_port, module_vars=task.module_vars,
private_key_file=self.private_key_file,
private_key=self.private_key,
setup_cache=self.SETUP_CACHE, basedir=task.play.basedir,
conditional=task.only_if, callbacks=self.runner_callbacks,
sudo=task.sudo, sudo_user=task.sudo_user,
transport=task.transport, sudo_pass=task.sudo_pass, is_playbook=True
)
if task.async_seconds == 0:
results = runner.run()
else:
results, poller = runner.run_async(task.async_seconds)
self.stats.compute(results)
if task.async_poll_interval > 0:
# if not polling, playbook requested fire and forget, so don't poll
results = self._async_poll(poller, task.async_seconds, task.async_poll_interval)
contacted = results.get('contacted',{})
dark = results.get('dark', {})
self.inventory.lift_restriction()
if len(contacted.keys()) == 0 and len(dark.keys()) == 0:
return None
return results | [
"def",
"_run_task_internal",
"(",
"self",
",",
"task",
")",
":",
"hosts",
"=",
"self",
".",
"_list_available_hosts",
"(",
")",
"self",
".",
"inventory",
".",
"restrict_to",
"(",
"hosts",
")",
"runner",
"=",
"cirruscluster",
".",
"ext",
".",
"ansible",
".",
"runner",
".",
"Runner",
"(",
"pattern",
"=",
"task",
".",
"play",
".",
"hosts",
",",
"inventory",
"=",
"self",
".",
"inventory",
",",
"module_name",
"=",
"task",
".",
"module_name",
",",
"module_args",
"=",
"task",
".",
"module_args",
",",
"forks",
"=",
"self",
".",
"forks",
",",
"remote_pass",
"=",
"self",
".",
"remote_pass",
",",
"module_path",
"=",
"self",
".",
"module_path",
",",
"timeout",
"=",
"self",
".",
"timeout",
",",
"remote_user",
"=",
"task",
".",
"play",
".",
"remote_user",
",",
"remote_port",
"=",
"task",
".",
"play",
".",
"remote_port",
",",
"module_vars",
"=",
"task",
".",
"module_vars",
",",
"private_key_file",
"=",
"self",
".",
"private_key_file",
",",
"private_key",
"=",
"self",
".",
"private_key",
",",
"setup_cache",
"=",
"self",
".",
"SETUP_CACHE",
",",
"basedir",
"=",
"task",
".",
"play",
".",
"basedir",
",",
"conditional",
"=",
"task",
".",
"only_if",
",",
"callbacks",
"=",
"self",
".",
"runner_callbacks",
",",
"sudo",
"=",
"task",
".",
"sudo",
",",
"sudo_user",
"=",
"task",
".",
"sudo_user",
",",
"transport",
"=",
"task",
".",
"transport",
",",
"sudo_pass",
"=",
"task",
".",
"sudo_pass",
",",
"is_playbook",
"=",
"True",
")",
"if",
"task",
".",
"async_seconds",
"==",
"0",
":",
"results",
"=",
"runner",
".",
"run",
"(",
")",
"else",
":",
"results",
",",
"poller",
"=",
"runner",
".",
"run_async",
"(",
"task",
".",
"async_seconds",
")",
"self",
".",
"stats",
".",
"compute",
"(",
"results",
")",
"if",
"task",
".",
"async_poll_interval",
">",
"0",
":",
"# if not polling, playbook requested fire and forget, so don't poll",
"results",
"=",
"self",
".",
"_async_poll",
"(",
"poller",
",",
"task",
".",
"async_seconds",
",",
"task",
".",
"async_poll_interval",
")",
"contacted",
"=",
"results",
".",
"get",
"(",
"'contacted'",
",",
"{",
"}",
")",
"dark",
"=",
"results",
".",
"get",
"(",
"'dark'",
",",
"{",
"}",
")",
"self",
".",
"inventory",
".",
"lift_restriction",
"(",
")",
"if",
"len",
"(",
"contacted",
".",
"keys",
"(",
")",
")",
"==",
"0",
"and",
"len",
"(",
"dark",
".",
"keys",
"(",
")",
")",
"==",
"0",
":",
"return",
"None",
"return",
"results"
] | run a particular module step in a playbook | [
"run",
"a",
"particular",
"module",
"step",
"in",
"a",
"playbook"
] | 977409929dd81322d886425cdced10608117d5d7 | https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/playbook/__init__.py#L256-L293 |
250,033 | cirruscluster/cirruscluster | cirruscluster/ext/ansible/playbook/__init__.py | PlayBook._run_task | def _run_task(self, play, task, is_handler):
''' run a single task in the playbook and recursively run any subtasks. '''
self.callbacks.on_task_start(utils.template(play.basedir, task.name, task.module_vars, lookup_fatal=False), is_handler)
# load up an appropriate ansible runner to run the task in parallel
results = self._run_task_internal(task)
# if no hosts are matched, carry on
hosts_remaining = True
if results is None:
hosts_remaining = False
results = {}
contacted = results.get('contacted', {})
self.stats.compute(results, ignore_errors=task.ignore_errors)
# add facts to the global setup cache
for host, result in contacted.iteritems():
# Skip register variable if host is skipped
if result.get('skipped', False):
continue
facts = result.get('ansible_facts', {})
self.SETUP_CACHE[host].update(facts)
# extra vars need to always trump - so update again following the facts
self.SETUP_CACHE[host].update(self.extra_vars)
if task.register:
if 'stdout' in result:
result['stdout_lines'] = result['stdout'].splitlines()
self.SETUP_CACHE[host][task.register] = result
# flag which notify handlers need to be run
if len(task.notify) > 0:
for host, results in results.get('contacted',{}).iteritems():
if results.get('changed', False):
for handler_name in task.notify:
self._flag_handler(play, utils.template(play.basedir, handler_name, task.module_vars), host)
return hosts_remaining | python | def _run_task(self, play, task, is_handler):
''' run a single task in the playbook and recursively run any subtasks. '''
self.callbacks.on_task_start(utils.template(play.basedir, task.name, task.module_vars, lookup_fatal=False), is_handler)
# load up an appropriate ansible runner to run the task in parallel
results = self._run_task_internal(task)
# if no hosts are matched, carry on
hosts_remaining = True
if results is None:
hosts_remaining = False
results = {}
contacted = results.get('contacted', {})
self.stats.compute(results, ignore_errors=task.ignore_errors)
# add facts to the global setup cache
for host, result in contacted.iteritems():
# Skip register variable if host is skipped
if result.get('skipped', False):
continue
facts = result.get('ansible_facts', {})
self.SETUP_CACHE[host].update(facts)
# extra vars need to always trump - so update again following the facts
self.SETUP_CACHE[host].update(self.extra_vars)
if task.register:
if 'stdout' in result:
result['stdout_lines'] = result['stdout'].splitlines()
self.SETUP_CACHE[host][task.register] = result
# flag which notify handlers need to be run
if len(task.notify) > 0:
for host, results in results.get('contacted',{}).iteritems():
if results.get('changed', False):
for handler_name in task.notify:
self._flag_handler(play, utils.template(play.basedir, handler_name, task.module_vars), host)
return hosts_remaining | [
"def",
"_run_task",
"(",
"self",
",",
"play",
",",
"task",
",",
"is_handler",
")",
":",
"self",
".",
"callbacks",
".",
"on_task_start",
"(",
"utils",
".",
"template",
"(",
"play",
".",
"basedir",
",",
"task",
".",
"name",
",",
"task",
".",
"module_vars",
",",
"lookup_fatal",
"=",
"False",
")",
",",
"is_handler",
")",
"# load up an appropriate ansible runner to run the task in parallel",
"results",
"=",
"self",
".",
"_run_task_internal",
"(",
"task",
")",
"# if no hosts are matched, carry on",
"hosts_remaining",
"=",
"True",
"if",
"results",
"is",
"None",
":",
"hosts_remaining",
"=",
"False",
"results",
"=",
"{",
"}",
"contacted",
"=",
"results",
".",
"get",
"(",
"'contacted'",
",",
"{",
"}",
")",
"self",
".",
"stats",
".",
"compute",
"(",
"results",
",",
"ignore_errors",
"=",
"task",
".",
"ignore_errors",
")",
"# add facts to the global setup cache",
"for",
"host",
",",
"result",
"in",
"contacted",
".",
"iteritems",
"(",
")",
":",
"# Skip register variable if host is skipped",
"if",
"result",
".",
"get",
"(",
"'skipped'",
",",
"False",
")",
":",
"continue",
"facts",
"=",
"result",
".",
"get",
"(",
"'ansible_facts'",
",",
"{",
"}",
")",
"self",
".",
"SETUP_CACHE",
"[",
"host",
"]",
".",
"update",
"(",
"facts",
")",
"# extra vars need to always trump - so update again following the facts",
"self",
".",
"SETUP_CACHE",
"[",
"host",
"]",
".",
"update",
"(",
"self",
".",
"extra_vars",
")",
"if",
"task",
".",
"register",
":",
"if",
"'stdout'",
"in",
"result",
":",
"result",
"[",
"'stdout_lines'",
"]",
"=",
"result",
"[",
"'stdout'",
"]",
".",
"splitlines",
"(",
")",
"self",
".",
"SETUP_CACHE",
"[",
"host",
"]",
"[",
"task",
".",
"register",
"]",
"=",
"result",
"# flag which notify handlers need to be run",
"if",
"len",
"(",
"task",
".",
"notify",
")",
">",
"0",
":",
"for",
"host",
",",
"results",
"in",
"results",
".",
"get",
"(",
"'contacted'",
",",
"{",
"}",
")",
".",
"iteritems",
"(",
")",
":",
"if",
"results",
".",
"get",
"(",
"'changed'",
",",
"False",
")",
":",
"for",
"handler_name",
"in",
"task",
".",
"notify",
":",
"self",
".",
"_flag_handler",
"(",
"play",
",",
"utils",
".",
"template",
"(",
"play",
".",
"basedir",
",",
"handler_name",
",",
"task",
".",
"module_vars",
")",
",",
"host",
")",
"return",
"hosts_remaining"
] | run a single task in the playbook and recursively run any subtasks. | [
"run",
"a",
"single",
"task",
"in",
"the",
"playbook",
"and",
"recursively",
"run",
"any",
"subtasks",
"."
] | 977409929dd81322d886425cdced10608117d5d7 | https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/playbook/__init__.py#L297-L335 |
250,034 | cirruscluster/cirruscluster | cirruscluster/ext/ansible/playbook/__init__.py | PlayBook._flag_handler | def _flag_handler(self, play, handler_name, host):
'''
if a task has any notify elements, flag handlers for run
at end of execution cycle for hosts that have indicated
changes have been made
'''
found = False
for x in play.handlers():
if handler_name == utils.template(play.basedir, x.name, x.module_vars):
found = True
self.callbacks.on_notify(host, x.name)
x.notified_by.append(host)
if not found:
raise errors.AnsibleError("change handler (%s) is not defined" % handler_name) | python | def _flag_handler(self, play, handler_name, host):
'''
if a task has any notify elements, flag handlers for run
at end of execution cycle for hosts that have indicated
changes have been made
'''
found = False
for x in play.handlers():
if handler_name == utils.template(play.basedir, x.name, x.module_vars):
found = True
self.callbacks.on_notify(host, x.name)
x.notified_by.append(host)
if not found:
raise errors.AnsibleError("change handler (%s) is not defined" % handler_name) | [
"def",
"_flag_handler",
"(",
"self",
",",
"play",
",",
"handler_name",
",",
"host",
")",
":",
"found",
"=",
"False",
"for",
"x",
"in",
"play",
".",
"handlers",
"(",
")",
":",
"if",
"handler_name",
"==",
"utils",
".",
"template",
"(",
"play",
".",
"basedir",
",",
"x",
".",
"name",
",",
"x",
".",
"module_vars",
")",
":",
"found",
"=",
"True",
"self",
".",
"callbacks",
".",
"on_notify",
"(",
"host",
",",
"x",
".",
"name",
")",
"x",
".",
"notified_by",
".",
"append",
"(",
"host",
")",
"if",
"not",
"found",
":",
"raise",
"errors",
".",
"AnsibleError",
"(",
"\"change handler (%s) is not defined\"",
"%",
"handler_name",
")"
] | if a task has any notify elements, flag handlers for run
at end of execution cycle for hosts that have indicated
changes have been made | [
"if",
"a",
"task",
"has",
"any",
"notify",
"elements",
"flag",
"handlers",
"for",
"run",
"at",
"end",
"of",
"execution",
"cycle",
"for",
"hosts",
"that",
"have",
"indicated",
"changes",
"have",
"been",
"made"
] | 977409929dd81322d886425cdced10608117d5d7 | https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/playbook/__init__.py#L339-L353 |
250,035 | cirruscluster/cirruscluster | cirruscluster/ext/ansible/playbook/__init__.py | PlayBook._do_setup_step | def _do_setup_step(self, play):
''' get facts from the remote system '''
host_list = self._list_available_hosts(play.hosts)
if play.gather_facts is False:
return {}
elif play.gather_facts is None:
host_list = [h for h in host_list if h not in self.SETUP_CACHE or 'module_setup' not in self.SETUP_CACHE[h]]
if len(host_list) == 0:
return {}
self.callbacks.on_setup()
self.inventory.restrict_to(host_list)
# push any variables down to the system
setup_results = cirruscluster.ext.ansible.runner.Runner(
pattern=play.hosts, module_name='setup', module_args={}, inventory=self.inventory,
forks=self.forks, module_path=self.module_path, timeout=self.timeout, remote_user=play.remote_user,
remote_pass=self.remote_pass, remote_port=play.remote_port, private_key_file=self.private_key_file,
private_key=self.private_key,
setup_cache=self.SETUP_CACHE, callbacks=self.runner_callbacks, sudo=play.sudo, sudo_user=play.sudo_user,
transport=play.transport, sudo_pass=self.sudo_pass, is_playbook=True, module_vars=play.vars,
).run()
self.stats.compute(setup_results, setup=True)
self.inventory.lift_restriction()
# now for each result, load into the setup cache so we can
# let runner template out future commands
setup_ok = setup_results.get('contacted', {})
for (host, result) in setup_ok.iteritems():
self.SETUP_CACHE[host].update({'module_setup': True})
self.SETUP_CACHE[host].update(result.get('ansible_facts', {}))
return setup_results | python | def _do_setup_step(self, play):
''' get facts from the remote system '''
host_list = self._list_available_hosts(play.hosts)
if play.gather_facts is False:
return {}
elif play.gather_facts is None:
host_list = [h for h in host_list if h not in self.SETUP_CACHE or 'module_setup' not in self.SETUP_CACHE[h]]
if len(host_list) == 0:
return {}
self.callbacks.on_setup()
self.inventory.restrict_to(host_list)
# push any variables down to the system
setup_results = cirruscluster.ext.ansible.runner.Runner(
pattern=play.hosts, module_name='setup', module_args={}, inventory=self.inventory,
forks=self.forks, module_path=self.module_path, timeout=self.timeout, remote_user=play.remote_user,
remote_pass=self.remote_pass, remote_port=play.remote_port, private_key_file=self.private_key_file,
private_key=self.private_key,
setup_cache=self.SETUP_CACHE, callbacks=self.runner_callbacks, sudo=play.sudo, sudo_user=play.sudo_user,
transport=play.transport, sudo_pass=self.sudo_pass, is_playbook=True, module_vars=play.vars,
).run()
self.stats.compute(setup_results, setup=True)
self.inventory.lift_restriction()
# now for each result, load into the setup cache so we can
# let runner template out future commands
setup_ok = setup_results.get('contacted', {})
for (host, result) in setup_ok.iteritems():
self.SETUP_CACHE[host].update({'module_setup': True})
self.SETUP_CACHE[host].update(result.get('ansible_facts', {}))
return setup_results | [
"def",
"_do_setup_step",
"(",
"self",
",",
"play",
")",
":",
"host_list",
"=",
"self",
".",
"_list_available_hosts",
"(",
"play",
".",
"hosts",
")",
"if",
"play",
".",
"gather_facts",
"is",
"False",
":",
"return",
"{",
"}",
"elif",
"play",
".",
"gather_facts",
"is",
"None",
":",
"host_list",
"=",
"[",
"h",
"for",
"h",
"in",
"host_list",
"if",
"h",
"not",
"in",
"self",
".",
"SETUP_CACHE",
"or",
"'module_setup'",
"not",
"in",
"self",
".",
"SETUP_CACHE",
"[",
"h",
"]",
"]",
"if",
"len",
"(",
"host_list",
")",
"==",
"0",
":",
"return",
"{",
"}",
"self",
".",
"callbacks",
".",
"on_setup",
"(",
")",
"self",
".",
"inventory",
".",
"restrict_to",
"(",
"host_list",
")",
"# push any variables down to the system",
"setup_results",
"=",
"cirruscluster",
".",
"ext",
".",
"ansible",
".",
"runner",
".",
"Runner",
"(",
"pattern",
"=",
"play",
".",
"hosts",
",",
"module_name",
"=",
"'setup'",
",",
"module_args",
"=",
"{",
"}",
",",
"inventory",
"=",
"self",
".",
"inventory",
",",
"forks",
"=",
"self",
".",
"forks",
",",
"module_path",
"=",
"self",
".",
"module_path",
",",
"timeout",
"=",
"self",
".",
"timeout",
",",
"remote_user",
"=",
"play",
".",
"remote_user",
",",
"remote_pass",
"=",
"self",
".",
"remote_pass",
",",
"remote_port",
"=",
"play",
".",
"remote_port",
",",
"private_key_file",
"=",
"self",
".",
"private_key_file",
",",
"private_key",
"=",
"self",
".",
"private_key",
",",
"setup_cache",
"=",
"self",
".",
"SETUP_CACHE",
",",
"callbacks",
"=",
"self",
".",
"runner_callbacks",
",",
"sudo",
"=",
"play",
".",
"sudo",
",",
"sudo_user",
"=",
"play",
".",
"sudo_user",
",",
"transport",
"=",
"play",
".",
"transport",
",",
"sudo_pass",
"=",
"self",
".",
"sudo_pass",
",",
"is_playbook",
"=",
"True",
",",
"module_vars",
"=",
"play",
".",
"vars",
",",
")",
".",
"run",
"(",
")",
"self",
".",
"stats",
".",
"compute",
"(",
"setup_results",
",",
"setup",
"=",
"True",
")",
"self",
".",
"inventory",
".",
"lift_restriction",
"(",
")",
"# now for each result, load into the setup cache so we can",
"# let runner template out future commands",
"setup_ok",
"=",
"setup_results",
".",
"get",
"(",
"'contacted'",
",",
"{",
"}",
")",
"for",
"(",
"host",
",",
"result",
")",
"in",
"setup_ok",
".",
"iteritems",
"(",
")",
":",
"self",
".",
"SETUP_CACHE",
"[",
"host",
"]",
".",
"update",
"(",
"{",
"'module_setup'",
":",
"True",
"}",
")",
"self",
".",
"SETUP_CACHE",
"[",
"host",
"]",
".",
"update",
"(",
"result",
".",
"get",
"(",
"'ansible_facts'",
",",
"{",
"}",
")",
")",
"return",
"setup_results"
] | get facts from the remote system | [
"get",
"facts",
"from",
"the",
"remote",
"system"
] | 977409929dd81322d886425cdced10608117d5d7 | https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/playbook/__init__.py#L357-L391 |
250,036 | cirruscluster/cirruscluster | cirruscluster/ext/ansible/playbook/__init__.py | PlayBook._run_play | def _run_play(self, play):
''' run a list of tasks for a given pattern, in order '''
self.callbacks.on_play_start(play.name)
# if no hosts matches this play, drop out
if not self.inventory.list_hosts(play.hosts):
self.callbacks.on_no_hosts_matched()
return True
# get facts from system
self._do_setup_step(play)
# now with that data, handle contentional variable file imports!
all_hosts = self._list_available_hosts(play.hosts)
play.update_vars_files(all_hosts)
serialized_batch = []
if play.serial <= 0:
serialized_batch = [all_hosts]
else:
# do N forks all the way through before moving to next
while len(all_hosts) > 0:
play_hosts = []
for x in range(play.serial):
if len(all_hosts) > 0:
play_hosts.append(all_hosts.pop())
serialized_batch.append(play_hosts)
for on_hosts in serialized_batch:
self.inventory.also_restrict_to(on_hosts)
for task in play.tasks():
# only run the task if the requested tags match
should_run = False
for x in self.only_tags:
for y in task.tags:
if (x==y):
should_run = True
break
if should_run:
if not self._run_task(play, task, False):
# whether no hosts matched is fatal or not depends if it was on the initial step.
# if we got exactly no hosts on the first step (setup!) then the host group
# just didn't match anything and that's ok
return False
host_list = self._list_available_hosts(play.hosts)
# if no hosts remain, drop out
if not host_list:
self.callbacks.on_no_hosts_remaining()
return False
# run notify actions
for handler in play.handlers():
if len(handler.notified_by) > 0:
self.inventory.restrict_to(handler.notified_by)
self._run_task(play, handler, True)
self.inventory.lift_restriction()
handler.notified_by = []
self.inventory.lift_also_restriction()
return True | python | def _run_play(self, play):
''' run a list of tasks for a given pattern, in order '''
self.callbacks.on_play_start(play.name)
# if no hosts matches this play, drop out
if not self.inventory.list_hosts(play.hosts):
self.callbacks.on_no_hosts_matched()
return True
# get facts from system
self._do_setup_step(play)
# now with that data, handle contentional variable file imports!
all_hosts = self._list_available_hosts(play.hosts)
play.update_vars_files(all_hosts)
serialized_batch = []
if play.serial <= 0:
serialized_batch = [all_hosts]
else:
# do N forks all the way through before moving to next
while len(all_hosts) > 0:
play_hosts = []
for x in range(play.serial):
if len(all_hosts) > 0:
play_hosts.append(all_hosts.pop())
serialized_batch.append(play_hosts)
for on_hosts in serialized_batch:
self.inventory.also_restrict_to(on_hosts)
for task in play.tasks():
# only run the task if the requested tags match
should_run = False
for x in self.only_tags:
for y in task.tags:
if (x==y):
should_run = True
break
if should_run:
if not self._run_task(play, task, False):
# whether no hosts matched is fatal or not depends if it was on the initial step.
# if we got exactly no hosts on the first step (setup!) then the host group
# just didn't match anything and that's ok
return False
host_list = self._list_available_hosts(play.hosts)
# if no hosts remain, drop out
if not host_list:
self.callbacks.on_no_hosts_remaining()
return False
# run notify actions
for handler in play.handlers():
if len(handler.notified_by) > 0:
self.inventory.restrict_to(handler.notified_by)
self._run_task(play, handler, True)
self.inventory.lift_restriction()
handler.notified_by = []
self.inventory.lift_also_restriction()
return True | [
"def",
"_run_play",
"(",
"self",
",",
"play",
")",
":",
"self",
".",
"callbacks",
".",
"on_play_start",
"(",
"play",
".",
"name",
")",
"# if no hosts matches this play, drop out",
"if",
"not",
"self",
".",
"inventory",
".",
"list_hosts",
"(",
"play",
".",
"hosts",
")",
":",
"self",
".",
"callbacks",
".",
"on_no_hosts_matched",
"(",
")",
"return",
"True",
"# get facts from system",
"self",
".",
"_do_setup_step",
"(",
"play",
")",
"# now with that data, handle contentional variable file imports!",
"all_hosts",
"=",
"self",
".",
"_list_available_hosts",
"(",
"play",
".",
"hosts",
")",
"play",
".",
"update_vars_files",
"(",
"all_hosts",
")",
"serialized_batch",
"=",
"[",
"]",
"if",
"play",
".",
"serial",
"<=",
"0",
":",
"serialized_batch",
"=",
"[",
"all_hosts",
"]",
"else",
":",
"# do N forks all the way through before moving to next",
"while",
"len",
"(",
"all_hosts",
")",
">",
"0",
":",
"play_hosts",
"=",
"[",
"]",
"for",
"x",
"in",
"range",
"(",
"play",
".",
"serial",
")",
":",
"if",
"len",
"(",
"all_hosts",
")",
">",
"0",
":",
"play_hosts",
".",
"append",
"(",
"all_hosts",
".",
"pop",
"(",
")",
")",
"serialized_batch",
".",
"append",
"(",
"play_hosts",
")",
"for",
"on_hosts",
"in",
"serialized_batch",
":",
"self",
".",
"inventory",
".",
"also_restrict_to",
"(",
"on_hosts",
")",
"for",
"task",
"in",
"play",
".",
"tasks",
"(",
")",
":",
"# only run the task if the requested tags match",
"should_run",
"=",
"False",
"for",
"x",
"in",
"self",
".",
"only_tags",
":",
"for",
"y",
"in",
"task",
".",
"tags",
":",
"if",
"(",
"x",
"==",
"y",
")",
":",
"should_run",
"=",
"True",
"break",
"if",
"should_run",
":",
"if",
"not",
"self",
".",
"_run_task",
"(",
"play",
",",
"task",
",",
"False",
")",
":",
"# whether no hosts matched is fatal or not depends if it was on the initial step.",
"# if we got exactly no hosts on the first step (setup!) then the host group",
"# just didn't match anything and that's ok",
"return",
"False",
"host_list",
"=",
"self",
".",
"_list_available_hosts",
"(",
"play",
".",
"hosts",
")",
"# if no hosts remain, drop out",
"if",
"not",
"host_list",
":",
"self",
".",
"callbacks",
".",
"on_no_hosts_remaining",
"(",
")",
"return",
"False",
"# run notify actions",
"for",
"handler",
"in",
"play",
".",
"handlers",
"(",
")",
":",
"if",
"len",
"(",
"handler",
".",
"notified_by",
")",
">",
"0",
":",
"self",
".",
"inventory",
".",
"restrict_to",
"(",
"handler",
".",
"notified_by",
")",
"self",
".",
"_run_task",
"(",
"play",
",",
"handler",
",",
"True",
")",
"self",
".",
"inventory",
".",
"lift_restriction",
"(",
")",
"handler",
".",
"notified_by",
"=",
"[",
"]",
"self",
".",
"inventory",
".",
"lift_also_restriction",
"(",
")",
"return",
"True"
] | run a list of tasks for a given pattern, in order | [
"run",
"a",
"list",
"of",
"tasks",
"for",
"a",
"given",
"pattern",
"in",
"order"
] | 977409929dd81322d886425cdced10608117d5d7 | https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/ext/ansible/playbook/__init__.py#L395-L461 |
250,037 | bwesterb/mirte | src/main.py | parse_cmdLine_instructions | def parse_cmdLine_instructions(args):
""" Parses command-line arguments. These are
instruction to the manager to create instances and
put settings. """
instructions = dict()
rargs = list()
for arg in args:
if arg[:2] == '--':
tmp = arg[2:]
bits = tmp.split('=', 1)
if len(bits) == 1:
bits.append('')
instructions[bits[0]] = bits[1]
else:
rargs.append(arg)
return instructions, rargs | python | def parse_cmdLine_instructions(args):
""" Parses command-line arguments. These are
instruction to the manager to create instances and
put settings. """
instructions = dict()
rargs = list()
for arg in args:
if arg[:2] == '--':
tmp = arg[2:]
bits = tmp.split('=', 1)
if len(bits) == 1:
bits.append('')
instructions[bits[0]] = bits[1]
else:
rargs.append(arg)
return instructions, rargs | [
"def",
"parse_cmdLine_instructions",
"(",
"args",
")",
":",
"instructions",
"=",
"dict",
"(",
")",
"rargs",
"=",
"list",
"(",
")",
"for",
"arg",
"in",
"args",
":",
"if",
"arg",
"[",
":",
"2",
"]",
"==",
"'--'",
":",
"tmp",
"=",
"arg",
"[",
"2",
":",
"]",
"bits",
"=",
"tmp",
".",
"split",
"(",
"'='",
",",
"1",
")",
"if",
"len",
"(",
"bits",
")",
"==",
"1",
":",
"bits",
".",
"append",
"(",
"''",
")",
"instructions",
"[",
"bits",
"[",
"0",
"]",
"]",
"=",
"bits",
"[",
"1",
"]",
"else",
":",
"rargs",
".",
"append",
"(",
"arg",
")",
"return",
"instructions",
",",
"rargs"
] | Parses command-line arguments. These are
instruction to the manager to create instances and
put settings. | [
"Parses",
"command",
"-",
"line",
"arguments",
".",
"These",
"are",
"instruction",
"to",
"the",
"manager",
"to",
"create",
"instances",
"and",
"put",
"settings",
"."
] | c58db8c993cd15ffdc64b52703cd466213913200 | https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/main.py#L14-L29 |
250,038 | bwesterb/mirte | src/main.py | main | def main():
""" Entry-point """
sarah.coloredLogging.basicConfig(level=logging.DEBUG,
formatter=MirteFormatter())
l = logging.getLogger('mirte')
instructions, args = parse_cmdLine_instructions(sys.argv[1:])
m = Manager(l)
load_mirteFile(args[0] if args else 'default', m, logger=l)
execute_cmdLine_instructions(instructions, m, l)
m.run() | python | def main():
""" Entry-point """
sarah.coloredLogging.basicConfig(level=logging.DEBUG,
formatter=MirteFormatter())
l = logging.getLogger('mirte')
instructions, args = parse_cmdLine_instructions(sys.argv[1:])
m = Manager(l)
load_mirteFile(args[0] if args else 'default', m, logger=l)
execute_cmdLine_instructions(instructions, m, l)
m.run() | [
"def",
"main",
"(",
")",
":",
"sarah",
".",
"coloredLogging",
".",
"basicConfig",
"(",
"level",
"=",
"logging",
".",
"DEBUG",
",",
"formatter",
"=",
"MirteFormatter",
"(",
")",
")",
"l",
"=",
"logging",
".",
"getLogger",
"(",
"'mirte'",
")",
"instructions",
",",
"args",
"=",
"parse_cmdLine_instructions",
"(",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
")",
"m",
"=",
"Manager",
"(",
"l",
")",
"load_mirteFile",
"(",
"args",
"[",
"0",
"]",
"if",
"args",
"else",
"'default'",
",",
"m",
",",
"logger",
"=",
"l",
")",
"execute_cmdLine_instructions",
"(",
"instructions",
",",
"m",
",",
"l",
")",
"m",
".",
"run",
"(",
")"
] | Entry-point | [
"Entry",
"-",
"point"
] | c58db8c993cd15ffdc64b52703cd466213913200 | https://github.com/bwesterb/mirte/blob/c58db8c993cd15ffdc64b52703cd466213913200/src/main.py#L86-L95 |
250,039 | minhhoit/yacms | yacms/pages/middleware.py | PageMiddleware.installed | def installed(cls):
"""
Used in ``yacms.pages.views.page`` to ensure
``PageMiddleware`` or a subclass has been installed. We cache
the result on the ``PageMiddleware._installed`` to only run
this once. Short path is to just check for the dotted path to
``PageMiddleware`` in ``MIDDLEWARE_CLASSES`` - if not found,
we need to load each middleware class to match a subclass.
"""
try:
return cls._installed
except AttributeError:
name = "yacms.pages.middleware.PageMiddleware"
mw_setting = get_middleware_setting()
installed = name in mw_setting
if not installed:
for name in mw_setting:
if issubclass(import_dotted_path(name), cls):
installed = True
break
setattr(cls, "_installed", installed)
return installed | python | def installed(cls):
"""
Used in ``yacms.pages.views.page`` to ensure
``PageMiddleware`` or a subclass has been installed. We cache
the result on the ``PageMiddleware._installed`` to only run
this once. Short path is to just check for the dotted path to
``PageMiddleware`` in ``MIDDLEWARE_CLASSES`` - if not found,
we need to load each middleware class to match a subclass.
"""
try:
return cls._installed
except AttributeError:
name = "yacms.pages.middleware.PageMiddleware"
mw_setting = get_middleware_setting()
installed = name in mw_setting
if not installed:
for name in mw_setting:
if issubclass(import_dotted_path(name), cls):
installed = True
break
setattr(cls, "_installed", installed)
return installed | [
"def",
"installed",
"(",
"cls",
")",
":",
"try",
":",
"return",
"cls",
".",
"_installed",
"except",
"AttributeError",
":",
"name",
"=",
"\"yacms.pages.middleware.PageMiddleware\"",
"mw_setting",
"=",
"get_middleware_setting",
"(",
")",
"installed",
"=",
"name",
"in",
"mw_setting",
"if",
"not",
"installed",
":",
"for",
"name",
"in",
"mw_setting",
":",
"if",
"issubclass",
"(",
"import_dotted_path",
"(",
"name",
")",
",",
"cls",
")",
":",
"installed",
"=",
"True",
"break",
"setattr",
"(",
"cls",
",",
"\"_installed\"",
",",
"installed",
")",
"return",
"installed"
] | Used in ``yacms.pages.views.page`` to ensure
``PageMiddleware`` or a subclass has been installed. We cache
the result on the ``PageMiddleware._installed`` to only run
this once. Short path is to just check for the dotted path to
``PageMiddleware`` in ``MIDDLEWARE_CLASSES`` - if not found,
we need to load each middleware class to match a subclass. | [
"Used",
"in",
"yacms",
".",
"pages",
".",
"views",
".",
"page",
"to",
"ensure",
"PageMiddleware",
"or",
"a",
"subclass",
"has",
"been",
"installed",
".",
"We",
"cache",
"the",
"result",
"on",
"the",
"PageMiddleware",
".",
"_installed",
"to",
"only",
"run",
"this",
"once",
".",
"Short",
"path",
"is",
"to",
"just",
"check",
"for",
"the",
"dotted",
"path",
"to",
"PageMiddleware",
"in",
"MIDDLEWARE_CLASSES",
"-",
"if",
"not",
"found",
"we",
"need",
"to",
"load",
"each",
"middleware",
"class",
"to",
"match",
"a",
"subclass",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/pages/middleware.py#L43-L64 |
250,040 | minhhoit/yacms | yacms/pages/middleware.py | PageMiddleware.process_view | def process_view(self, request, view_func, view_args, view_kwargs):
"""
Per-request mechanics for the current page object.
"""
# Load the closest matching page by slug, and assign it to the
# request object. If none found, skip all further processing.
slug = path_to_slug(request.path_info)
pages = Page.objects.with_ascendants_for_slug(slug,
for_user=request.user, include_login_required=True)
if pages:
page = pages[0]
setattr(request, "page", page)
context_processors.page(request)
else:
return
# Handle ``page.login_required``.
if page.login_required and not request.user.is_authenticated():
return redirect_to_login(request.get_full_path())
# If the view isn't yacms's page view, try to return the result
# immediately. In the case of a 404 with an URL slug that matches a
# page exactly, swallow the exception and try yacms's page view.
#
# This allows us to set up pages with URLs that also match non-page
# urlpatterns. For example, a page could be created with the URL
# /blog/about/, which would match the blog urlpattern, and assuming
# there wasn't a blog post with the slug "about", would raise a 404
# and subsequently be rendered by yacms's page view.
if view_func != page_view:
try:
return view_func(request, *view_args, **view_kwargs)
except Http404:
if page.slug != slug:
raise
# Run page processors.
extra_context = {}
model_processors = page_processors.processors[page.content_model]
slug_processors = page_processors.processors["slug:%s" % page.slug]
for (processor, exact_page) in slug_processors + model_processors:
if exact_page and not page.is_current:
continue
processor_response = processor(request, page)
if isinstance(processor_response, HttpResponse):
return processor_response
elif processor_response:
try:
for k, v in processor_response.items():
if k not in extra_context:
extra_context[k] = v
except (TypeError, ValueError):
name = "%s.%s" % (processor.__module__, processor.__name__)
error = ("The page processor %s returned %s but must "
"return HttpResponse or dict." %
(name, type(processor_response)))
raise ValueError(error)
return page_view(request, slug, extra_context=extra_context) | python | def process_view(self, request, view_func, view_args, view_kwargs):
"""
Per-request mechanics for the current page object.
"""
# Load the closest matching page by slug, and assign it to the
# request object. If none found, skip all further processing.
slug = path_to_slug(request.path_info)
pages = Page.objects.with_ascendants_for_slug(slug,
for_user=request.user, include_login_required=True)
if pages:
page = pages[0]
setattr(request, "page", page)
context_processors.page(request)
else:
return
# Handle ``page.login_required``.
if page.login_required and not request.user.is_authenticated():
return redirect_to_login(request.get_full_path())
# If the view isn't yacms's page view, try to return the result
# immediately. In the case of a 404 with an URL slug that matches a
# page exactly, swallow the exception and try yacms's page view.
#
# This allows us to set up pages with URLs that also match non-page
# urlpatterns. For example, a page could be created with the URL
# /blog/about/, which would match the blog urlpattern, and assuming
# there wasn't a blog post with the slug "about", would raise a 404
# and subsequently be rendered by yacms's page view.
if view_func != page_view:
try:
return view_func(request, *view_args, **view_kwargs)
except Http404:
if page.slug != slug:
raise
# Run page processors.
extra_context = {}
model_processors = page_processors.processors[page.content_model]
slug_processors = page_processors.processors["slug:%s" % page.slug]
for (processor, exact_page) in slug_processors + model_processors:
if exact_page and not page.is_current:
continue
processor_response = processor(request, page)
if isinstance(processor_response, HttpResponse):
return processor_response
elif processor_response:
try:
for k, v in processor_response.items():
if k not in extra_context:
extra_context[k] = v
except (TypeError, ValueError):
name = "%s.%s" % (processor.__module__, processor.__name__)
error = ("The page processor %s returned %s but must "
"return HttpResponse or dict." %
(name, type(processor_response)))
raise ValueError(error)
return page_view(request, slug, extra_context=extra_context) | [
"def",
"process_view",
"(",
"self",
",",
"request",
",",
"view_func",
",",
"view_args",
",",
"view_kwargs",
")",
":",
"# Load the closest matching page by slug, and assign it to the",
"# request object. If none found, skip all further processing.",
"slug",
"=",
"path_to_slug",
"(",
"request",
".",
"path_info",
")",
"pages",
"=",
"Page",
".",
"objects",
".",
"with_ascendants_for_slug",
"(",
"slug",
",",
"for_user",
"=",
"request",
".",
"user",
",",
"include_login_required",
"=",
"True",
")",
"if",
"pages",
":",
"page",
"=",
"pages",
"[",
"0",
"]",
"setattr",
"(",
"request",
",",
"\"page\"",
",",
"page",
")",
"context_processors",
".",
"page",
"(",
"request",
")",
"else",
":",
"return",
"# Handle ``page.login_required``.",
"if",
"page",
".",
"login_required",
"and",
"not",
"request",
".",
"user",
".",
"is_authenticated",
"(",
")",
":",
"return",
"redirect_to_login",
"(",
"request",
".",
"get_full_path",
"(",
")",
")",
"# If the view isn't yacms's page view, try to return the result",
"# immediately. In the case of a 404 with an URL slug that matches a",
"# page exactly, swallow the exception and try yacms's page view.",
"#",
"# This allows us to set up pages with URLs that also match non-page",
"# urlpatterns. For example, a page could be created with the URL",
"# /blog/about/, which would match the blog urlpattern, and assuming",
"# there wasn't a blog post with the slug \"about\", would raise a 404",
"# and subsequently be rendered by yacms's page view.",
"if",
"view_func",
"!=",
"page_view",
":",
"try",
":",
"return",
"view_func",
"(",
"request",
",",
"*",
"view_args",
",",
"*",
"*",
"view_kwargs",
")",
"except",
"Http404",
":",
"if",
"page",
".",
"slug",
"!=",
"slug",
":",
"raise",
"# Run page processors.",
"extra_context",
"=",
"{",
"}",
"model_processors",
"=",
"page_processors",
".",
"processors",
"[",
"page",
".",
"content_model",
"]",
"slug_processors",
"=",
"page_processors",
".",
"processors",
"[",
"\"slug:%s\"",
"%",
"page",
".",
"slug",
"]",
"for",
"(",
"processor",
",",
"exact_page",
")",
"in",
"slug_processors",
"+",
"model_processors",
":",
"if",
"exact_page",
"and",
"not",
"page",
".",
"is_current",
":",
"continue",
"processor_response",
"=",
"processor",
"(",
"request",
",",
"page",
")",
"if",
"isinstance",
"(",
"processor_response",
",",
"HttpResponse",
")",
":",
"return",
"processor_response",
"elif",
"processor_response",
":",
"try",
":",
"for",
"k",
",",
"v",
"in",
"processor_response",
".",
"items",
"(",
")",
":",
"if",
"k",
"not",
"in",
"extra_context",
":",
"extra_context",
"[",
"k",
"]",
"=",
"v",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"name",
"=",
"\"%s.%s\"",
"%",
"(",
"processor",
".",
"__module__",
",",
"processor",
".",
"__name__",
")",
"error",
"=",
"(",
"\"The page processor %s returned %s but must \"",
"\"return HttpResponse or dict.\"",
"%",
"(",
"name",
",",
"type",
"(",
"processor_response",
")",
")",
")",
"raise",
"ValueError",
"(",
"error",
")",
"return",
"page_view",
"(",
"request",
",",
"slug",
",",
"extra_context",
"=",
"extra_context",
")"
] | Per-request mechanics for the current page object. | [
"Per",
"-",
"request",
"mechanics",
"for",
"the",
"current",
"page",
"object",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/pages/middleware.py#L66-L125 |
250,041 | tomprince/nomenclature | nomenclature/syscalls.py | unshare | def unshare(flags):
"""
Disassociate parts of the process execution context.
:param flags int: A bitmask that specifies which parts of the execution
context should be unshared.
"""
res = lib.unshare(flags)
if res != 0:
_check_error(ffi.errno) | python | def unshare(flags):
"""
Disassociate parts of the process execution context.
:param flags int: A bitmask that specifies which parts of the execution
context should be unshared.
"""
res = lib.unshare(flags)
if res != 0:
_check_error(ffi.errno) | [
"def",
"unshare",
"(",
"flags",
")",
":",
"res",
"=",
"lib",
".",
"unshare",
"(",
"flags",
")",
"if",
"res",
"!=",
"0",
":",
"_check_error",
"(",
"ffi",
".",
"errno",
")"
] | Disassociate parts of the process execution context.
:param flags int: A bitmask that specifies which parts of the execution
context should be unshared. | [
"Disassociate",
"parts",
"of",
"the",
"process",
"execution",
"context",
"."
] | 81af4a590034f75211f028d485c0d83fceda5af2 | https://github.com/tomprince/nomenclature/blob/81af4a590034f75211f028d485c0d83fceda5af2/nomenclature/syscalls.py#L15-L24 |
250,042 | tomprince/nomenclature | nomenclature/syscalls.py | setns | def setns(fd, nstype):
"""
Reassociate thread with a namespace
:param fd int: The file descriptor referreing to one of the namespace
entries in a :directory::`/proc/<pid>/ns/` directory.
:param nstype int: The type of namespace the calling thread should be
reasscoiated with.
"""
res = lib.setns(fd, nstype)
if res != 0:
_check_error(ffi.errno) | python | def setns(fd, nstype):
"""
Reassociate thread with a namespace
:param fd int: The file descriptor referring to one of the namespace
entries in a :directory:`/proc/<pid>/ns/` directory.
:param nstype int: The type of namespace the calling thread should be
reassociated with.
"""
res = lib.setns(fd, nstype)
if res != 0:
_check_error(ffi.errno) | [
"def",
"setns",
"(",
"fd",
",",
"nstype",
")",
":",
"res",
"=",
"lib",
".",
"setns",
"(",
"fd",
",",
"nstype",
")",
"if",
"res",
"!=",
"0",
":",
"_check_error",
"(",
"ffi",
".",
"errno",
")"
] | Reassociate thread with a namespace
:param fd int: The file descriptor referring to one of the namespace
entries in a :directory:`/proc/<pid>/ns/` directory.
:param nstype int: The type of namespace the calling thread should be
reassociated with. | [
"Reassociate",
"thread",
"with",
"a",
"namespace"
] | 81af4a590034f75211f028d485c0d83fceda5af2 | https://github.com/tomprince/nomenclature/blob/81af4a590034f75211f028d485c0d83fceda5af2/nomenclature/syscalls.py#L27-L38 |
250,043 | xtrementl/focus | focus/task.py | Task._reset | def _reset(self):
""" Resets class properties.
"""
self._name = None
self._start_time = None
self._owner = os.getuid()
self._paths['task_dir'] = None
self._paths['task_config'] = None
self._loaded = False | python | def _reset(self):
""" Resets class properties.
"""
self._name = None
self._start_time = None
self._owner = os.getuid()
self._paths['task_dir'] = None
self._paths['task_config'] = None
self._loaded = False | [
"def",
"_reset",
"(",
"self",
")",
":",
"self",
".",
"_name",
"=",
"None",
"self",
".",
"_start_time",
"=",
"None",
"self",
".",
"_owner",
"=",
"os",
".",
"getuid",
"(",
")",
"self",
".",
"_paths",
"[",
"'task_dir'",
"]",
"=",
"None",
"self",
".",
"_paths",
"[",
"'task_config'",
"]",
"=",
"None",
"self",
".",
"_loaded",
"=",
"False"
] | Resets class properties. | [
"Resets",
"class",
"properties",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L51-L60 |
250,044 | xtrementl/focus | focus/task.py | Task._save_active_file | def _save_active_file(self):
""" Saves current task information to active file.
Example format::
active_task {
name "task name";
start_time "2012-04-23 15:18:22.000000";
}
"""
_parser = parser.SettingParser()
# add name
_parser.add_option(None, 'name', common.to_utf8(self._name))
# add start time
start_time = self._start_time.strftime('%Y-%m-%d %H:%M:%S.%f')
_parser.add_option(None, 'start_time', start_time)
# write it to file
return _parser.write(self._paths['active_file'],
self.HEADER_ACTIVE_FILE) | python | def _save_active_file(self):
""" Saves current task information to active file.
Example format::
active_task {
name "task name";
start_time "2012-04-23 15:18:22.000000";
}
"""
_parser = parser.SettingParser()
# add name
_parser.add_option(None, 'name', common.to_utf8(self._name))
# add start time
start_time = self._start_time.strftime('%Y-%m-%d %H:%M:%S.%f')
_parser.add_option(None, 'start_time', start_time)
# write it to file
return _parser.write(self._paths['active_file'],
self.HEADER_ACTIVE_FILE) | [
"def",
"_save_active_file",
"(",
"self",
")",
":",
"_parser",
"=",
"parser",
".",
"SettingParser",
"(",
")",
"# add name",
"_parser",
".",
"add_option",
"(",
"None",
",",
"'name'",
",",
"common",
".",
"to_utf8",
"(",
"self",
".",
"_name",
")",
")",
"# add start time",
"start_time",
"=",
"self",
".",
"_start_time",
".",
"strftime",
"(",
"'%Y-%m-%d %H:%M:%S.%f'",
")",
"_parser",
".",
"add_option",
"(",
"None",
",",
"'start_time'",
",",
"start_time",
")",
"# write it to file",
"return",
"_parser",
".",
"write",
"(",
"self",
".",
"_paths",
"[",
"'active_file'",
"]",
",",
"self",
".",
"HEADER_ACTIVE_FILE",
")"
] | Saves current task information to active file.
Example format::
active_task {
name "task name";
start_time "2012-04-23 15:18:22.000000";
} | [
"Saves",
"current",
"task",
"information",
"to",
"active",
"file",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L73-L94 |
250,045 | xtrementl/focus | focus/task.py | Task._clean_prior | def _clean_prior(self):
""" Cleans up from a previous task that didn't exit cleanly.
Returns ``True`` if previous task was cleaned.
"""
if self._loaded:
try:
pid_file = daemon.get_daemon_pidfile(self)
# check if it exists so we don't raise
if os.path.isfile(pid_file):
# read pid from file
pid = int(common.readfile(pid_file))
# check if pid file is stale
if pid and not daemon.pid_exists(pid):
common.safe_remove_file(pid_file)
raise ValueError
except (ValueError, TypeError):
self._clean()
return True
return False | python | def _clean_prior(self):
""" Cleans up from a previous task that didn't exit cleanly.
Returns ``True`` if previous task was cleaned.
"""
if self._loaded:
try:
pid_file = daemon.get_daemon_pidfile(self)
# check if it exists so we don't raise
if os.path.isfile(pid_file):
# read pid from file
pid = int(common.readfile(pid_file))
# check if pid file is stale
if pid and not daemon.pid_exists(pid):
common.safe_remove_file(pid_file)
raise ValueError
except (ValueError, TypeError):
self._clean()
return True
return False | [
"def",
"_clean_prior",
"(",
"self",
")",
":",
"if",
"self",
".",
"_loaded",
":",
"try",
":",
"pid_file",
"=",
"daemon",
".",
"get_daemon_pidfile",
"(",
"self",
")",
"# check if it exists so we don't raise",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"pid_file",
")",
":",
"# read pid from file",
"pid",
"=",
"int",
"(",
"common",
".",
"readfile",
"(",
"pid_file",
")",
")",
"# check if pid file is stale",
"if",
"pid",
"and",
"not",
"daemon",
".",
"pid_exists",
"(",
"pid",
")",
":",
"common",
".",
"safe_remove_file",
"(",
"pid_file",
")",
"raise",
"ValueError",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"self",
".",
"_clean",
"(",
")",
"return",
"True",
"return",
"False"
] | Cleans up from a previous task that didn't exit cleanly.
Returns ``True`` if previous task was cleaned. | [
"Cleans",
"up",
"from",
"a",
"previous",
"task",
"that",
"didn",
"t",
"exit",
"cleanly",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L96-L120 |
250,046 | xtrementl/focus | focus/task.py | Task.load | def load(self):
""" Loads a task if the active file is available.
"""
try:
_parser = parser.parse_config(self._paths['active_file'],
self.HEADER_ACTIVE_FILE)
# parse expected options into a dict to de-dupe
keys = ('name', 'start_time')
opts = dict(o for o in _parser.options if o[0] in keys)
# check for all keys
for k in keys:
if not opts.get(k):
return False
task_name = opts.get('name')[0]
# setup the paths
task_dir = self._get_task_dir(task_name)
task_config = os.path.join(task_dir, 'task.cfg')
# validate start time
value = opts.get('start_time')[0]
start_time = datetime.datetime.strptime(value,
'%Y-%m-%d %H:%M:%S.%f')
# get user id for process ownership when running task
# here, we use the owner of the active file
file_meta = os.stat(self._paths['active_file'])
owner = file_meta.st_uid
# parse task config and send its options to registered plugins
_parser = parser.parse_config(task_config, self.HEADER_TASK_CONFIG)
registration.run_option_hooks(_parser)
self._name = common.from_utf8(task_name)
self._start_time = start_time
self._owner = owner
self._paths['task_dir'] = task_dir
self._paths['task_config'] = task_config
self._loaded = True
except (parser.ParseError, ValueError, TypeError, OSError):
# something failed, cleanup
self._clean()
self._clean_prior()
return self._loaded | python | def load(self):
""" Loads a task if the active file is available.
"""
try:
_parser = parser.parse_config(self._paths['active_file'],
self.HEADER_ACTIVE_FILE)
# parse expected options into a dict to de-dupe
keys = ('name', 'start_time')
opts = dict(o for o in _parser.options if o[0] in keys)
# check for all keys
for k in keys:
if not opts.get(k):
return False
task_name = opts.get('name')[0]
# setup the paths
task_dir = self._get_task_dir(task_name)
task_config = os.path.join(task_dir, 'task.cfg')
# validate start time
value = opts.get('start_time')[0]
start_time = datetime.datetime.strptime(value,
'%Y-%m-%d %H:%M:%S.%f')
# get user id for process ownership when running task
# here, we use the owner of the active file
file_meta = os.stat(self._paths['active_file'])
owner = file_meta.st_uid
# parse task config and send its options to registered plugins
_parser = parser.parse_config(task_config, self.HEADER_TASK_CONFIG)
registration.run_option_hooks(_parser)
self._name = common.from_utf8(task_name)
self._start_time = start_time
self._owner = owner
self._paths['task_dir'] = task_dir
self._paths['task_config'] = task_config
self._loaded = True
except (parser.ParseError, ValueError, TypeError, OSError):
# something failed, cleanup
self._clean()
self._clean_prior()
return self._loaded | [
"def",
"load",
"(",
"self",
")",
":",
"try",
":",
"_parser",
"=",
"parser",
".",
"parse_config",
"(",
"self",
".",
"_paths",
"[",
"'active_file'",
"]",
",",
"self",
".",
"HEADER_ACTIVE_FILE",
")",
"# parse expected options into a dict to de-dupe",
"keys",
"=",
"(",
"'name'",
",",
"'start_time'",
")",
"opts",
"=",
"dict",
"(",
"o",
"for",
"o",
"in",
"_parser",
".",
"options",
"if",
"o",
"[",
"0",
"]",
"in",
"keys",
")",
"# check for all keys",
"for",
"k",
"in",
"keys",
":",
"if",
"not",
"opts",
".",
"get",
"(",
"k",
")",
":",
"return",
"False",
"task_name",
"=",
"opts",
".",
"get",
"(",
"'name'",
")",
"[",
"0",
"]",
"# setup the paths",
"task_dir",
"=",
"self",
".",
"_get_task_dir",
"(",
"task_name",
")",
"task_config",
"=",
"os",
".",
"path",
".",
"join",
"(",
"task_dir",
",",
"'task.cfg'",
")",
"# validate start time",
"value",
"=",
"opts",
".",
"get",
"(",
"'start_time'",
")",
"[",
"0",
"]",
"start_time",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"value",
",",
"'%Y-%m-%d %H:%M:%S.%f'",
")",
"# get user id for process ownership when running task",
"# here, we use the owner of the active file",
"file_meta",
"=",
"os",
".",
"stat",
"(",
"self",
".",
"_paths",
"[",
"'active_file'",
"]",
")",
"owner",
"=",
"file_meta",
".",
"st_uid",
"# parse task config and send its options to registered plugins",
"_parser",
"=",
"parser",
".",
"parse_config",
"(",
"task_config",
",",
"self",
".",
"HEADER_TASK_CONFIG",
")",
"registration",
".",
"run_option_hooks",
"(",
"_parser",
")",
"self",
".",
"_name",
"=",
"common",
".",
"from_utf8",
"(",
"task_name",
")",
"self",
".",
"_start_time",
"=",
"start_time",
"self",
".",
"_owner",
"=",
"owner",
"self",
".",
"_paths",
"[",
"'task_dir'",
"]",
"=",
"task_dir",
"self",
".",
"_paths",
"[",
"'task_config'",
"]",
"=",
"task_config",
"self",
".",
"_loaded",
"=",
"True",
"except",
"(",
"parser",
".",
"ParseError",
",",
"ValueError",
",",
"TypeError",
",",
"OSError",
")",
":",
"# something failed, cleanup",
"self",
".",
"_clean",
"(",
")",
"self",
".",
"_clean_prior",
"(",
")",
"return",
"self",
".",
"_loaded"
] | Loads a task if the active file is available. | [
"Loads",
"a",
"task",
"if",
"the",
"active",
"file",
"is",
"available",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L129-L177 |
250,047 | xtrementl/focus | focus/task.py | Task.exists | def exists(self, task_name):
""" Determines if task directory exists.
`task_name`
Task name.
Returns ``True`` if task exists.
"""
try:
return os.path.exists(self._get_task_dir(task_name))
except OSError:
return False | python | def exists(self, task_name):
""" Determines if task directory exists.
`task_name`
Task name.
Returns ``True`` if task exists.
"""
try:
return os.path.exists(self._get_task_dir(task_name))
except OSError:
return False | [
"def",
"exists",
"(",
"self",
",",
"task_name",
")",
":",
"try",
":",
"return",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"_get_task_dir",
"(",
"task_name",
")",
")",
"except",
"OSError",
":",
"return",
"False"
] | Determines if task directory exists.
`task_name`
Task name.
Returns ``True`` if task exists. | [
"Determines",
"if",
"task",
"directory",
"exists",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L179-L192 |
250,048 | xtrementl/focus | focus/task.py | Task.create | def create(self, task_name, clone_task=None):
""" Creates a new task directory.
`task_name`
Task name.
`clone_task`
Existing task name to use as a template for new task.
Returns boolean.
* Raises ``ValueError`` if task name is invalid, ``TaskExists`` if task
already exists, or ``TaskNotFound`` if task for `clone_task`
doesn't exist.
"""
if not task_name or task_name.startswith('-'):
raise ValueError('Invalid task name')
try:
task_dir = self._get_task_dir(task_name)
if self.exists(task_dir):
raise errors.TaskExists(task_name)
task_cfg = self.get_config_path(task_name)
if clone_task:
if not self.exists(clone_task):
raise errors.TaskNotFound(clone_task)
# copy task directory
shutil.copytree(self._get_task_dir(clone_task), task_dir)
else:
os.mkdir(task_dir)
# write default task configuration
shutil.copy(self._default_task_config, task_cfg)
return True
except OSError:
shutil.rmtree(task_dir, ignore_errors=True)
return False | python | def create(self, task_name, clone_task=None):
""" Creates a new task directory.
`task_name`
Task name.
`clone_task`
Existing task name to use as a template for new task.
Returns boolean.
* Raises ``ValueError`` if task name is invalid, ``TaskExists`` if task
already exists, or ``TaskNotFound`` if task for `clone_task`
doesn't exist.
"""
if not task_name or task_name.startswith('-'):
raise ValueError('Invalid task name')
try:
task_dir = self._get_task_dir(task_name)
if self.exists(task_dir):
raise errors.TaskExists(task_name)
task_cfg = self.get_config_path(task_name)
if clone_task:
if not self.exists(clone_task):
raise errors.TaskNotFound(clone_task)
# copy task directory
shutil.copytree(self._get_task_dir(clone_task), task_dir)
else:
os.mkdir(task_dir)
# write default task configuration
shutil.copy(self._default_task_config, task_cfg)
return True
except OSError:
shutil.rmtree(task_dir, ignore_errors=True)
return False | [
"def",
"create",
"(",
"self",
",",
"task_name",
",",
"clone_task",
"=",
"None",
")",
":",
"if",
"not",
"task_name",
"or",
"task_name",
".",
"startswith",
"(",
"'-'",
")",
":",
"raise",
"ValueError",
"(",
"'Invalid task name'",
")",
"try",
":",
"task_dir",
"=",
"self",
".",
"_get_task_dir",
"(",
"task_name",
")",
"if",
"self",
".",
"exists",
"(",
"task_dir",
")",
":",
"raise",
"errors",
".",
"TaskExists",
"(",
"task_name",
")",
"task_cfg",
"=",
"self",
".",
"get_config_path",
"(",
"task_name",
")",
"if",
"clone_task",
":",
"if",
"not",
"self",
".",
"exists",
"(",
"clone_task",
")",
":",
"raise",
"errors",
".",
"TaskNotFound",
"(",
"clone_task",
")",
"# copy task directory",
"shutil",
".",
"copytree",
"(",
"self",
".",
"_get_task_dir",
"(",
"clone_task",
")",
",",
"task_dir",
")",
"else",
":",
"os",
".",
"mkdir",
"(",
"task_dir",
")",
"# write default task configuration",
"shutil",
".",
"copy",
"(",
"self",
".",
"_default_task_config",
",",
"task_cfg",
")",
"return",
"True",
"except",
"OSError",
":",
"shutil",
".",
"rmtree",
"(",
"task_dir",
",",
"ignore_errors",
"=",
"True",
")",
"return",
"False"
] | Creates a new task directory.
`task_name`
Task name.
`clone_task`
Existing task name to use as a template for new task.
Returns boolean.
* Raises ``ValueError`` if task name is invalid, ``TaskExists`` if task
already exists, or ``TaskNotFound`` if task for `clone_task`
doesn't exist. | [
"Creates",
"a",
"new",
"task",
"directory",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L205-L248 |
250,049 | xtrementl/focus | focus/task.py | Task.rename | def rename(self, old_task_name, new_task_name):
""" Renames an existing task directory.
`old_task_name`
Current task name.
`new_task_name`
New task name.
Returns ``True`` if rename successful.
"""
if not old_task_name or old_task_name.startswith('-'):
raise ValueError('Old task name is invalid')
if not new_task_name or new_task_name.startswith('-'):
raise ValueError('New new task name is invalid')
if old_task_name == new_task_name:
raise ValueError('Cannot rename task to itself')
try:
old_task_dir = self._get_task_dir(old_task_name)
if not self.exists(old_task_dir):
raise errors.TaskNotFound(old_task_name)
new_task_dir = self._get_task_dir(new_task_name)
if self.exists(new_task_dir):
raise errors.TaskExists(new_task_name)
os.rename(old_task_dir, new_task_dir)
return True
except OSError:
return False | python | def rename(self, old_task_name, new_task_name):
""" Renames an existing task directory.
`old_task_name`
Current task name.
`new_task_name`
New task name.
Returns ``True`` if rename successful.
"""
if not old_task_name or old_task_name.startswith('-'):
raise ValueError('Old task name is invalid')
if not new_task_name or new_task_name.startswith('-'):
raise ValueError('New new task name is invalid')
if old_task_name == new_task_name:
raise ValueError('Cannot rename task to itself')
try:
old_task_dir = self._get_task_dir(old_task_name)
if not self.exists(old_task_dir):
raise errors.TaskNotFound(old_task_name)
new_task_dir = self._get_task_dir(new_task_name)
if self.exists(new_task_dir):
raise errors.TaskExists(new_task_name)
os.rename(old_task_dir, new_task_dir)
return True
except OSError:
return False | [
"def",
"rename",
"(",
"self",
",",
"old_task_name",
",",
"new_task_name",
")",
":",
"if",
"not",
"old_task_name",
"or",
"old_task_name",
".",
"startswith",
"(",
"'-'",
")",
":",
"raise",
"ValueError",
"(",
"'Old task name is invalid'",
")",
"if",
"not",
"new_task_name",
"or",
"new_task_name",
".",
"startswith",
"(",
"'-'",
")",
":",
"raise",
"ValueError",
"(",
"'New new task name is invalid'",
")",
"if",
"old_task_name",
"==",
"new_task_name",
":",
"raise",
"ValueError",
"(",
"'Cannot rename task to itself'",
")",
"try",
":",
"old_task_dir",
"=",
"self",
".",
"_get_task_dir",
"(",
"old_task_name",
")",
"if",
"not",
"self",
".",
"exists",
"(",
"old_task_dir",
")",
":",
"raise",
"errors",
".",
"TaskNotFound",
"(",
"old_task_name",
")",
"new_task_dir",
"=",
"self",
".",
"_get_task_dir",
"(",
"new_task_name",
")",
"if",
"self",
".",
"exists",
"(",
"new_task_dir",
")",
":",
"raise",
"errors",
".",
"TaskExists",
"(",
"new_task_name",
")",
"os",
".",
"rename",
"(",
"old_task_dir",
",",
"new_task_dir",
")",
"return",
"True",
"except",
"OSError",
":",
"return",
"False"
] | Renames an existing task directory.
`old_task_name`
Current task name.
`new_task_name`
New task name.
Returns ``True`` if rename successful. | [
"Renames",
"an",
"existing",
"task",
"directory",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L250-L283 |
250,050 | xtrementl/focus | focus/task.py | Task.remove | def remove(self, task_name):
""" Removes an existing task directory.
`task_name`
Task name.
Returns ``True`` if removal successful.
"""
try:
task_dir = self._get_task_dir(task_name)
shutil.rmtree(task_dir)
return True
except OSError:
return False | python | def remove(self, task_name):
""" Removes an existing task directory.
`task_name`
Task name.
Returns ``True`` if removal successful.
"""
try:
task_dir = self._get_task_dir(task_name)
shutil.rmtree(task_dir)
return True
except OSError:
return False | [
"def",
"remove",
"(",
"self",
",",
"task_name",
")",
":",
"try",
":",
"task_dir",
"=",
"self",
".",
"_get_task_dir",
"(",
"task_name",
")",
"shutil",
".",
"rmtree",
"(",
"task_dir",
")",
"return",
"True",
"except",
"OSError",
":",
"return",
"False"
] | Removes an existing task directory.
`task_name`
Task name.
Returns ``True`` if removal successful. | [
"Removes",
"an",
"existing",
"task",
"directory",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L285-L300 |
250,051 | xtrementl/focus | focus/task.py | Task.get_list_info | def get_list_info(self, task_name=None):
""" Lists all tasks and associated information.
`task_name`
Task name to limit. Default: return all valid tasks.
Returns list of tuples (task_name, options, block_options)
"""
try:
tasks = []
# get all tasks dirs
tasks_dir = os.path.join(self._paths['base_dir'], 'tasks')
if task_name:
# if task folder doesn't exist, return nothing
if not os.path.isdir(os.path.join(tasks_dir, task_name)):
return []
task_names = [task_name]
else:
task_names = [name for name in os.listdir(tasks_dir)
if os.path.isdir(os.path.join(tasks_dir, name))]
task_names.sort()
for name in task_names:
try:
# parse task config and run option hooks
task_config = os.path.join(tasks_dir, name, 'task.cfg')
parser_ = parser.parse_config(task_config,
self.HEADER_TASK_CONFIG)
registration.run_option_hooks(parser_,
disable_missing=False)
tasks.append((name, parser_.options, parser_.blocks))
except (parser.ParseError, errors.InvalidTaskConfig):
tasks.append((name, None, None))
return tasks
except OSError:
return [] | python | def get_list_info(self, task_name=None):
""" Lists all tasks and associated information.
`task_name`
Task name to limit. Default: return all valid tasks.
Returns list of tuples (task_name, options, block_options)
"""
try:
tasks = []
# get all tasks dirs
tasks_dir = os.path.join(self._paths['base_dir'], 'tasks')
if task_name:
# if task folder doesn't exist, return nothing
if not os.path.isdir(os.path.join(tasks_dir, task_name)):
return []
task_names = [task_name]
else:
task_names = [name for name in os.listdir(tasks_dir)
if os.path.isdir(os.path.join(tasks_dir, name))]
task_names.sort()
for name in task_names:
try:
# parse task config and run option hooks
task_config = os.path.join(tasks_dir, name, 'task.cfg')
parser_ = parser.parse_config(task_config,
self.HEADER_TASK_CONFIG)
registration.run_option_hooks(parser_,
disable_missing=False)
tasks.append((name, parser_.options, parser_.blocks))
except (parser.ParseError, errors.InvalidTaskConfig):
tasks.append((name, None, None))
return tasks
except OSError:
return [] | [
"def",
"get_list_info",
"(",
"self",
",",
"task_name",
"=",
"None",
")",
":",
"try",
":",
"tasks",
"=",
"[",
"]",
"# get all tasks dirs",
"tasks_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_paths",
"[",
"'base_dir'",
"]",
",",
"'tasks'",
")",
"if",
"task_name",
":",
"# if task folder doesn't exist, return nothing",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"tasks_dir",
",",
"task_name",
")",
")",
":",
"return",
"[",
"]",
"task_names",
"=",
"[",
"task_name",
"]",
"else",
":",
"task_names",
"=",
"[",
"name",
"for",
"name",
"in",
"os",
".",
"listdir",
"(",
"tasks_dir",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"tasks_dir",
",",
"name",
")",
")",
"]",
"task_names",
".",
"sort",
"(",
")",
"for",
"name",
"in",
"task_names",
":",
"try",
":",
"# parse task config and run option hooks",
"task_config",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tasks_dir",
",",
"name",
",",
"'task.cfg'",
")",
"parser_",
"=",
"parser",
".",
"parse_config",
"(",
"task_config",
",",
"self",
".",
"HEADER_TASK_CONFIG",
")",
"registration",
".",
"run_option_hooks",
"(",
"parser_",
",",
"disable_missing",
"=",
"False",
")",
"tasks",
".",
"append",
"(",
"(",
"name",
",",
"parser_",
".",
"options",
",",
"parser_",
".",
"blocks",
")",
")",
"except",
"(",
"parser",
".",
"ParseError",
",",
"errors",
".",
"InvalidTaskConfig",
")",
":",
"tasks",
".",
"append",
"(",
"(",
"name",
",",
"None",
",",
"None",
")",
")",
"return",
"tasks",
"except",
"OSError",
":",
"return",
"[",
"]"
] | Lists all tasks and associated information.
`task_name`
Task name to limit. Default: return all valid tasks.
Returns list of tuples (task_name, options, block_options) | [
"Lists",
"all",
"tasks",
"and",
"associated",
"information",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L302-L345 |
250,052 | xtrementl/focus | focus/task.py | Task.start | def start(self, task_name):
""" Starts a new task matching the provided name.
`task_name`
Name of existing task to start.
Returns boolean.
* Raises a ``TaskNotFound`` exception if task doesn't exist, an
``InvalidTaskConfig` exception if task config file is invalid, or
``DaemonFailStart`` exception if task daemons failed to fork.
"""
self._clean_prior()
if self._loaded:
raise errors.ActiveTask
# get paths
task_dir = os.path.join(self._paths['base_dir'], 'tasks', task_name)
task_config = os.path.join(task_dir, 'task.cfg')
if not os.path.isdir(task_dir):
raise errors.TaskNotFound(task_name)
try:
# raise if task config is missing
if not os.path.isfile(task_config):
reason = u"Config file could not be found."
raise errors.InvalidTaskConfig(task_config, reason=reason)
# parse task config and send its options to registered plugins
_parser = parser.parse_config(task_config, self.HEADER_TASK_CONFIG)
registration.run_option_hooks(_parser)
except parser.ParseError as exc:
raise errors.InvalidTaskConfig(task_config,
reason=unicode(exc))
# populate task info
self._name = common.from_utf8(task_name)
self._start_time = datetime.datetime.now()
self._owner = os.getuid()
self._paths['task_dir'] = task_dir
self._paths['task_config'] = task_config
self._loaded = True
# task is setup, save active file
# note, order is *important*; this is needed first
# for the daemon to load
self._save_active_file()
# shell the focusd daemon
try:
started = daemon.shell_focusd(self._paths['base_dir'])
# user cancelled or passwords failed?
except (KeyboardInterrupt, ValueError):
self._clean()
return False
# no event plugins registered, carry on
except errors.NoPluginsRegistered:
return True
# failed, cleanup our mess
if not started:
self._clean()
raise errors.DaemonFailStart
return True | python | def start(self, task_name):
""" Starts a new task matching the provided name.
`task_name`
Name of existing task to start.
Returns boolean.
* Raises a ``TaskNotFound`` exception if task doesn't exist, an
``InvalidTaskConfig`` exception if task config file is invalid, or
``DaemonFailStart`` exception if task daemons failed to fork.
"""
self._clean_prior()
if self._loaded:
raise errors.ActiveTask
# get paths
task_dir = os.path.join(self._paths['base_dir'], 'tasks', task_name)
task_config = os.path.join(task_dir, 'task.cfg')
if not os.path.isdir(task_dir):
raise errors.TaskNotFound(task_name)
try:
# raise if task config is missing
if not os.path.isfile(task_config):
reason = u"Config file could not be found."
raise errors.InvalidTaskConfig(task_config, reason=reason)
# parse task config and send its options to registered plugins
_parser = parser.parse_config(task_config, self.HEADER_TASK_CONFIG)
registration.run_option_hooks(_parser)
except parser.ParseError as exc:
raise errors.InvalidTaskConfig(task_config,
reason=unicode(exc))
# populate task info
self._name = common.from_utf8(task_name)
self._start_time = datetime.datetime.now()
self._owner = os.getuid()
self._paths['task_dir'] = task_dir
self._paths['task_config'] = task_config
self._loaded = True
# task is setup, save active file
# note, order is *important*; this is needed first
# for the daemon to load
self._save_active_file()
# shell the focusd daemon
try:
started = daemon.shell_focusd(self._paths['base_dir'])
# user cancelled or passwords failed?
except (KeyboardInterrupt, ValueError):
self._clean()
return False
# no event plugins registered, carry on
except errors.NoPluginsRegistered:
return True
# failed, cleanup our mess
if not started:
self._clean()
raise errors.DaemonFailStart
return True | [
"def",
"start",
"(",
"self",
",",
"task_name",
")",
":",
"self",
".",
"_clean_prior",
"(",
")",
"if",
"self",
".",
"_loaded",
":",
"raise",
"errors",
".",
"ActiveTask",
"# get paths",
"task_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_paths",
"[",
"'base_dir'",
"]",
",",
"'tasks'",
",",
"task_name",
")",
"task_config",
"=",
"os",
".",
"path",
".",
"join",
"(",
"task_dir",
",",
"'task.cfg'",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"task_dir",
")",
":",
"raise",
"errors",
".",
"TaskNotFound",
"(",
"task_name",
")",
"try",
":",
"# raise if task config is missing",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"task_config",
")",
":",
"reason",
"=",
"u\"Config file could not be found.\"",
"raise",
"errors",
".",
"InvalidTaskConfig",
"(",
"task_config",
",",
"reason",
"=",
"reason",
")",
"# parse task config and send its options to registered plugins",
"_parser",
"=",
"parser",
".",
"parse_config",
"(",
"task_config",
",",
"self",
".",
"HEADER_TASK_CONFIG",
")",
"registration",
".",
"run_option_hooks",
"(",
"_parser",
")",
"except",
"parser",
".",
"ParseError",
"as",
"exc",
":",
"raise",
"errors",
".",
"InvalidTaskConfig",
"(",
"task_config",
",",
"reason",
"=",
"unicode",
"(",
"exc",
")",
")",
"# populate task info",
"self",
".",
"_name",
"=",
"common",
".",
"from_utf8",
"(",
"task_name",
")",
"self",
".",
"_start_time",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"self",
".",
"_owner",
"=",
"os",
".",
"getuid",
"(",
")",
"self",
".",
"_paths",
"[",
"'task_dir'",
"]",
"=",
"task_dir",
"self",
".",
"_paths",
"[",
"'task_config'",
"]",
"=",
"task_config",
"self",
".",
"_loaded",
"=",
"True",
"# task is setup, save active file",
"# note, order is *important*; this is needed first",
"# for the daemon to load",
"self",
".",
"_save_active_file",
"(",
")",
"# shell the focusd daemon",
"try",
":",
"started",
"=",
"daemon",
".",
"shell_focusd",
"(",
"self",
".",
"_paths",
"[",
"'base_dir'",
"]",
")",
"# user cancelled or passwords failed?",
"except",
"(",
"KeyboardInterrupt",
",",
"ValueError",
")",
":",
"self",
".",
"_clean",
"(",
")",
"return",
"False",
"# no event plugins registered, carry on",
"except",
"errors",
".",
"NoPluginsRegistered",
":",
"return",
"True",
"# failed, cleanup our mess",
"if",
"not",
"started",
":",
"self",
".",
"_clean",
"(",
")",
"raise",
"errors",
".",
"DaemonFailStart",
"return",
"True"
] | Starts a new task matching the provided name.
`task_name`
Name of existing task to start.
Returns boolean.
* Raises a ``TaskNotFound`` exception if task doesn't exist, an
``InvalidTaskConfig` exception if task config file is invalid, or
``DaemonFailStart`` exception if task daemons failed to fork. | [
"Starts",
"a",
"new",
"task",
"matching",
"the",
"provided",
"name",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L347-L417 |
250,053 | xtrementl/focus | focus/task.py | Task.stop | def stop(self):
""" Stops the current task and cleans up, including removing active
task config file.
* Raises ``NoActiveTask`` exception if no active task found.
"""
self._clean_prior()
if not self._loaded:
raise errors.NoActiveTask
self._clean() | python | def stop(self):
""" Stops the current task and cleans up, including removing active
task config file.
* Raises ``NoActiveTask`` exception if no active task found.
"""
self._clean_prior()
if not self._loaded:
raise errors.NoActiveTask
self._clean() | [
"def",
"stop",
"(",
"self",
")",
":",
"self",
".",
"_clean_prior",
"(",
")",
"if",
"not",
"self",
".",
"_loaded",
":",
"raise",
"errors",
".",
"NoActiveTask",
"self",
".",
"_clean",
"(",
")"
] | Stops the current task and cleans up, including removing active
task config file.
* Raises ``NoActiveTask`` exception if no active task found. | [
"Stops",
"the",
"current",
"task",
"and",
"cleans",
"up",
"including",
"removing",
"active",
"task",
"config",
"file",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L419-L431 |
250,054 | xtrementl/focus | focus/task.py | Task.set_total_duration | def set_total_duration(self, duration):
""" Set the total task duration in minutes.
"""
if duration < 1:
raise ValueError(u'Duration must be postive')
elif self.duration > duration:
raise ValueError(u'{0} must be greater than current duration')
self._total_duration = duration | python | def set_total_duration(self, duration):
""" Set the total task duration in minutes.
"""
if duration < 1:
raise ValueError(u'Duration must be postive')
elif self.duration > duration:
raise ValueError(u'{0} must be greater than current duration')
self._total_duration = duration | [
"def",
"set_total_duration",
"(",
"self",
",",
"duration",
")",
":",
"if",
"duration",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"u'Duration must be postive'",
")",
"elif",
"self",
".",
"duration",
">",
"duration",
":",
"raise",
"ValueError",
"(",
"u'{0} must be greater than current duration'",
")",
"self",
".",
"_total_duration",
"=",
"duration"
] | Set the total task duration in minutes. | [
"Set",
"the",
"total",
"task",
"duration",
"in",
"minutes",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L433-L442 |
250,055 | xtrementl/focus | focus/task.py | Task.active | def active(self):
""" Returns if task is active.
"""
if not os.path.isfile(self._paths['active_file']):
return False
return self._loaded | python | def active(self):
""" Returns if task is active.
"""
if not os.path.isfile(self._paths['active_file']):
return False
return self._loaded | [
"def",
"active",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"self",
".",
"_paths",
"[",
"'active_file'",
"]",
")",
":",
"return",
"False",
"return",
"self",
".",
"_loaded"
] | Returns if task is active. | [
"Returns",
"if",
"task",
"is",
"active",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L454-L459 |
250,056 | xtrementl/focus | focus/task.py | Task.duration | def duration(self):
""" Returns task's current duration in minutes.
"""
if not self._loaded:
return 0
delta = datetime.datetime.now() - self._start_time
total_secs = (delta.microseconds +
(delta.seconds + delta.days * 24 * 3600) *
10 ** 6) / 10 ** 6
return max(0, int(round(total_secs / 60.0))) | python | def duration(self):
""" Returns task's current duration in minutes.
"""
if not self._loaded:
return 0
delta = datetime.datetime.now() - self._start_time
total_secs = (delta.microseconds +
(delta.seconds + delta.days * 24 * 3600) *
10 ** 6) / 10 ** 6
return max(0, int(round(total_secs / 60.0))) | [
"def",
"duration",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_loaded",
":",
"return",
"0",
"delta",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"self",
".",
"_start_time",
"total_secs",
"=",
"(",
"delta",
".",
"microseconds",
"+",
"(",
"delta",
".",
"seconds",
"+",
"delta",
".",
"days",
"*",
"24",
"*",
"3600",
")",
"*",
"10",
"**",
"6",
")",
"/",
"10",
"**",
"6",
"return",
"max",
"(",
"0",
",",
"int",
"(",
"round",
"(",
"total_secs",
"/",
"60.0",
")",
")",
")"
] | Returns task's current duration in minutes. | [
"Returns",
"task",
"s",
"current",
"duration",
"in",
"minutes",
"."
] | cbbbc0b49a7409f9e0dc899de5b7e057f50838e4 | https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/task.py#L474-L486 |
250,057 | heikomuller/sco-client | scocli/cli.py | SCOCmdLine.list_objects | def list_objects(self, resources):
"""Generate a listing for a set of resource handles consisting of
resource identifier, name, and timestamp.
Parameters
----------
resources : list(ResourceHandle)
List of resource handles
Returns
-------
list(string)
"""
result = []
for res in resources:
result.append('\t'.join([res.identifier, res.name, str(res.timestamp)[:19]]))
return result | python | def list_objects(self, resources):
"""Generate a listing for a set of resource handles consisting of
resource identifier, name, and timestamp.
Parameters
----------
resources : list(ResourceHandle)
List of resource handles
Returns
-------
list(string)
"""
result = []
for res in resources:
result.append('\t'.join([res.identifier, res.name, str(res.timestamp)[:19]]))
return result | [
"def",
"list_objects",
"(",
"self",
",",
"resources",
")",
":",
"result",
"=",
"[",
"]",
"for",
"res",
"in",
"resources",
":",
"result",
".",
"append",
"(",
"'\\t'",
".",
"join",
"(",
"[",
"res",
".",
"identifier",
",",
"res",
".",
"name",
",",
"str",
"(",
"res",
".",
"timestamp",
")",
"[",
":",
"19",
"]",
"]",
")",
")",
"return",
"result"
] | Generate a listing for a set of resource handles consisting of
resource identifier, name, and timestamp.
Parameters
----------
resources : list(ResourceHandle)
List of resource handles
Returns
-------
list(string) | [
"Generate",
"a",
"listing",
"for",
"a",
"set",
"of",
"resource",
"handles",
"consisting",
"of",
"resource",
"identifier",
"name",
"and",
"timestamp",
"."
] | c4afab71297f73003379bba4c1679be9dcf7cef8 | https://github.com/heikomuller/sco-client/blob/c4afab71297f73003379bba4c1679be9dcf7cef8/scocli/cli.py#L49-L65 |
250,058 | klen/muffin-babel | example/__init__.py | set_locale | def set_locale(request):
"""Return locale from GET lang param or automatically."""
return request.query.get('lang', app.ps.babel.select_locale_by_request(request)) | python | def set_locale(request):
"""Return locale from GET lang param or automatically."""
return request.query.get('lang', app.ps.babel.select_locale_by_request(request)) | [
"def",
"set_locale",
"(",
"request",
")",
":",
"return",
"request",
".",
"query",
".",
"get",
"(",
"'lang'",
",",
"app",
".",
"ps",
".",
"babel",
".",
"select_locale_by_request",
"(",
"request",
")",
")"
] | Return locale from GET lang param or automatically. | [
"Return",
"locale",
"from",
"GET",
"lang",
"param",
"or",
"automatically",
"."
] | f48ebbbf7806c6c727f66d8d0df331b29f6ead08 | https://github.com/klen/muffin-babel/blob/f48ebbbf7806c6c727f66d8d0df331b29f6ead08/example/__init__.py#L26-L28 |
250,059 | MitalAshok/objecttools | objecttools/serializable.py | SerializableFunction.globals | def globals(self):
"""Find the globals of `self` by importing `self.module`"""
try:
return vars(__import__(self.module, fromlist=self.module.split('.')))
except ImportError:
if self.warn_import:
warnings.warn(ImportWarning(
'Cannot import module {} for SerializableFunction. Restricting to builtins.'.format(self.module)
))
return {'__builtins__': __builtins__} | python | def globals(self):
"""Find the globals of `self` by importing `self.module`"""
try:
return vars(__import__(self.module, fromlist=self.module.split('.')))
except ImportError:
if self.warn_import:
warnings.warn(ImportWarning(
'Cannot import module {} for SerializableFunction. Restricting to builtins.'.format(self.module)
))
return {'__builtins__': __builtins__} | [
"def",
"globals",
"(",
"self",
")",
":",
"try",
":",
"return",
"vars",
"(",
"__import__",
"(",
"self",
".",
"module",
",",
"fromlist",
"=",
"self",
".",
"module",
".",
"split",
"(",
"'.'",
")",
")",
")",
"except",
"ImportError",
":",
"if",
"self",
".",
"warn_import",
":",
"warnings",
".",
"warn",
"(",
"ImportWarning",
"(",
"'Cannot import module {} for SerializableFunction. Restricting to builtins.'",
".",
"format",
"(",
"self",
".",
"module",
")",
")",
")",
"return",
"{",
"'__builtins__'",
":",
"__builtins__",
"}"
] | Find the globals of `self` by importing `self.module` | [
"Find",
"the",
"globals",
"of",
"self",
"by",
"importing",
"self",
".",
"module"
] | bddd14d1f702c8b559d3fcc2099bc22370e16de7 | https://github.com/MitalAshok/objecttools/blob/bddd14d1f702c8b559d3fcc2099bc22370e16de7/objecttools/serializable.py#L161-L170 |
250,060 | MitalAshok/objecttools | objecttools/serializable.py | SerializableConstant.value | def value(self):
"""Import the constant from `self.module`"""
module = __import__(self.module, fromlist=self.module.split('.'))
if self.name is None:
return module
return getattr(module, self.name) | python | def value(self):
"""Import the constant from `self.module`"""
module = __import__(self.module, fromlist=self.module.split('.'))
if self.name is None:
return module
return getattr(module, self.name) | [
"def",
"value",
"(",
"self",
")",
":",
"module",
"=",
"__import__",
"(",
"self",
".",
"module",
",",
"fromlist",
"=",
"self",
".",
"module",
".",
"split",
"(",
"'.'",
")",
")",
"if",
"self",
".",
"name",
"is",
"None",
":",
"return",
"module",
"return",
"getattr",
"(",
"module",
",",
"self",
".",
"name",
")"
] | Import the constant from `self.module` | [
"Import",
"the",
"constant",
"from",
"self",
".",
"module"
] | bddd14d1f702c8b559d3fcc2099bc22370e16de7 | https://github.com/MitalAshok/objecttools/blob/bddd14d1f702c8b559d3fcc2099bc22370e16de7/objecttools/serializable.py#L260-L265 |
250,061 | emlazzarin/acrylic | acrylic/groupby.py | GroupbyTable.agg | def agg(self, func, *fields, **name):
"""
Calls the aggregation function `func` on each group in the GroubyTable,
and leaves the results in a new column with the name of the aggregation
function.
Call `.agg` with `name='desired_column_name' to choose a column
name for this aggregation.
"""
if name:
if len(name) > 1 or 'name' not in name:
raise TypeError("Unknown keyword args passed into `agg`: %s"
% name)
name = name.get('name')
if not isinstance(name, basestring):
raise TypeError("Column names must be strings, not `%s`"
% type(name))
else:
name = name
elif func.__name__ == '<lambda>':
name = "lambda%04d" % self.__lambda_num
self.__lambda_num += 1
name += "(%s)" % ','.join(fields)
else:
name = func.__name__
name += "(%s)" % ','.join(fields)
aggregated_column = []
if len(fields) > 1:
for groupkey in self.__grouptable['groupkey']:
agg_data = [tuple([row[field] for field in fields])
for row in self.__key_to_group_map[groupkey]]
aggregated_column.append(func(agg_data))
elif len(fields) == 1:
field = fields[0]
for groupkey in self.__grouptable['groupkey']:
agg_data = [row[field]
for row in self.__key_to_group_map[groupkey]]
aggregated_column.append(func(agg_data))
else:
for groupkey in self.__grouptable['groupkey']:
agg_data = self.__key_to_group_map[groupkey]
aggregated_column.append(func(agg_data))
self.__grouptable[name] = aggregated_column
return self | python | def agg(self, func, *fields, **name):
"""
Calls the aggregation function `func` on each group in the GroubyTable,
and leaves the results in a new column with the name of the aggregation
function.
Call `.agg` with `name='desired_column_name' to choose a column
name for this aggregation.
"""
if name:
if len(name) > 1 or 'name' not in name:
raise TypeError("Unknown keyword args passed into `agg`: %s"
% name)
name = name.get('name')
if not isinstance(name, basestring):
raise TypeError("Column names must be strings, not `%s`"
% type(name))
else:
name = name
elif func.__name__ == '<lambda>':
name = "lambda%04d" % self.__lambda_num
self.__lambda_num += 1
name += "(%s)" % ','.join(fields)
else:
name = func.__name__
name += "(%s)" % ','.join(fields)
aggregated_column = []
if len(fields) > 1:
for groupkey in self.__grouptable['groupkey']:
agg_data = [tuple([row[field] for field in fields])
for row in self.__key_to_group_map[groupkey]]
aggregated_column.append(func(agg_data))
elif len(fields) == 1:
field = fields[0]
for groupkey in self.__grouptable['groupkey']:
agg_data = [row[field]
for row in self.__key_to_group_map[groupkey]]
aggregated_column.append(func(agg_data))
else:
for groupkey in self.__grouptable['groupkey']:
agg_data = self.__key_to_group_map[groupkey]
aggregated_column.append(func(agg_data))
self.__grouptable[name] = aggregated_column
return self | [
"def",
"agg",
"(",
"self",
",",
"func",
",",
"*",
"fields",
",",
"*",
"*",
"name",
")",
":",
"if",
"name",
":",
"if",
"len",
"(",
"name",
")",
">",
"1",
"or",
"'name'",
"not",
"in",
"name",
":",
"raise",
"TypeError",
"(",
"\"Unknown keyword args passed into `agg`: %s\"",
"%",
"name",
")",
"name",
"=",
"name",
".",
"get",
"(",
"'name'",
")",
"if",
"not",
"isinstance",
"(",
"name",
",",
"basestring",
")",
":",
"raise",
"TypeError",
"(",
"\"Column names must be strings, not `%s`\"",
"%",
"type",
"(",
"name",
")",
")",
"else",
":",
"name",
"=",
"name",
"elif",
"func",
".",
"__name__",
"==",
"'<lambda>'",
":",
"name",
"=",
"\"lambda%04d\"",
"%",
"self",
".",
"__lambda_num",
"self",
".",
"__lambda_num",
"+=",
"1",
"name",
"+=",
"\"(%s)\"",
"%",
"','",
".",
"join",
"(",
"fields",
")",
"else",
":",
"name",
"=",
"func",
".",
"__name__",
"name",
"+=",
"\"(%s)\"",
"%",
"','",
".",
"join",
"(",
"fields",
")",
"aggregated_column",
"=",
"[",
"]",
"if",
"len",
"(",
"fields",
")",
">",
"1",
":",
"for",
"groupkey",
"in",
"self",
".",
"__grouptable",
"[",
"'groupkey'",
"]",
":",
"agg_data",
"=",
"[",
"tuple",
"(",
"[",
"row",
"[",
"field",
"]",
"for",
"field",
"in",
"fields",
"]",
")",
"for",
"row",
"in",
"self",
".",
"__key_to_group_map",
"[",
"groupkey",
"]",
"]",
"aggregated_column",
".",
"append",
"(",
"func",
"(",
"agg_data",
")",
")",
"elif",
"len",
"(",
"fields",
")",
"==",
"1",
":",
"field",
"=",
"fields",
"[",
"0",
"]",
"for",
"groupkey",
"in",
"self",
".",
"__grouptable",
"[",
"'groupkey'",
"]",
":",
"agg_data",
"=",
"[",
"row",
"[",
"field",
"]",
"for",
"row",
"in",
"self",
".",
"__key_to_group_map",
"[",
"groupkey",
"]",
"]",
"aggregated_column",
".",
"append",
"(",
"func",
"(",
"agg_data",
")",
")",
"else",
":",
"for",
"groupkey",
"in",
"self",
".",
"__grouptable",
"[",
"'groupkey'",
"]",
":",
"agg_data",
"=",
"self",
".",
"__key_to_group_map",
"[",
"groupkey",
"]",
"aggregated_column",
".",
"append",
"(",
"func",
"(",
"agg_data",
")",
")",
"self",
".",
"__grouptable",
"[",
"name",
"]",
"=",
"aggregated_column",
"return",
"self"
] | Calls the aggregation function `func` on each group in the GroubyTable,
and leaves the results in a new column with the name of the aggregation
function.
Call `.agg` with `name='desired_column_name' to choose a column
name for this aggregation. | [
"Calls",
"the",
"aggregation",
"function",
"func",
"on",
"each",
"group",
"in",
"the",
"GroubyTable",
"and",
"leaves",
"the",
"results",
"in",
"a",
"new",
"column",
"with",
"the",
"name",
"of",
"the",
"aggregation",
"function",
"."
] | 08c6702d73b9660ead1024653f4fa016f6340e46 | https://github.com/emlazzarin/acrylic/blob/08c6702d73b9660ead1024653f4fa016f6340e46/acrylic/groupby.py#L71-L117 |
250,062 | emlazzarin/acrylic | acrylic/groupby.py | GroupbyTable.collect | def collect(self):
"""
After adding the desired aggregation columns, `collect`
finalizes the groupby operation by converting the
GroupbyTable into a DataTable.
The first columns of the resulting table are the groupfields,
followed by the aggregation columns specified in preceeding
`agg` calls.
"""
# The final order of columns is determined by the
# group keys and the aggregation columns
final_field_order = list(self.__groupfields) + self.__grouptable.fields
# Transform the group key rows into columns
col_values = izip(*self.__grouptable['groupkey'])
# Assign the columns to the table with the relevant name
for groupfield, column in izip(self.__groupfields, col_values):
self.__grouptable[groupfield] = column
# Reorder the columns as defined above
self.__grouptable.reorder(final_field_order)
del self.__grouptable['groupkey']
return self.__grouptable | python | def collect(self):
"""
After adding the desired aggregation columns, `collect`
finalizes the groupby operation by converting the
GroupbyTable into a DataTable.
The first columns of the resulting table are the groupfields,
followed by the aggregation columns specified in preceeding
`agg` calls.
"""
# The final order of columns is determined by the
# group keys and the aggregation columns
final_field_order = list(self.__groupfields) + self.__grouptable.fields
# Transform the group key rows into columns
col_values = izip(*self.__grouptable['groupkey'])
# Assign the columns to the table with the relevant name
for groupfield, column in izip(self.__groupfields, col_values):
self.__grouptable[groupfield] = column
# Reorder the columns as defined above
self.__grouptable.reorder(final_field_order)
del self.__grouptable['groupkey']
return self.__grouptable | [
"def",
"collect",
"(",
"self",
")",
":",
"# The final order of columns is determined by the",
"# group keys and the aggregation columns",
"final_field_order",
"=",
"list",
"(",
"self",
".",
"__groupfields",
")",
"+",
"self",
".",
"__grouptable",
".",
"fields",
"# Transform the group key rows into columns",
"col_values",
"=",
"izip",
"(",
"*",
"self",
".",
"__grouptable",
"[",
"'groupkey'",
"]",
")",
"# Assign the columns to the table with the relevant name",
"for",
"groupfield",
",",
"column",
"in",
"izip",
"(",
"self",
".",
"__groupfields",
",",
"col_values",
")",
":",
"self",
".",
"__grouptable",
"[",
"groupfield",
"]",
"=",
"column",
"# Reorder the columns as defined above",
"self",
".",
"__grouptable",
".",
"reorder",
"(",
"final_field_order",
")",
"del",
"self",
".",
"__grouptable",
"[",
"'groupkey'",
"]",
"return",
"self",
".",
"__grouptable"
] | After adding the desired aggregation columns, `collect`
finalizes the groupby operation by converting the
GroupbyTable into a DataTable.
The first columns of the resulting table are the groupfields,
followed by the aggregation columns specified in preceeding
`agg` calls. | [
"After",
"adding",
"the",
"desired",
"aggregation",
"columns",
"collect",
"finalizes",
"the",
"groupby",
"operation",
"by",
"converting",
"the",
"GroupbyTable",
"into",
"a",
"DataTable",
"."
] | 08c6702d73b9660ead1024653f4fa016f6340e46 | https://github.com/emlazzarin/acrylic/blob/08c6702d73b9660ead1024653f4fa016f6340e46/acrylic/groupby.py#L124-L149 |
250,063 | eisensheng/kaviar | kaviar/api.py | kv_format_dict | def kv_format_dict(d, keys=None, separator=DEFAULT_SEPARATOR):
"""Formats the given dictionary ``d``.
For more details see :func:`kv_format`.
:param collections.Mapping d:
Dictionary containing values to format.
:param collections.Iterable keys:
List of keys to extract from the dict.
:param str separator:
Value between two pairs.
:return:
Key-Value formatted content generated from ``d``.
:rtype:
:data:`six.text_type <six:six.text_type>`
"""
return _format_pairs(dump_dict(d, keys), separator=separator) | python | def kv_format_dict(d, keys=None, separator=DEFAULT_SEPARATOR):
"""Formats the given dictionary ``d``.
For more details see :func:`kv_format`.
:param collections.Mapping d:
Dictionary containing values to format.
:param collections.Iterable keys:
List of keys to extract from the dict.
:param str separator:
Value between two pairs.
:return:
Key-Value formatted content generated from ``d``.
:rtype:
:data:`six.text_type <six:six.text_type>`
"""
return _format_pairs(dump_dict(d, keys), separator=separator) | [
"def",
"kv_format_dict",
"(",
"d",
",",
"keys",
"=",
"None",
",",
"separator",
"=",
"DEFAULT_SEPARATOR",
")",
":",
"return",
"_format_pairs",
"(",
"dump_dict",
"(",
"d",
",",
"keys",
")",
",",
"separator",
"=",
"separator",
")"
] | Formats the given dictionary ``d``.
For more details see :func:`kv_format`.
:param collections.Mapping d:
Dictionary containing values to format.
:param collections.Iterable keys:
List of keys to extract from the dict.
:param str separator:
Value between two pairs.
:return:
Key-Value formatted content generated from ``d``.
:rtype:
:data:`six.text_type <six:six.text_type>` | [
"Formats",
"the",
"given",
"dictionary",
"d",
"."
] | 77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f | https://github.com/eisensheng/kaviar/blob/77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f/kaviar/api.py#L18-L34 |
250,064 | eisensheng/kaviar | kaviar/api.py | kv_format_object | def kv_format_object(o, keys=None, separator=DEFAULT_SEPARATOR):
"""Formats an object's attributes. Useful for object representation
implementation. Will skip methods or private attributes.
For more details see :func:`kv_format`.
:param o:
Object to format.
:param collections.Sequence keys:
Explicit list of attributes to format. ``None`` means all public
visible attribute for the given object will be formatted.
:param str separator:
Value between two pairs.
:return:
Formatted Object attributes.
:rtype:
:data:`six.text_type <six:six.text_type>`
"""
if keys is None:
key_values = []
for k, v in ((x, getattr(o, x)) for x in sorted(dir(o))):
if k.startswith('_') or isroutine(v):
continue
key_values += (k, v),
else:
key_values = ((k, getattr(o, k)) for k in keys)
return kv_format_pairs(key_values, separator) | python | def kv_format_object(o, keys=None, separator=DEFAULT_SEPARATOR):
"""Formats an object's attributes. Useful for object representation
implementation. Will skip methods or private attributes.
For more details see :func:`kv_format`.
:param o:
Object to format.
:param collections.Sequence keys:
Explicit list of attributes to format. ``None`` means all public
visible attribute for the given object will be formatted.
:param str separator:
Value between two pairs.
:return:
Formatted Object attributes.
:rtype:
:data:`six.text_type <six:six.text_type>`
"""
if keys is None:
key_values = []
for k, v in ((x, getattr(o, x)) for x in sorted(dir(o))):
if k.startswith('_') or isroutine(v):
continue
key_values += (k, v),
else:
key_values = ((k, getattr(o, k)) for k in keys)
return kv_format_pairs(key_values, separator) | [
"def",
"kv_format_object",
"(",
"o",
",",
"keys",
"=",
"None",
",",
"separator",
"=",
"DEFAULT_SEPARATOR",
")",
":",
"if",
"keys",
"is",
"None",
":",
"key_values",
"=",
"[",
"]",
"for",
"k",
",",
"v",
"in",
"(",
"(",
"x",
",",
"getattr",
"(",
"o",
",",
"x",
")",
")",
"for",
"x",
"in",
"sorted",
"(",
"dir",
"(",
"o",
")",
")",
")",
":",
"if",
"k",
".",
"startswith",
"(",
"'_'",
")",
"or",
"isroutine",
"(",
"v",
")",
":",
"continue",
"key_values",
"+=",
"(",
"k",
",",
"v",
")",
",",
"else",
":",
"key_values",
"=",
"(",
"(",
"k",
",",
"getattr",
"(",
"o",
",",
"k",
")",
")",
"for",
"k",
"in",
"keys",
")",
"return",
"kv_format_pairs",
"(",
"key_values",
",",
"separator",
")"
] | Formats an object's attributes. Useful for object representation
implementation. Will skip methods or private attributes.
For more details see :func:`kv_format`.
:param o:
Object to format.
:param collections.Sequence keys:
Explicit list of attributes to format. ``None`` means all public
visible attribute for the given object will be formatted.
:param str separator:
Value between two pairs.
:return:
Formatted Object attributes.
:rtype:
:data:`six.text_type <six:six.text_type>` | [
"Formats",
"an",
"object",
"s",
"attributes",
".",
"Useful",
"for",
"object",
"representation",
"implementation",
".",
"Will",
"skip",
"methods",
"or",
"private",
"attributes",
"."
] | 77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f | https://github.com/eisensheng/kaviar/blob/77ab934a3dd7b1cfabc0ec96acc0b8ed26edcb3f/kaviar/api.py#L54-L81 |
250,065 | praekeltfoundation/seed-service-rating | ratings/views.py | InviteSend.post | def post(self, request, *args, **kwargs):
""" Triggers the task that sends invitation messages
"""
status = 201
accepted = {"accepted": True}
send_invite_messages.apply_async()
return Response(accepted, status=status) | python | def post(self, request, *args, **kwargs):
""" Triggers the task that sends invitation messages
"""
status = 201
accepted = {"accepted": True}
send_invite_messages.apply_async()
return Response(accepted, status=status) | [
"def",
"post",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"status",
"=",
"201",
"accepted",
"=",
"{",
"\"accepted\"",
":",
"True",
"}",
"send_invite_messages",
".",
"apply_async",
"(",
")",
"return",
"Response",
"(",
"accepted",
",",
"status",
"=",
"status",
")"
] | Triggers the task that sends invitation messages | [
"Triggers",
"the",
"task",
"that",
"sends",
"invitation",
"messages"
] | 73f7974a5bcb6e1f32a756be5274b200084c2670 | https://github.com/praekeltfoundation/seed-service-rating/blob/73f7974a5bcb6e1f32a756be5274b200084c2670/ratings/views.py#L59-L65 |
250,066 | praekeltfoundation/seed-service-rating | ratings/views.py | UserView.post | def post(self, request):
'''Create a user and token, given an email. If user exists just
provide the token.'''
serializer = CreateUserSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
email = serializer.validated_data.get('email')
try:
user = User.objects.get(username=email)
except User.DoesNotExist:
user = User.objects.create_user(email, email=email)
token, created = Token.objects.get_or_create(user=user)
return Response(
status=status.HTTP_201_CREATED, data={'token': token.key}) | python | def post(self, request):
'''Create a user and token, given an email. If user exists just
provide the token.'''
serializer = CreateUserSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
email = serializer.validated_data.get('email')
try:
user = User.objects.get(username=email)
except User.DoesNotExist:
user = User.objects.create_user(email, email=email)
token, created = Token.objects.get_or_create(user=user)
return Response(
status=status.HTTP_201_CREATED, data={'token': token.key}) | [
"def",
"post",
"(",
"self",
",",
"request",
")",
":",
"serializer",
"=",
"CreateUserSerializer",
"(",
"data",
"=",
"request",
".",
"data",
")",
"serializer",
".",
"is_valid",
"(",
"raise_exception",
"=",
"True",
")",
"email",
"=",
"serializer",
".",
"validated_data",
".",
"get",
"(",
"'email'",
")",
"try",
":",
"user",
"=",
"User",
".",
"objects",
".",
"get",
"(",
"username",
"=",
"email",
")",
"except",
"User",
".",
"DoesNotExist",
":",
"user",
"=",
"User",
".",
"objects",
".",
"create_user",
"(",
"email",
",",
"email",
"=",
"email",
")",
"token",
",",
"created",
"=",
"Token",
".",
"objects",
".",
"get_or_create",
"(",
"user",
"=",
"user",
")",
"return",
"Response",
"(",
"status",
"=",
"status",
".",
"HTTP_201_CREATED",
",",
"data",
"=",
"{",
"'token'",
":",
"token",
".",
"key",
"}",
")"
] | Create a user and token, given an email. If user exists just
provide the token. | [
"Create",
"a",
"user",
"and",
"token",
"given",
"an",
"email",
".",
"If",
"user",
"exists",
"just",
"provide",
"the",
"token",
"."
] | 73f7974a5bcb6e1f32a756be5274b200084c2670 | https://github.com/praekeltfoundation/seed-service-rating/blob/73f7974a5bcb6e1f32a756be5274b200084c2670/ratings/views.py#L94-L108 |
250,067 | djangomini/djangomini | djangomini/controllers.py | Controller.dispatch | def dispatch(self, request, *args, **kwargs):
"""
Redefine parent's method.
Called on each new request from user.
Main difference between Django's approach and ours - we don't push
a 'request' to a method call. We use 'self.request' instead.
"""
# this part copied from django source code
if request.method.lower() in self.http_method_names:
handler = getattr(self, request.method.lower(),
self.http_method_not_allowed)
else:
handler = self.http_method_not_allowed
# we changed only this line - removed first 'request' argument
return handler(*args, **kwargs) | python | def dispatch(self, request, *args, **kwargs):
"""
Redefine parent's method.
Called on each new request from user.
Main difference between Django's approach and ours - we don't push
a 'request' to a method call. We use 'self.request' instead.
"""
# this part copied from django source code
if request.method.lower() in self.http_method_names:
handler = getattr(self, request.method.lower(),
self.http_method_not_allowed)
else:
handler = self.http_method_not_allowed
# we changed only this line - removed first 'request' argument
return handler(*args, **kwargs) | [
"def",
"dispatch",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# this part copied from django source code",
"if",
"request",
".",
"method",
".",
"lower",
"(",
")",
"in",
"self",
".",
"http_method_names",
":",
"handler",
"=",
"getattr",
"(",
"self",
",",
"request",
".",
"method",
".",
"lower",
"(",
")",
",",
"self",
".",
"http_method_not_allowed",
")",
"else",
":",
"handler",
"=",
"self",
".",
"http_method_not_allowed",
"# we changed only this line - removed first 'request' argument",
"return",
"handler",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | Redefine parent's method.
Called on each new request from user.
Main difference between Django's approach and ours - we don't push
a 'request' to a method call. We use 'self.request' instead. | [
"Redefine",
"parent",
"s",
"method",
"."
] | cfbe2d59acf0e89e5fd442df8952f9a117a63875 | https://github.com/djangomini/djangomini/blob/cfbe2d59acf0e89e5fd442df8952f9a117a63875/djangomini/controllers.py#L21-L37 |
250,068 | djangomini/djangomini | djangomini/controllers.py | Controller.html | def html(self, data=None, template=None):
"""
Send html document to user.
Args:
- data: Dict to render template, or string with rendered HTML.
- template: Name of template to render HTML document with passed data.
"""
if data is None:
data = {}
if template:
return render(self.request, template, data)
return HttpResponse(data) | python | def html(self, data=None, template=None):
"""
Send html document to user.
Args:
- data: Dict to render template, or string with rendered HTML.
- template: Name of template to render HTML document with passed data.
"""
if data is None:
data = {}
if template:
return render(self.request, template, data)
return HttpResponse(data) | [
"def",
"html",
"(",
"self",
",",
"data",
"=",
"None",
",",
"template",
"=",
"None",
")",
":",
"if",
"data",
"is",
"None",
":",
"data",
"=",
"{",
"}",
"if",
"template",
":",
"return",
"render",
"(",
"self",
".",
"request",
",",
"template",
",",
"data",
")",
"return",
"HttpResponse",
"(",
"data",
")"
] | Send html document to user.
Args:
- data: Dict to render template, or string with rendered HTML.
- template: Name of template to render HTML document with passed data. | [
"Send",
"html",
"document",
"to",
"user",
"."
] | cfbe2d59acf0e89e5fd442df8952f9a117a63875 | https://github.com/djangomini/djangomini/blob/cfbe2d59acf0e89e5fd442df8952f9a117a63875/djangomini/controllers.py#L39-L51 |
250,069 | OpenVolunteeringPlatform/django-ovp-search | ovp_search/filters.py | by_skills | def by_skills(queryset, skill_string=None):
""" Filter queryset by a comma delimeted skill list """
if skill_string:
operator, items = get_operator_and_items(skill_string)
q_obj = SQ()
for s in items:
if len(s) > 0:
q_obj.add(SQ(skills=s), operator)
queryset = queryset.filter(q_obj)
return queryset | python | def by_skills(queryset, skill_string=None):
""" Filter queryset by a comma delimeted skill list """
if skill_string:
operator, items = get_operator_and_items(skill_string)
q_obj = SQ()
for s in items:
if len(s) > 0:
q_obj.add(SQ(skills=s), operator)
queryset = queryset.filter(q_obj)
return queryset | [
"def",
"by_skills",
"(",
"queryset",
",",
"skill_string",
"=",
"None",
")",
":",
"if",
"skill_string",
":",
"operator",
",",
"items",
"=",
"get_operator_and_items",
"(",
"skill_string",
")",
"q_obj",
"=",
"SQ",
"(",
")",
"for",
"s",
"in",
"items",
":",
"if",
"len",
"(",
"s",
")",
">",
"0",
":",
"q_obj",
".",
"add",
"(",
"SQ",
"(",
"skills",
"=",
"s",
")",
",",
"operator",
")",
"queryset",
"=",
"queryset",
".",
"filter",
"(",
"q_obj",
")",
"return",
"queryset"
] | Filter queryset by a comma delimeted skill list | [
"Filter",
"queryset",
"by",
"a",
"comma",
"delimeted",
"skill",
"list"
] | 003ceecc0a87be31fe8195f65367c52631f72b57 | https://github.com/OpenVolunteeringPlatform/django-ovp-search/blob/003ceecc0a87be31fe8195f65367c52631f72b57/ovp_search/filters.py#L78-L87 |
250,070 | OpenVolunteeringPlatform/django-ovp-search | ovp_search/filters.py | by_causes | def by_causes(queryset, cause_string=None):
""" Filter queryset by a comma delimeted cause list """
if cause_string:
operator, items = get_operator_and_items(cause_string)
q_obj = SQ()
for c in items:
if len(c) > 0:
q_obj.add(SQ(causes=c), operator)
queryset = queryset.filter(q_obj)
return queryset | python | def by_causes(queryset, cause_string=None):
""" Filter queryset by a comma delimeted cause list """
if cause_string:
operator, items = get_operator_and_items(cause_string)
q_obj = SQ()
for c in items:
if len(c) > 0:
q_obj.add(SQ(causes=c), operator)
queryset = queryset.filter(q_obj)
return queryset | [
"def",
"by_causes",
"(",
"queryset",
",",
"cause_string",
"=",
"None",
")",
":",
"if",
"cause_string",
":",
"operator",
",",
"items",
"=",
"get_operator_and_items",
"(",
"cause_string",
")",
"q_obj",
"=",
"SQ",
"(",
")",
"for",
"c",
"in",
"items",
":",
"if",
"len",
"(",
"c",
")",
">",
"0",
":",
"q_obj",
".",
"add",
"(",
"SQ",
"(",
"causes",
"=",
"c",
")",
",",
"operator",
")",
"queryset",
"=",
"queryset",
".",
"filter",
"(",
"q_obj",
")",
"return",
"queryset"
] | Filter queryset by a comma delimeted cause list | [
"Filter",
"queryset",
"by",
"a",
"comma",
"delimeted",
"cause",
"list"
] | 003ceecc0a87be31fe8195f65367c52631f72b57 | https://github.com/OpenVolunteeringPlatform/django-ovp-search/blob/003ceecc0a87be31fe8195f65367c52631f72b57/ovp_search/filters.py#L90-L99 |
250,071 | OpenVolunteeringPlatform/django-ovp-search | ovp_search/filters.py | by_published | def by_published(queryset, published_string='true'):
""" Filter queryset by publish status """
if published_string == 'true':
queryset = queryset.filter(published=1)
elif published_string == 'false':
queryset = queryset.filter(published=0)
# Any other value will return both published and unpublished
return queryset | python | def by_published(queryset, published_string='true'):
""" Filter queryset by publish status """
if published_string == 'true':
queryset = queryset.filter(published=1)
elif published_string == 'false':
queryset = queryset.filter(published=0)
# Any other value will return both published and unpublished
return queryset | [
"def",
"by_published",
"(",
"queryset",
",",
"published_string",
"=",
"'true'",
")",
":",
"if",
"published_string",
"==",
"'true'",
":",
"queryset",
"=",
"queryset",
".",
"filter",
"(",
"published",
"=",
"1",
")",
"elif",
"published_string",
"==",
"'false'",
":",
"queryset",
"=",
"queryset",
".",
"filter",
"(",
"published",
"=",
"0",
")",
"# Any other value will return both published and unpublished",
"return",
"queryset"
] | Filter queryset by publish status | [
"Filter",
"queryset",
"by",
"publish",
"status"
] | 003ceecc0a87be31fe8195f65367c52631f72b57 | https://github.com/OpenVolunteeringPlatform/django-ovp-search/blob/003ceecc0a87be31fe8195f65367c52631f72b57/ovp_search/filters.py#L102-L109 |
250,072 | OpenVolunteeringPlatform/django-ovp-search | ovp_search/filters.py | by_name | def by_name(queryset, name=None):
""" Filter queryset by name, with word wide auto-completion """
if name:
queryset = queryset.filter(name=name)
return queryset | python | def by_name(queryset, name=None):
""" Filter queryset by name, with word wide auto-completion """
if name:
queryset = queryset.filter(name=name)
return queryset | [
"def",
"by_name",
"(",
"queryset",
",",
"name",
"=",
"None",
")",
":",
"if",
"name",
":",
"queryset",
"=",
"queryset",
".",
"filter",
"(",
"name",
"=",
"name",
")",
"return",
"queryset"
] | Filter queryset by name, with word wide auto-completion | [
"Filter",
"queryset",
"by",
"name",
"with",
"word",
"wide",
"auto",
"-",
"completion"
] | 003ceecc0a87be31fe8195f65367c52631f72b57 | https://github.com/OpenVolunteeringPlatform/django-ovp-search/blob/003ceecc0a87be31fe8195f65367c52631f72b57/ovp_search/filters.py#L112-L116 |
250,073 | OpenVolunteeringPlatform/django-ovp-search | ovp_search/filters.py | by_address | def by_address(queryset, address='', project=False):
"""
Filter queryset by publish status.
If project=True, we also apply a project exclusive filter
"""
if address:
address = json.loads(address)
if u'address_components' in address:
q_objs = []
"""
Caribbean filter
"""
if len(address[u'address_components']):
if address[u'address_components'][0]['long_name'] == 'Caribbean':
queryset = queryset.filter(
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Jamaica', 'country').strip())) |
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Haiti', 'country').strip())) |
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Saint Lucia', 'country').strip())) |
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Suriname', 'country').strip())) |
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Trinidad & Tobago', 'country').strip()))
)
return queryset
for component in address[u'address_components']:
q_obj = SQ()
test = ''
for component_type in component[u'types']:
type_string = helpers.whoosh_raw(u"{}-{}".format(component[u'long_name'], component_type).strip())
q_obj.add(SQ(address_components=type_string), SQ.OR)
q_objs.append(q_obj)
# Filter all address components
for obj in q_objs:
queryset = queryset.filter(obj)
else: # remote projects
if project:
queryset = queryset.filter(can_be_done_remotely=1)
return queryset | python | def by_address(queryset, address='', project=False):
"""
Filter queryset by publish status.
If project=True, we also apply a project exclusive filter
"""
if address:
address = json.loads(address)
if u'address_components' in address:
q_objs = []
"""
Caribbean filter
"""
if len(address[u'address_components']):
if address[u'address_components'][0]['long_name'] == 'Caribbean':
queryset = queryset.filter(
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Jamaica', 'country').strip())) |
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Haiti', 'country').strip())) |
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Saint Lucia', 'country').strip())) |
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Suriname', 'country').strip())) |
SQ(address_components=helpers.whoosh_raw(u"{}-{}".format('Trinidad & Tobago', 'country').strip()))
)
return queryset
for component in address[u'address_components']:
q_obj = SQ()
test = ''
for component_type in component[u'types']:
type_string = helpers.whoosh_raw(u"{}-{}".format(component[u'long_name'], component_type).strip())
q_obj.add(SQ(address_components=type_string), SQ.OR)
q_objs.append(q_obj)
# Filter all address components
for obj in q_objs:
queryset = queryset.filter(obj)
else: # remote projects
if project:
queryset = queryset.filter(can_be_done_remotely=1)
return queryset | [
"def",
"by_address",
"(",
"queryset",
",",
"address",
"=",
"''",
",",
"project",
"=",
"False",
")",
":",
"if",
"address",
":",
"address",
"=",
"json",
".",
"loads",
"(",
"address",
")",
"if",
"u'address_components'",
"in",
"address",
":",
"q_objs",
"=",
"[",
"]",
"\"\"\"\n Caribbean filter\n \"\"\"",
"if",
"len",
"(",
"address",
"[",
"u'address_components'",
"]",
")",
":",
"if",
"address",
"[",
"u'address_components'",
"]",
"[",
"0",
"]",
"[",
"'long_name'",
"]",
"==",
"'Caribbean'",
":",
"queryset",
"=",
"queryset",
".",
"filter",
"(",
"SQ",
"(",
"address_components",
"=",
"helpers",
".",
"whoosh_raw",
"(",
"u\"{}-{}\"",
".",
"format",
"(",
"'Jamaica'",
",",
"'country'",
")",
".",
"strip",
"(",
")",
")",
")",
"|",
"SQ",
"(",
"address_components",
"=",
"helpers",
".",
"whoosh_raw",
"(",
"u\"{}-{}\"",
".",
"format",
"(",
"'Haiti'",
",",
"'country'",
")",
".",
"strip",
"(",
")",
")",
")",
"|",
"SQ",
"(",
"address_components",
"=",
"helpers",
".",
"whoosh_raw",
"(",
"u\"{}-{}\"",
".",
"format",
"(",
"'Saint Lucia'",
",",
"'country'",
")",
".",
"strip",
"(",
")",
")",
")",
"|",
"SQ",
"(",
"address_components",
"=",
"helpers",
".",
"whoosh_raw",
"(",
"u\"{}-{}\"",
".",
"format",
"(",
"'Suriname'",
",",
"'country'",
")",
".",
"strip",
"(",
")",
")",
")",
"|",
"SQ",
"(",
"address_components",
"=",
"helpers",
".",
"whoosh_raw",
"(",
"u\"{}-{}\"",
".",
"format",
"(",
"'Trinidad & Tobago'",
",",
"'country'",
")",
".",
"strip",
"(",
")",
")",
")",
")",
"return",
"queryset",
"for",
"component",
"in",
"address",
"[",
"u'address_components'",
"]",
":",
"q_obj",
"=",
"SQ",
"(",
")",
"test",
"=",
"''",
"for",
"component_type",
"in",
"component",
"[",
"u'types'",
"]",
":",
"type_string",
"=",
"helpers",
".",
"whoosh_raw",
"(",
"u\"{}-{}\"",
".",
"format",
"(",
"component",
"[",
"u'long_name'",
"]",
",",
"component_type",
")",
".",
"strip",
"(",
")",
")",
"q_obj",
".",
"add",
"(",
"SQ",
"(",
"address_components",
"=",
"type_string",
")",
",",
"SQ",
".",
"OR",
")",
"q_objs",
".",
"append",
"(",
"q_obj",
")",
"# Filter all address components",
"for",
"obj",
"in",
"q_objs",
":",
"queryset",
"=",
"queryset",
".",
"filter",
"(",
"obj",
")",
"else",
":",
"# remote projects",
"if",
"project",
":",
"queryset",
"=",
"queryset",
".",
"filter",
"(",
"can_be_done_remotely",
"=",
"1",
")",
"return",
"queryset"
] | Filter queryset by publish status.
If project=True, we also apply a project exclusive filter | [
"Filter",
"queryset",
"by",
"publish",
"status",
"."
] | 003ceecc0a87be31fe8195f65367c52631f72b57 | https://github.com/OpenVolunteeringPlatform/django-ovp-search/blob/003ceecc0a87be31fe8195f65367c52631f72b57/ovp_search/filters.py#L119-L161 |
250,074 | OpenVolunteeringPlatform/django-ovp-search | ovp_search/filters.py | filter_out | def filter_out(queryset, setting_name):
"""
Remove unwanted results from queryset
"""
kwargs = helpers.get_settings().get(setting_name, {}).get('FILTER_OUT', {})
queryset = queryset.exclude(**kwargs)
return queryset | python | def filter_out(queryset, setting_name):
"""
Remove unwanted results from queryset
"""
kwargs = helpers.get_settings().get(setting_name, {}).get('FILTER_OUT', {})
queryset = queryset.exclude(**kwargs)
return queryset | [
"def",
"filter_out",
"(",
"queryset",
",",
"setting_name",
")",
":",
"kwargs",
"=",
"helpers",
".",
"get_settings",
"(",
")",
".",
"get",
"(",
"setting_name",
",",
"{",
"}",
")",
".",
"get",
"(",
"'FILTER_OUT'",
",",
"{",
"}",
")",
"queryset",
"=",
"queryset",
".",
"exclude",
"(",
"*",
"*",
"kwargs",
")",
"return",
"queryset"
] | Remove unwanted results from queryset | [
"Remove",
"unwanted",
"results",
"from",
"queryset"
] | 003ceecc0a87be31fe8195f65367c52631f72b57 | https://github.com/OpenVolunteeringPlatform/django-ovp-search/blob/003ceecc0a87be31fe8195f65367c52631f72b57/ovp_search/filters.py#L163-L169 |
250,075 | 20tab/twentytab-tree | tree/models.py | list_apps | def list_apps():
"""
It returns a list of application contained in PROJECT_APPS
"""
return [(d.split('.')[-1], d.split('.')[-1]) for d in os.listdir(
os.getcwd()) if is_app(u"{}/{}".format(os.getcwd(), d))] | python | def list_apps():
"""
It returns a list of application contained in PROJECT_APPS
"""
return [(d.split('.')[-1], d.split('.')[-1]) for d in os.listdir(
os.getcwd()) if is_app(u"{}/{}".format(os.getcwd(), d))] | [
"def",
"list_apps",
"(",
")",
":",
"return",
"[",
"(",
"d",
".",
"split",
"(",
"'.'",
")",
"[",
"-",
"1",
"]",
",",
"d",
".",
"split",
"(",
"'.'",
")",
"[",
"-",
"1",
"]",
")",
"for",
"d",
"in",
"os",
".",
"listdir",
"(",
"os",
".",
"getcwd",
"(",
")",
")",
"if",
"is_app",
"(",
"u\"{}/{}\"",
".",
"format",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"d",
")",
")",
"]"
] | It returns a list of application contained in PROJECT_APPS | [
"It",
"returns",
"a",
"list",
"of",
"application",
"contained",
"in",
"PROJECT_APPS"
] | f2c1ced33e6c211bb52a25a7d48155e39fbdc088 | https://github.com/20tab/twentytab-tree/blob/f2c1ced33e6c211bb52a25a7d48155e39fbdc088/tree/models.py#L291-L296 |
250,076 | 20tab/twentytab-tree | tree/models.py | Node.slug | def slug(self):
"""
It returns node's slug
"""
if self.is_root_node():
return ""
if self.slugable and self.parent.parent:
if not self.page.regex or (self.page.regex and not self.page.show_regex) or self.is_leaf_node():
return u"{0}/{1}".format(self.parent.slug, self.page.slug)
elif self.page.regex and self.value_regex and self.page.show_regex:
return u'{0}/{1}/{2}'.format(self.parent.slug, self.page.slug, self.value_regex)
elif not self.hide_in_url:
return u'{0}/{1}'.format(self.parent.slug, self.name)
elif self.slugable:
if not self.page.regex or (self.page.regex and not self.page.show_regex) or self.is_leaf_node():
return u"{0}".format(self.page.slug)
elif self.page.regex and self.value_regex and self.page.show_regex:
return u'{0}/{1}'.format(self.page.slug, self.value_regex)
elif not self.hide_in_url:
return u'{0}'.format(self.name)
return "" | python | def slug(self):
"""
It returns node's slug
"""
if self.is_root_node():
return ""
if self.slugable and self.parent.parent:
if not self.page.regex or (self.page.regex and not self.page.show_regex) or self.is_leaf_node():
return u"{0}/{1}".format(self.parent.slug, self.page.slug)
elif self.page.regex and self.value_regex and self.page.show_regex:
return u'{0}/{1}/{2}'.format(self.parent.slug, self.page.slug, self.value_regex)
elif not self.hide_in_url:
return u'{0}/{1}'.format(self.parent.slug, self.name)
elif self.slugable:
if not self.page.regex or (self.page.regex and not self.page.show_regex) or self.is_leaf_node():
return u"{0}".format(self.page.slug)
elif self.page.regex and self.value_regex and self.page.show_regex:
return u'{0}/{1}'.format(self.page.slug, self.value_regex)
elif not self.hide_in_url:
return u'{0}'.format(self.name)
return "" | [
"def",
"slug",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_root_node",
"(",
")",
":",
"return",
"\"\"",
"if",
"self",
".",
"slugable",
"and",
"self",
".",
"parent",
".",
"parent",
":",
"if",
"not",
"self",
".",
"page",
".",
"regex",
"or",
"(",
"self",
".",
"page",
".",
"regex",
"and",
"not",
"self",
".",
"page",
".",
"show_regex",
")",
"or",
"self",
".",
"is_leaf_node",
"(",
")",
":",
"return",
"u\"{0}/{1}\"",
".",
"format",
"(",
"self",
".",
"parent",
".",
"slug",
",",
"self",
".",
"page",
".",
"slug",
")",
"elif",
"self",
".",
"page",
".",
"regex",
"and",
"self",
".",
"value_regex",
"and",
"self",
".",
"page",
".",
"show_regex",
":",
"return",
"u'{0}/{1}/{2}'",
".",
"format",
"(",
"self",
".",
"parent",
".",
"slug",
",",
"self",
".",
"page",
".",
"slug",
",",
"self",
".",
"value_regex",
")",
"elif",
"not",
"self",
".",
"hide_in_url",
":",
"return",
"u'{0}/{1}'",
".",
"format",
"(",
"self",
".",
"parent",
".",
"slug",
",",
"self",
".",
"name",
")",
"elif",
"self",
".",
"slugable",
":",
"if",
"not",
"self",
".",
"page",
".",
"regex",
"or",
"(",
"self",
".",
"page",
".",
"regex",
"and",
"not",
"self",
".",
"page",
".",
"show_regex",
")",
"or",
"self",
".",
"is_leaf_node",
"(",
")",
":",
"return",
"u\"{0}\"",
".",
"format",
"(",
"self",
".",
"page",
".",
"slug",
")",
"elif",
"self",
".",
"page",
".",
"regex",
"and",
"self",
".",
"value_regex",
"and",
"self",
".",
"page",
".",
"show_regex",
":",
"return",
"u'{0}/{1}'",
".",
"format",
"(",
"self",
".",
"page",
".",
"slug",
",",
"self",
".",
"value_regex",
")",
"elif",
"not",
"self",
".",
"hide_in_url",
":",
"return",
"u'{0}'",
".",
"format",
"(",
"self",
".",
"name",
")",
"return",
"\"\""
] | It returns node's slug | [
"It",
"returns",
"node",
"s",
"slug"
] | f2c1ced33e6c211bb52a25a7d48155e39fbdc088 | https://github.com/20tab/twentytab-tree/blob/f2c1ced33e6c211bb52a25a7d48155e39fbdc088/tree/models.py#L97-L117 |
250,077 | 20tab/twentytab-tree | tree/models.py | Node.get_pattern | def get_pattern(self):
"""
It returns its url pattern
"""
if self.is_root_node():
return ""
else:
parent_pattern = self.parent.get_pattern()
if parent_pattern != "":
parent_pattern = u"{}".format(parent_pattern)
if not self.page and not self.is_leaf_node():
if self.hide_in_url:
return u'{0}'.format(parent_pattern)
else:
return u'{0}{1}'.format(parent_pattern, self.name)
else:
if self.is_leaf_node() and self.page.regex and self.page.show_regex:
return u'{0}{1}/{2}'.format(parent_pattern, self.page.slug, self.page.regex)
elif self.is_leaf_node() and (not self.page.regex or not self.page.show_regex):
return u'{0}{1}/'.format(parent_pattern, self.page.slug)
elif not self.is_leaf_node() and self.page.regex and self.page.show_regex:
return u'{0}{1}/{2}/'.format(parent_pattern, self.page.slug, self.page.regex)
else:
return u'{0}{1}/'.format(parent_pattern, self.page.slug) | python | def get_pattern(self):
"""
It returns its url pattern
"""
if self.is_root_node():
return ""
else:
parent_pattern = self.parent.get_pattern()
if parent_pattern != "":
parent_pattern = u"{}".format(parent_pattern)
if not self.page and not self.is_leaf_node():
if self.hide_in_url:
return u'{0}'.format(parent_pattern)
else:
return u'{0}{1}'.format(parent_pattern, self.name)
else:
if self.is_leaf_node() and self.page.regex and self.page.show_regex:
return u'{0}{1}/{2}'.format(parent_pattern, self.page.slug, self.page.regex)
elif self.is_leaf_node() and (not self.page.regex or not self.page.show_regex):
return u'{0}{1}/'.format(parent_pattern, self.page.slug)
elif not self.is_leaf_node() and self.page.regex and self.page.show_regex:
return u'{0}{1}/{2}/'.format(parent_pattern, self.page.slug, self.page.regex)
else:
return u'{0}{1}/'.format(parent_pattern, self.page.slug) | [
"def",
"get_pattern",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_root_node",
"(",
")",
":",
"return",
"\"\"",
"else",
":",
"parent_pattern",
"=",
"self",
".",
"parent",
".",
"get_pattern",
"(",
")",
"if",
"parent_pattern",
"!=",
"\"\"",
":",
"parent_pattern",
"=",
"u\"{}\"",
".",
"format",
"(",
"parent_pattern",
")",
"if",
"not",
"self",
".",
"page",
"and",
"not",
"self",
".",
"is_leaf_node",
"(",
")",
":",
"if",
"self",
".",
"hide_in_url",
":",
"return",
"u'{0}'",
".",
"format",
"(",
"parent_pattern",
")",
"else",
":",
"return",
"u'{0}{1}'",
".",
"format",
"(",
"parent_pattern",
",",
"self",
".",
"name",
")",
"else",
":",
"if",
"self",
".",
"is_leaf_node",
"(",
")",
"and",
"self",
".",
"page",
".",
"regex",
"and",
"self",
".",
"page",
".",
"show_regex",
":",
"return",
"u'{0}{1}/{2}'",
".",
"format",
"(",
"parent_pattern",
",",
"self",
".",
"page",
".",
"slug",
",",
"self",
".",
"page",
".",
"regex",
")",
"elif",
"self",
".",
"is_leaf_node",
"(",
")",
"and",
"(",
"not",
"self",
".",
"page",
".",
"regex",
"or",
"not",
"self",
".",
"page",
".",
"show_regex",
")",
":",
"return",
"u'{0}{1}/'",
".",
"format",
"(",
"parent_pattern",
",",
"self",
".",
"page",
".",
"slug",
")",
"elif",
"not",
"self",
".",
"is_leaf_node",
"(",
")",
"and",
"self",
".",
"page",
".",
"regex",
"and",
"self",
".",
"page",
".",
"show_regex",
":",
"return",
"u'{0}{1}/{2}/'",
".",
"format",
"(",
"parent_pattern",
",",
"self",
".",
"page",
".",
"slug",
",",
"self",
".",
"page",
".",
"regex",
")",
"else",
":",
"return",
"u'{0}{1}/'",
".",
"format",
"(",
"parent_pattern",
",",
"self",
".",
"page",
".",
"slug",
")"
] | It returns its url pattern | [
"It",
"returns",
"its",
"url",
"pattern"
] | f2c1ced33e6c211bb52a25a7d48155e39fbdc088 | https://github.com/20tab/twentytab-tree/blob/f2c1ced33e6c211bb52a25a7d48155e39fbdc088/tree/models.py#L142-L165 |
250,078 | 20tab/twentytab-tree | tree/models.py | Node.presentation_type | def presentation_type(self):
"""
It returns page's presentation_type
"""
if self.page and self.page.presentation_type:
return self.page.presentation_type
return "" | python | def presentation_type(self):
"""
It returns page's presentation_type
"""
if self.page and self.page.presentation_type:
return self.page.presentation_type
return "" | [
"def",
"presentation_type",
"(",
"self",
")",
":",
"if",
"self",
".",
"page",
"and",
"self",
".",
"page",
".",
"presentation_type",
":",
"return",
"self",
".",
"page",
".",
"presentation_type",
"return",
"\"\""
] | It returns page's presentation_type | [
"It",
"returns",
"page",
"s",
"presentation_type"
] | f2c1ced33e6c211bb52a25a7d48155e39fbdc088 | https://github.com/20tab/twentytab-tree/blob/f2c1ced33e6c211bb52a25a7d48155e39fbdc088/tree/models.py#L174-L180 |
250,079 | 20tab/twentytab-tree | tree/models.py | Page.view_path | def view_path(self):
"""
It returns view's view path
"""
if self.scheme_name is None or self.scheme_name == "":
return self.view.view_path
else:
return self.scheme_name | python | def view_path(self):
"""
It returns view's view path
"""
if self.scheme_name is None or self.scheme_name == "":
return self.view.view_path
else:
return self.scheme_name | [
"def",
"view_path",
"(",
"self",
")",
":",
"if",
"self",
".",
"scheme_name",
"is",
"None",
"or",
"self",
".",
"scheme_name",
"==",
"\"\"",
":",
"return",
"self",
".",
"view",
".",
"view_path",
"else",
":",
"return",
"self",
".",
"scheme_name"
] | It returns view's view path | [
"It",
"returns",
"view",
"s",
"view",
"path"
] | f2c1ced33e6c211bb52a25a7d48155e39fbdc088 | https://github.com/20tab/twentytab-tree/blob/f2c1ced33e6c211bb52a25a7d48155e39fbdc088/tree/models.py#L238-L245 |
250,080 | 20tab/twentytab-tree | tree/models.py | Page.get_absolute_url | def get_absolute_url(self):
"""
It returns absolute url defined by node related to this page
"""
try:
node = Node.objects.select_related().filter(page=self)[0]
return node.get_absolute_url()
except Exception, e:
raise ValueError(u"Error in {0}.{1}: {2}".format(self.__module__, self.__class__.__name__, e))
return u"" | python | def get_absolute_url(self):
"""
It returns absolute url defined by node related to this page
"""
try:
node = Node.objects.select_related().filter(page=self)[0]
return node.get_absolute_url()
except Exception, e:
raise ValueError(u"Error in {0}.{1}: {2}".format(self.__module__, self.__class__.__name__, e))
return u"" | [
"def",
"get_absolute_url",
"(",
"self",
")",
":",
"try",
":",
"node",
"=",
"Node",
".",
"objects",
".",
"select_related",
"(",
")",
".",
"filter",
"(",
"page",
"=",
"self",
")",
"[",
"0",
"]",
"return",
"node",
".",
"get_absolute_url",
"(",
")",
"except",
"Exception",
",",
"e",
":",
"raise",
"ValueError",
"(",
"u\"Error in {0}.{1}: {2}\"",
".",
"format",
"(",
"self",
".",
"__module__",
",",
"self",
".",
"__class__",
".",
"__name__",
",",
"e",
")",
")",
"return",
"u\"\""
] | It returns absolute url defined by node related to this page | [
"It",
"returns",
"absolute",
"url",
"defined",
"by",
"node",
"related",
"to",
"this",
"page"
] | f2c1ced33e6c211bb52a25a7d48155e39fbdc088 | https://github.com/20tab/twentytab-tree/blob/f2c1ced33e6c211bb52a25a7d48155e39fbdc088/tree/models.py#L247-L256 |
250,081 | 20tab/twentytab-tree | tree/models.py | Page.check_static_vars | def check_static_vars(self, node):
"""
This function check if a Page has static vars
"""
if self.static_vars == "" and hasattr(self, "template"):
self.static_vars = {
'upy_context': {
'template_name': u"{}/{}".format(self.template.app_name, self.template.file_name)
}
}
elif hasattr(self, "template"):
self.static_vars = literal_eval(self.static_vars)
self.static_vars['upy_context']['template_name'] = u"{}/{}".format(
self.template.app_name, self.template.file_name
)
self.static_vars['upy_context']['NODE'] = node
self.static_vars['upy_context']['PAGE'] = self | python | def check_static_vars(self, node):
"""
This function check if a Page has static vars
"""
if self.static_vars == "" and hasattr(self, "template"):
self.static_vars = {
'upy_context': {
'template_name': u"{}/{}".format(self.template.app_name, self.template.file_name)
}
}
elif hasattr(self, "template"):
self.static_vars = literal_eval(self.static_vars)
self.static_vars['upy_context']['template_name'] = u"{}/{}".format(
self.template.app_name, self.template.file_name
)
self.static_vars['upy_context']['NODE'] = node
self.static_vars['upy_context']['PAGE'] = self | [
"def",
"check_static_vars",
"(",
"self",
",",
"node",
")",
":",
"if",
"self",
".",
"static_vars",
"==",
"\"\"",
"and",
"hasattr",
"(",
"self",
",",
"\"template\"",
")",
":",
"self",
".",
"static_vars",
"=",
"{",
"'upy_context'",
":",
"{",
"'template_name'",
":",
"u\"{}/{}\"",
".",
"format",
"(",
"self",
".",
"template",
".",
"app_name",
",",
"self",
".",
"template",
".",
"file_name",
")",
"}",
"}",
"elif",
"hasattr",
"(",
"self",
",",
"\"template\"",
")",
":",
"self",
".",
"static_vars",
"=",
"literal_eval",
"(",
"self",
".",
"static_vars",
")",
"self",
".",
"static_vars",
"[",
"'upy_context'",
"]",
"[",
"'template_name'",
"]",
"=",
"u\"{}/{}\"",
".",
"format",
"(",
"self",
".",
"template",
".",
"app_name",
",",
"self",
".",
"template",
".",
"file_name",
")",
"self",
".",
"static_vars",
"[",
"'upy_context'",
"]",
"[",
"'NODE'",
"]",
"=",
"node",
"self",
".",
"static_vars",
"[",
"'upy_context'",
"]",
"[",
"'PAGE'",
"]",
"=",
"self"
] | This function check if a Page has static vars | [
"This",
"function",
"check",
"if",
"a",
"Page",
"has",
"static",
"vars"
] | f2c1ced33e6c211bb52a25a7d48155e39fbdc088 | https://github.com/20tab/twentytab-tree/blob/f2c1ced33e6c211bb52a25a7d48155e39fbdc088/tree/models.py#L258-L274 |
250,082 | jamieleshaw/lurklib | lurklib/optional.py | _Optional.rehash | def rehash(self):
"""
Rehashes the IRCd's configuration file.
"""
with self.lock:
self.send('REHASH')
if self.readable():
msg = self._recv(expected_replies=('382',))
if msg[0] == '382':
pass | python | def rehash(self):
"""
Rehashes the IRCd's configuration file.
"""
with self.lock:
self.send('REHASH')
if self.readable():
msg = self._recv(expected_replies=('382',))
if msg[0] == '382':
pass | [
"def",
"rehash",
"(",
"self",
")",
":",
"with",
"self",
".",
"lock",
":",
"self",
".",
"send",
"(",
"'REHASH'",
")",
"if",
"self",
".",
"readable",
"(",
")",
":",
"msg",
"=",
"self",
".",
"_recv",
"(",
"expected_replies",
"=",
"(",
"'382'",
",",
")",
")",
"if",
"msg",
"[",
"0",
"]",
"==",
"'382'",
":",
"pass"
] | Rehashes the IRCd's configuration file. | [
"Rehashes",
"the",
"IRCd",
"s",
"configuration",
"file",
"."
] | a861f35d880140422103dd78ec3239814e85fd7e | https://github.com/jamieleshaw/lurklib/blob/a861f35d880140422103dd78ec3239814e85fd7e/lurklib/optional.py#L39-L48 |
250,083 | globocom/globomap-loader-api-client | globomap_loader_api_client/auth.py | Auth.generate_token | def generate_token(self):
"""Make request in API to generate a token."""
response = self._make_request()
self.auth = response
self.token = response['token'] | python | def generate_token(self):
"""Make request in API to generate a token."""
response = self._make_request()
self.auth = response
self.token = response['token'] | [
"def",
"generate_token",
"(",
"self",
")",
":",
"response",
"=",
"self",
".",
"_make_request",
"(",
")",
"self",
".",
"auth",
"=",
"response",
"self",
".",
"token",
"=",
"response",
"[",
"'token'",
"]"
] | Make request in API to generate a token. | [
"Make",
"request",
"in",
"API",
"to",
"generate",
"a",
"token",
"."
] | b12347ca77d245de1abd604d1b694162156570e6 | https://github.com/globocom/globomap-loader-api-client/blob/b12347ca77d245de1abd604d1b694162156570e6/globomap_loader_api_client/auth.py#L45-L50 |
250,084 | ulf1/oxyba | oxyba/linreg_ols_svd.py | linreg_ols_svd | def linreg_ols_svd(y, X, rcond=1e-15):
"""Linear Regression, OLS, inv by SVD
Properties
----------
* Numpy's lstsq is based on LAPACK's _gelsd what applies SVD
* SVD inverse might be slow (complex Landau O)
* speed might decline during forward selection
* no overhead or other computations
Example:
--------
beta = lin_ols_svd(y,X)
"""
import numpy as np
try: # solve OLS formula
beta, _, _, singu = np.linalg.lstsq(b=y, a=X, rcond=rcond)
except np.linalg.LinAlgError:
print("LinAlgError: computation does not converge.")
return None
# check singu
if np.any(singu < 0.0):
print("Error: A singular value of X is numerically not well-behaved.")
return None
# return estimated model parameters
return beta | python | def linreg_ols_svd(y, X, rcond=1e-15):
"""Linear Regression, OLS, inv by SVD
Properties
----------
* Numpy's lstsq is based on LAPACK's _gelsd what applies SVD
* SVD inverse might be slow (complex Landau O)
* speed might decline during forward selection
* no overhead or other computations
Example:
--------
beta = lin_ols_svd(y,X)
"""
import numpy as np
try: # solve OLS formula
beta, _, _, singu = np.linalg.lstsq(b=y, a=X, rcond=rcond)
except np.linalg.LinAlgError:
print("LinAlgError: computation does not converge.")
return None
# check singu
if np.any(singu < 0.0):
print("Error: A singular value of X is numerically not well-behaved.")
return None
# return estimated model parameters
return beta | [
"def",
"linreg_ols_svd",
"(",
"y",
",",
"X",
",",
"rcond",
"=",
"1e-15",
")",
":",
"import",
"numpy",
"as",
"np",
"try",
":",
"# solve OLS formula",
"beta",
",",
"_",
",",
"_",
",",
"singu",
"=",
"np",
".",
"linalg",
".",
"lstsq",
"(",
"b",
"=",
"y",
",",
"a",
"=",
"X",
",",
"rcond",
"=",
"rcond",
")",
"except",
"np",
".",
"linalg",
".",
"LinAlgError",
":",
"print",
"(",
"\"LinAlgError: computation does not converge.\"",
")",
"return",
"None",
"# check singu",
"if",
"np",
".",
"any",
"(",
"singu",
"<",
"0.0",
")",
":",
"print",
"(",
"\"Error: A singular value of X is numerically not well-behaved.\"",
")",
"return",
"None",
"# return estimated model parameters",
"return",
"beta"
] | Linear Regression, OLS, inv by SVD
Properties
----------
* Numpy's lstsq is based on LAPACK's _gelsd what applies SVD
* SVD inverse might be slow (complex Landau O)
* speed might decline during forward selection
* no overhead or other computations
Example:
--------
beta = lin_ols_svd(y,X) | [
"Linear",
"Regression",
"OLS",
"inv",
"by",
"SVD"
] | b3043116050de275124365cb11e7df91fb40169d | https://github.com/ulf1/oxyba/blob/b3043116050de275124365cb11e7df91fb40169d/oxyba/linreg_ols_svd.py#L2-L29 |
250,085 | b3j0f/conf | b3j0f/conf/parser/resolver/lang/py.py | genrepl | def genrepl(scope):
"""Replacement function with specific scope."""
def repl(match):
"""Internal replacement function."""
name = match.group('name')
value = lookup(name, scope=scope)
result = name.replace('.', '_')
scope[result] = value
return result
return repl | python | def genrepl(scope):
"""Replacement function with specific scope."""
def repl(match):
"""Internal replacement function."""
name = match.group('name')
value = lookup(name, scope=scope)
result = name.replace('.', '_')
scope[result] = value
return result
return repl | [
"def",
"genrepl",
"(",
"scope",
")",
":",
"def",
"repl",
"(",
"match",
")",
":",
"\"\"\"Internal replacement function.\"\"\"",
"name",
"=",
"match",
".",
"group",
"(",
"'name'",
")",
"value",
"=",
"lookup",
"(",
"name",
",",
"scope",
"=",
"scope",
")",
"result",
"=",
"name",
".",
"replace",
"(",
"'.'",
",",
"'_'",
")",
"scope",
"[",
"result",
"]",
"=",
"value",
"return",
"result",
"return",
"repl"
] | Replacement function with specific scope. | [
"Replacement",
"function",
"with",
"specific",
"scope",
"."
] | 18dd6d5d6560f9b202793739e2330a2181163511 | https://github.com/b3j0f/conf/blob/18dd6d5d6560f9b202793739e2330a2181163511/b3j0f/conf/parser/resolver/lang/py.py#L48-L63 |
250,086 | bcho/yufou | yufou/radar.py | image | def image(radar, at=None):
'''Retrieve a radar image.
:param radar: radar station no.
:param at: stat datetime, defaults to now.
'''
at = round_to_5_minutes(at or datetime.utcnow())
return ''.join([
'http://image.nmc.cn/product',
'/{0}'.format(at.year),
'/{0}'.format(at.strftime('%Y%m')),
'/{0}'.format(at.strftime('%Y%m%d')),
'/RDCP/medium/SEVP_AOC_RDCP_SLDAS_EBREF_',
'{0}_L88_PI_'.format(radar),
'{0}00000.GIF'.format(at.strftime('%Y%m%d%H%M'))
]) | python | def image(radar, at=None):
'''Retrieve a radar image.
:param radar: radar station no.
:param at: stat datetime, defaults to now.
'''
at = round_to_5_minutes(at or datetime.utcnow())
return ''.join([
'http://image.nmc.cn/product',
'/{0}'.format(at.year),
'/{0}'.format(at.strftime('%Y%m')),
'/{0}'.format(at.strftime('%Y%m%d')),
'/RDCP/medium/SEVP_AOC_RDCP_SLDAS_EBREF_',
'{0}_L88_PI_'.format(radar),
'{0}00000.GIF'.format(at.strftime('%Y%m%d%H%M'))
]) | [
"def",
"image",
"(",
"radar",
",",
"at",
"=",
"None",
")",
":",
"at",
"=",
"round_to_5_minutes",
"(",
"at",
"or",
"datetime",
".",
"utcnow",
"(",
")",
")",
"return",
"''",
".",
"join",
"(",
"[",
"'http://image.nmc.cn/product'",
",",
"'/{0}'",
".",
"format",
"(",
"at",
".",
"year",
")",
",",
"'/{0}'",
".",
"format",
"(",
"at",
".",
"strftime",
"(",
"'%Y%m'",
")",
")",
",",
"'/{0}'",
".",
"format",
"(",
"at",
".",
"strftime",
"(",
"'%Y%m%d'",
")",
")",
",",
"'/RDCP/medium/SEVP_AOC_RDCP_SLDAS_EBREF_'",
",",
"'{0}_L88_PI_'",
".",
"format",
"(",
"radar",
")",
",",
"'{0}00000.GIF'",
".",
"format",
"(",
"at",
".",
"strftime",
"(",
"'%Y%m%d%H%M'",
")",
")",
"]",
")"
] | Retrieve a radar image.
:param radar: radar station no.
:param at: stat datetime, defaults to now. | [
"Retrieve",
"a",
"radar",
"image",
"."
] | 008e38468f17cf6bc616b30b944bb9395dbaface | https://github.com/bcho/yufou/blob/008e38468f17cf6bc616b30b944bb9395dbaface/yufou/radar.py#L26-L42 |
250,087 | nickmilon/Hellas | Hellas/Olympia.py | pickle_compress | def pickle_compress(obj, print_compression_info=False):
"""pickle and compress an object"""
p = pickle.dumps(obj)
c = zlib.compress(p)
if print_compression_info:
print ("len = {:,d} compr={:,d} ratio:{:.6f}".format(len(p), len(c), float(len(c))/len(p)))
return c | python | def pickle_compress(obj, print_compression_info=False):
"""pickle and compress an object"""
p = pickle.dumps(obj)
c = zlib.compress(p)
if print_compression_info:
print ("len = {:,d} compr={:,d} ratio:{:.6f}".format(len(p), len(c), float(len(c))/len(p)))
return c | [
"def",
"pickle_compress",
"(",
"obj",
",",
"print_compression_info",
"=",
"False",
")",
":",
"p",
"=",
"pickle",
".",
"dumps",
"(",
"obj",
")",
"c",
"=",
"zlib",
".",
"compress",
"(",
"p",
")",
"if",
"print_compression_info",
":",
"print",
"(",
"\"len = {:,d} compr={:,d} ratio:{:.6f}\"",
".",
"format",
"(",
"len",
"(",
"p",
")",
",",
"len",
"(",
"c",
")",
",",
"float",
"(",
"len",
"(",
"c",
")",
")",
"/",
"len",
"(",
"p",
")",
")",
")",
"return",
"c"
] | pickle and compress an object | [
"pickle",
"and",
"compress",
"an",
"object"
] | 542e4778692fbec90753942946f20100412ec9ee | https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Olympia.py#L9-L15 |
250,088 | ahobsonsayers/bthomehub5-devicelist | bthomehub5_devicelist/bthomehub5_devicelist.py | get_devicelist | def get_devicelist(home_hub_ip='192.168.1.254'):
"""Retrieve data from BT Home Hub 5 and return parsed result.
"""
url = 'http://{}/'.format(home_hub_ip)
try:
response = requests.get(url, timeout=5)
except requests.exceptions.Timeout:
_LOGGER.exception("Connection to the router timed out")
return
if response.status_code == 200:
return parse_devicelist(response.text)
else:
_LOGGER.error("Invalid response from Home Hub: %s", response) | python | def get_devicelist(home_hub_ip='192.168.1.254'):
"""Retrieve data from BT Home Hub 5 and return parsed result.
"""
url = 'http://{}/'.format(home_hub_ip)
try:
response = requests.get(url, timeout=5)
except requests.exceptions.Timeout:
_LOGGER.exception("Connection to the router timed out")
return
if response.status_code == 200:
return parse_devicelist(response.text)
else:
_LOGGER.error("Invalid response from Home Hub: %s", response) | [
"def",
"get_devicelist",
"(",
"home_hub_ip",
"=",
"'192.168.1.254'",
")",
":",
"url",
"=",
"'http://{}/'",
".",
"format",
"(",
"home_hub_ip",
")",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"timeout",
"=",
"5",
")",
"except",
"requests",
".",
"exceptions",
".",
"Timeout",
":",
"_LOGGER",
".",
"exception",
"(",
"\"Connection to the router timed out\"",
")",
"return",
"if",
"response",
".",
"status_code",
"==",
"200",
":",
"return",
"parse_devicelist",
"(",
"response",
".",
"text",
")",
"else",
":",
"_LOGGER",
".",
"error",
"(",
"\"Invalid response from Home Hub: %s\"",
",",
"response",
")"
] | Retrieve data from BT Home Hub 5 and return parsed result. | [
"Retrieve",
"data",
"from",
"BT",
"Home",
"Hub",
"5",
"and",
"return",
"parsed",
"result",
"."
] | 941b553fab7ce49b0c7ff7f1e10023d0a212d455 | https://github.com/ahobsonsayers/bthomehub5-devicelist/blob/941b553fab7ce49b0c7ff7f1e10023d0a212d455/bthomehub5_devicelist/bthomehub5_devicelist.py#L8-L22 |
250,089 | ahobsonsayers/bthomehub5-devicelist | bthomehub5_devicelist/bthomehub5_devicelist.py | parse_devicelist | def parse_devicelist(data_str):
"""Parse the BT Home Hub 5 data format."""
p = HTMLTableParser()
p.feed(data_str)
known_devices = p.tables[9]
devices = {}
for device in known_devices:
if len(device) == 5 and device[2] != '':
devices[device[2]] = device[1]
return devices | python | def parse_devicelist(data_str):
"""Parse the BT Home Hub 5 data format."""
p = HTMLTableParser()
p.feed(data_str)
known_devices = p.tables[9]
devices = {}
for device in known_devices:
if len(device) == 5 and device[2] != '':
devices[device[2]] = device[1]
return devices | [
"def",
"parse_devicelist",
"(",
"data_str",
")",
":",
"p",
"=",
"HTMLTableParser",
"(",
")",
"p",
".",
"feed",
"(",
"data_str",
")",
"known_devices",
"=",
"p",
".",
"tables",
"[",
"9",
"]",
"devices",
"=",
"{",
"}",
"for",
"device",
"in",
"known_devices",
":",
"if",
"len",
"(",
"device",
")",
"==",
"5",
"and",
"device",
"[",
"2",
"]",
"!=",
"''",
":",
"devices",
"[",
"device",
"[",
"2",
"]",
"]",
"=",
"device",
"[",
"1",
"]",
"return",
"devices"
] | Parse the BT Home Hub 5 data format. | [
"Parse",
"the",
"BT",
"Home",
"Hub",
"5",
"data",
"format",
"."
] | 941b553fab7ce49b0c7ff7f1e10023d0a212d455 | https://github.com/ahobsonsayers/bthomehub5-devicelist/blob/941b553fab7ce49b0c7ff7f1e10023d0a212d455/bthomehub5_devicelist/bthomehub5_devicelist.py#L25-L39 |
250,090 | minhhoit/yacms | yacms/generic/templatetags/comment_tags.py | comments_for | def comments_for(context, obj):
"""
Provides a generic context variable name for the object that
comments are being rendered for.
"""
form_class = import_dotted_path(settings.COMMENT_FORM_CLASS)
form = form_class(context["request"], obj)
context_form = context.get("posted_comment_form", form)
context.update({
'posted_comment_form':
context_form if context_form.target_object == obj else form,
'unposted_comment_form': form,
'comment_url': reverse("comment"),
'object_for_comments': obj,
})
return context | python | def comments_for(context, obj):
"""
Provides a generic context variable name for the object that
comments are being rendered for.
"""
form_class = import_dotted_path(settings.COMMENT_FORM_CLASS)
form = form_class(context["request"], obj)
context_form = context.get("posted_comment_form", form)
context.update({
'posted_comment_form':
context_form if context_form.target_object == obj else form,
'unposted_comment_form': form,
'comment_url': reverse("comment"),
'object_for_comments': obj,
})
return context | [
"def",
"comments_for",
"(",
"context",
",",
"obj",
")",
":",
"form_class",
"=",
"import_dotted_path",
"(",
"settings",
".",
"COMMENT_FORM_CLASS",
")",
"form",
"=",
"form_class",
"(",
"context",
"[",
"\"request\"",
"]",
",",
"obj",
")",
"context_form",
"=",
"context",
".",
"get",
"(",
"\"posted_comment_form\"",
",",
"form",
")",
"context",
".",
"update",
"(",
"{",
"'posted_comment_form'",
":",
"context_form",
"if",
"context_form",
".",
"target_object",
"==",
"obj",
"else",
"form",
",",
"'unposted_comment_form'",
":",
"form",
",",
"'comment_url'",
":",
"reverse",
"(",
"\"comment\"",
")",
",",
"'object_for_comments'",
":",
"obj",
",",
"}",
")",
"return",
"context"
] | Provides a generic context variable name for the object that
comments are being rendered for. | [
"Provides",
"a",
"generic",
"context",
"variable",
"name",
"for",
"the",
"object",
"that",
"comments",
"are",
"being",
"rendered",
"for",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/generic/templatetags/comment_tags.py#L19-L34 |
250,091 | minhhoit/yacms | yacms/generic/templatetags/comment_tags.py | comment_thread | def comment_thread(context, parent):
"""
Return a list of child comments for the given parent, storing all
comments in a dict in the context when first called, using parents
as keys for retrieval on subsequent recursive calls from the
comments template.
"""
if "all_comments" not in context:
comments = defaultdict(list)
if "request" in context and context["request"].user.is_staff:
comments_queryset = parent.comments.all()
else:
comments_queryset = parent.comments.visible()
for comment in comments_queryset.select_related("user"):
comments[comment.replied_to_id].append(comment)
context["all_comments"] = comments
parent_id = parent.id if isinstance(parent, ThreadedComment) else None
try:
replied_to = int(context["request"].POST["replied_to"])
except KeyError:
replied_to = 0
context.update({
"comments_for_thread": context["all_comments"].get(parent_id, []),
"no_comments": parent_id is None and not context["all_comments"],
"replied_to": replied_to,
})
return context | python | def comment_thread(context, parent):
"""
Return a list of child comments for the given parent, storing all
comments in a dict in the context when first called, using parents
as keys for retrieval on subsequent recursive calls from the
comments template.
"""
if "all_comments" not in context:
comments = defaultdict(list)
if "request" in context and context["request"].user.is_staff:
comments_queryset = parent.comments.all()
else:
comments_queryset = parent.comments.visible()
for comment in comments_queryset.select_related("user"):
comments[comment.replied_to_id].append(comment)
context["all_comments"] = comments
parent_id = parent.id if isinstance(parent, ThreadedComment) else None
try:
replied_to = int(context["request"].POST["replied_to"])
except KeyError:
replied_to = 0
context.update({
"comments_for_thread": context["all_comments"].get(parent_id, []),
"no_comments": parent_id is None and not context["all_comments"],
"replied_to": replied_to,
})
return context | [
"def",
"comment_thread",
"(",
"context",
",",
"parent",
")",
":",
"if",
"\"all_comments\"",
"not",
"in",
"context",
":",
"comments",
"=",
"defaultdict",
"(",
"list",
")",
"if",
"\"request\"",
"in",
"context",
"and",
"context",
"[",
"\"request\"",
"]",
".",
"user",
".",
"is_staff",
":",
"comments_queryset",
"=",
"parent",
".",
"comments",
".",
"all",
"(",
")",
"else",
":",
"comments_queryset",
"=",
"parent",
".",
"comments",
".",
"visible",
"(",
")",
"for",
"comment",
"in",
"comments_queryset",
".",
"select_related",
"(",
"\"user\"",
")",
":",
"comments",
"[",
"comment",
".",
"replied_to_id",
"]",
".",
"append",
"(",
"comment",
")",
"context",
"[",
"\"all_comments\"",
"]",
"=",
"comments",
"parent_id",
"=",
"parent",
".",
"id",
"if",
"isinstance",
"(",
"parent",
",",
"ThreadedComment",
")",
"else",
"None",
"try",
":",
"replied_to",
"=",
"int",
"(",
"context",
"[",
"\"request\"",
"]",
".",
"POST",
"[",
"\"replied_to\"",
"]",
")",
"except",
"KeyError",
":",
"replied_to",
"=",
"0",
"context",
".",
"update",
"(",
"{",
"\"comments_for_thread\"",
":",
"context",
"[",
"\"all_comments\"",
"]",
".",
"get",
"(",
"parent_id",
",",
"[",
"]",
")",
",",
"\"no_comments\"",
":",
"parent_id",
"is",
"None",
"and",
"not",
"context",
"[",
"\"all_comments\"",
"]",
",",
"\"replied_to\"",
":",
"replied_to",
",",
"}",
")",
"return",
"context"
] | Return a list of child comments for the given parent, storing all
comments in a dict in the context when first called, using parents
as keys for retrieval on subsequent recursive calls from the
comments template. | [
"Return",
"a",
"list",
"of",
"child",
"comments",
"for",
"the",
"given",
"parent",
"storing",
"all",
"comments",
"in",
"a",
"dict",
"in",
"the",
"context",
"when",
"first",
"called",
"using",
"parents",
"as",
"keys",
"for",
"retrieval",
"on",
"subsequent",
"recursive",
"calls",
"from",
"the",
"comments",
"template",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/generic/templatetags/comment_tags.py#L38-L64 |
250,092 | minhhoit/yacms | yacms/generic/templatetags/comment_tags.py | recent_comments | def recent_comments(context):
"""
Dashboard widget for displaying recent comments.
"""
latest = context["settings"].COMMENTS_NUM_LATEST
comments = ThreadedComment.objects.all().select_related("user")
context["comments"] = comments.order_by("-id")[:latest]
return context | python | def recent_comments(context):
"""
Dashboard widget for displaying recent comments.
"""
latest = context["settings"].COMMENTS_NUM_LATEST
comments = ThreadedComment.objects.all().select_related("user")
context["comments"] = comments.order_by("-id")[:latest]
return context | [
"def",
"recent_comments",
"(",
"context",
")",
":",
"latest",
"=",
"context",
"[",
"\"settings\"",
"]",
".",
"COMMENTS_NUM_LATEST",
"comments",
"=",
"ThreadedComment",
".",
"objects",
".",
"all",
"(",
")",
".",
"select_related",
"(",
"\"user\"",
")",
"context",
"[",
"\"comments\"",
"]",
"=",
"comments",
".",
"order_by",
"(",
"\"-id\"",
")",
"[",
":",
"latest",
"]",
"return",
"context"
] | Dashboard widget for displaying recent comments. | [
"Dashboard",
"widget",
"for",
"displaying",
"recent",
"comments",
"."
] | 2921b706b7107c6e8c5f2bbf790ff11f85a2167f | https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/generic/templatetags/comment_tags.py#L69-L76 |
250,093 | kalekundert/nonstdlib | nonstdlib/debug.py | log_level | def log_level(level):
"""
Attempt to convert the given argument into a log level.
Log levels are represented as integers, where higher values are more
severe. If the given level is already an integer, it is simply returned.
If the given level is a string that can be converted into an integer, it is
converted and that value is returned. Finally, if the given level is a
string naming one of the levels defined in the logging module, return that
level. If none of those conditions are met, raise a ValueError.
"""
from six import string_types
if isinstance(level, int):
return level
if isinstance(level, string_types):
try: return int(level)
except ValueError: pass
try: return getattr(logging, level.upper())
except AttributeError: pass
raise ValueError("cannot convert '{}' into a log level".format(level)) | python | def log_level(level):
"""
Attempt to convert the given argument into a log level.
Log levels are represented as integers, where higher values are more
severe. If the given level is already an integer, it is simply returned.
If the given level is a string that can be converted into an integer, it is
converted and that value is returned. Finally, if the given level is a
string naming one of the levels defined in the logging module, return that
level. If none of those conditions are met, raise a ValueError.
"""
from six import string_types
if isinstance(level, int):
return level
if isinstance(level, string_types):
try: return int(level)
except ValueError: pass
try: return getattr(logging, level.upper())
except AttributeError: pass
raise ValueError("cannot convert '{}' into a log level".format(level)) | [
"def",
"log_level",
"(",
"level",
")",
":",
"from",
"six",
"import",
"string_types",
"if",
"isinstance",
"(",
"level",
",",
"int",
")",
":",
"return",
"level",
"if",
"isinstance",
"(",
"level",
",",
"string_types",
")",
":",
"try",
":",
"return",
"int",
"(",
"level",
")",
"except",
"ValueError",
":",
"pass",
"try",
":",
"return",
"getattr",
"(",
"logging",
",",
"level",
".",
"upper",
"(",
")",
")",
"except",
"AttributeError",
":",
"pass",
"raise",
"ValueError",
"(",
"\"cannot convert '{}' into a log level\"",
".",
"format",
"(",
"level",
")",
")"
] | Attempt to convert the given argument into a log level.
Log levels are represented as integers, where higher values are more
severe. If the given level is already an integer, it is simply returned.
If the given level is a string that can be converted into an integer, it is
converted and that value is returned. Finally, if the given level is a
string naming one of the levels defined in the logging module, return that
level. If none of those conditions are met, raise a ValueError. | [
"Attempt",
"to",
"convert",
"the",
"given",
"argument",
"into",
"a",
"log",
"level",
"."
] | 3abf4a4680056d6d97f2a5988972eb9392756fb6 | https://github.com/kalekundert/nonstdlib/blob/3abf4a4680056d6d97f2a5988972eb9392756fb6/nonstdlib/debug.py#L11-L34 |
250,094 | kalekundert/nonstdlib | nonstdlib/debug.py | verbosity | def verbosity(verbosity):
"""
Convert the number of times the user specified '-v' on the command-line
into a log level.
"""
verbosity = int(verbosity)
if verbosity == 0:
return logging.WARNING
if verbosity == 1:
return logging.INFO
if verbosity == 2:
return logging.DEBUG
if verbosity >= 3:
return 0
else:
raise ValueError | python | def verbosity(verbosity):
"""
Convert the number of times the user specified '-v' on the command-line
into a log level.
"""
verbosity = int(verbosity)
if verbosity == 0:
return logging.WARNING
if verbosity == 1:
return logging.INFO
if verbosity == 2:
return logging.DEBUG
if verbosity >= 3:
return 0
else:
raise ValueError | [
"def",
"verbosity",
"(",
"verbosity",
")",
":",
"verbosity",
"=",
"int",
"(",
"verbosity",
")",
"if",
"verbosity",
"==",
"0",
":",
"return",
"logging",
".",
"WARNING",
"if",
"verbosity",
"==",
"1",
":",
"return",
"logging",
".",
"INFO",
"if",
"verbosity",
"==",
"2",
":",
"return",
"logging",
".",
"DEBUG",
"if",
"verbosity",
">=",
"3",
":",
"return",
"0",
"else",
":",
"raise",
"ValueError"
] | Convert the number of times the user specified '-v' on the command-line
into a log level. | [
"Convert",
"the",
"number",
"of",
"times",
"the",
"user",
"specified",
"-",
"v",
"on",
"the",
"command",
"-",
"line",
"into",
"a",
"log",
"level",
"."
] | 3abf4a4680056d6d97f2a5988972eb9392756fb6 | https://github.com/kalekundert/nonstdlib/blob/3abf4a4680056d6d97f2a5988972eb9392756fb6/nonstdlib/debug.py#L36-L52 |
250,095 | kalekundert/nonstdlib | nonstdlib/debug.py | config | def config(stream=sys.stderr,
level=logging.NOTSET,
format='%(levelname)s [%(name)s:%(lineno)s] %(message)s',
file=None,
file_level=None,
file_format=None):
"""
Configure logging to stream and file concurrently.
Allows setting a file and stream to log to concurrently with differing
level if desired. Must provide either stream or file.
Parameters
----------
stream: File like:
Stream to write file to [Default: sys.stderr]. If none, will not write
to stream.
level: str|int
Log level. Allowable levels are those recognized by the logging module.
format: str
Format string for the emitted logs. Default: 'log_level [logger_name]
message'
file: str
Path to write log to file. If None, will not log to file.
file_level: str|int|None
Overwrite log level for the file. Will default to `level` if None.
file_format: str|None
Overwrite format of logs for file. Will default to `format` if None.
"""
# It doesn't make sense to configure a logger with no handlers.
assert file is not None or stream is not None
# Set the formats.
stream_format = format
if file_format is None:
file_format = stream_format
# Get the log levels
stream_level = log_level(level)
if file_level is None:
file_level = stream_level
else:
file_level = log_level(file_level)
# Everything falls to pieces if I don't use basicConfig.
if stream is not None:
logging.basicConfig(stream=stream,
level=stream_level,
format=stream_format)
# Configure file
if file is not None:
file = logging.FileHandler(filename=file)
file.setLevel(file_level)
file.setFormatter(logging.Formatter(file_format))
logging.getLogger().addHandler(file)
elif file is not None:
logging.basicConfig(filename=file,
level=file_level,
format=file_format) | python | def config(stream=sys.stderr,
level=logging.NOTSET,
format='%(levelname)s [%(name)s:%(lineno)s] %(message)s',
file=None,
file_level=None,
file_format=None):
"""
Configure logging to stream and file concurrently.
Allows setting a file and stream to log to concurrently with differing
level if desired. Must provide either stream or file.
Parameters
----------
stream: File like:
Stream to write file to [Default: sys.stderr]. If none, will not write
to stream.
level: str|int
Log level. Allowable levels are those recognized by the logging module.
format: str
Format string for the emitted logs. Default: 'log_level [logger_name]
message'
file: str
Path to write log to file. If None, will not log to file.
file_level: str|int|None
Overwrite log level for the file. Will default to `level` if None.
file_format: str|None
Overwrite format of logs for file. Will default to `format` if None.
"""
# It doesn't make sense to configure a logger with no handlers.
assert file is not None or stream is not None
# Set the formats.
stream_format = format
if file_format is None:
file_format = stream_format
# Get the log levels
stream_level = log_level(level)
if file_level is None:
file_level = stream_level
else:
file_level = log_level(file_level)
# Everything falls to pieces if I don't use basicConfig.
if stream is not None:
logging.basicConfig(stream=stream,
level=stream_level,
format=stream_format)
# Configure file
if file is not None:
file = logging.FileHandler(filename=file)
file.setLevel(file_level)
file.setFormatter(logging.Formatter(file_format))
logging.getLogger().addHandler(file)
elif file is not None:
logging.basicConfig(filename=file,
level=file_level,
format=file_format) | [
"def",
"config",
"(",
"stream",
"=",
"sys",
".",
"stderr",
",",
"level",
"=",
"logging",
".",
"NOTSET",
",",
"format",
"=",
"'%(levelname)s [%(name)s:%(lineno)s] %(message)s'",
",",
"file",
"=",
"None",
",",
"file_level",
"=",
"None",
",",
"file_format",
"=",
"None",
")",
":",
"# It doesn't make sense to configure a logger with no handlers.",
"assert",
"file",
"is",
"not",
"None",
"or",
"stream",
"is",
"not",
"None",
"# Set the formats.",
"stream_format",
"=",
"format",
"if",
"file_format",
"is",
"None",
":",
"file_format",
"=",
"stream_format",
"# Get the log levels",
"stream_level",
"=",
"log_level",
"(",
"level",
")",
"if",
"file_level",
"is",
"None",
":",
"file_level",
"=",
"stream_level",
"else",
":",
"file_level",
"=",
"log_level",
"(",
"file_level",
")",
"# Everything falls to pieces if I don't use basicConfig.",
"if",
"stream",
"is",
"not",
"None",
":",
"logging",
".",
"basicConfig",
"(",
"stream",
"=",
"stream",
",",
"level",
"=",
"stream_level",
",",
"format",
"=",
"stream_format",
")",
"# Configure file",
"if",
"file",
"is",
"not",
"None",
":",
"file",
"=",
"logging",
".",
"FileHandler",
"(",
"filename",
"=",
"file",
")",
"file",
".",
"setLevel",
"(",
"file_level",
")",
"file",
".",
"setFormatter",
"(",
"logging",
".",
"Formatter",
"(",
"file_format",
")",
")",
"logging",
".",
"getLogger",
"(",
")",
".",
"addHandler",
"(",
"file",
")",
"elif",
"file",
"is",
"not",
"None",
":",
"logging",
".",
"basicConfig",
"(",
"filename",
"=",
"file",
",",
"level",
"=",
"file_level",
",",
"format",
"=",
"file_format",
")"
] | Configure logging to stream and file concurrently.
Allows setting a file and stream to log to concurrently with differing
level if desired. Must provide either stream or file.
Parameters
----------
stream: File like:
Stream to write file to [Default: sys.stderr]. If none, will not write
to stream.
level: str|int
Log level. Allowable levels are those recognized by the logging module.
format: str
Format string for the emitted logs. Default: 'log_level [logger_name]
message'
file: str
Path to write log to file. If None, will not log to file.
file_level: str|int|None
Overwrite log level for the file. Will default to `level` if None.
file_format: str|None
Overwrite format of logs for file. Will default to `format` if None. | [
"Configure",
"logging",
"to",
"stream",
"and",
"file",
"concurrently",
"."
] | 3abf4a4680056d6d97f2a5988972eb9392756fb6 | https://github.com/kalekundert/nonstdlib/blob/3abf4a4680056d6d97f2a5988972eb9392756fb6/nonstdlib/debug.py#L54-L117 |
250,096 | kalekundert/nonstdlib | nonstdlib/debug.py | _log | def _log(level, message, frame_depth=2, **kwargs):
"""
Log the given message with the given log level using a logger named based
on the scope of the calling code. This saves you time because you will be
able to see where all your log messages are being generated from without
having to type anything. This function is meant to be called by one or
more wrapper functions, so the `frame_depth` argument is provided to
specify which scope should be used to name the logger.
"""
import inspect
try:
# Inspect variables two frames up from where we currently are (by
# default). One frame up is assumed to be one of the helper methods
# defined in this module, so we aren't interested in that. Two frames
# up should be the frame that's actually trying to log something.
frame = inspect.stack()[frame_depth][0]
frame_below = inspect.stack()[frame_depth-1][0]
# Collect all the variables in the scope of the calling code, so they
# can be substituted into the message.
scope = {}
scope.update(frame.f_globals)
scope.update(frame.f_locals)
# If the calling frame is inside a class (deduced based on the presence
# of a 'self' variable), name the logger after that class. Otherwise
# if the calling frame is inside a function, name the logger after that
# function. Otherwise name it after the module of the calling scope.
self = frame.f_locals.get('self')
function = inspect.getframeinfo(frame).function
module = frame.f_globals['__name__']
if self is not None:
name = '.'.join([
self.__class__.__module__,
self.__class__.__name__
])
elif function != '<module>':
name = '.'.join([module, function])
else:
name = module
# Trick the logging module into reading file names and line numbers
# from the correct frame by monkey-patching logging.currentframe() with
# a function that returns the frame below the real calling frame (this
# indirection is necessary because the logging module will look one
# frame above whatever this function returns). Undo all this after
# logging our message, to avoid interfering with other loggers.
with _temporarily_set_logging_frame(frame_below):
logger = logging.getLogger(name)
logger.log(level, message.format(**scope), **kwargs)
finally:
try: del frame
except UnboundLocalError: pass | python | def _log(level, message, frame_depth=2, **kwargs):
"""
Log the given message with the given log level using a logger named based
on the scope of the calling code. This saves you time because you will be
able to see where all your log messages are being generated from without
having to type anything. This function is meant to be called by one or
more wrapper functions, so the `frame_depth` argument is provided to
specify which scope should be used to name the logger.
"""
import inspect
try:
# Inspect variables two frames up from where we currently are (by
# default). One frame up is assumed to be one of the helper methods
# defined in this module, so we aren't interested in that. Two frames
# up should be the frame that's actually trying to log something.
frame = inspect.stack()[frame_depth][0]
frame_below = inspect.stack()[frame_depth-1][0]
# Collect all the variables in the scope of the calling code, so they
# can be substituted into the message.
scope = {}
scope.update(frame.f_globals)
scope.update(frame.f_locals)
# If the calling frame is inside a class (deduced based on the presence
# of a 'self' variable), name the logger after that class. Otherwise
# if the calling frame is inside a function, name the logger after that
# function. Otherwise name it after the module of the calling scope.
self = frame.f_locals.get('self')
function = inspect.getframeinfo(frame).function
module = frame.f_globals['__name__']
if self is not None:
name = '.'.join([
self.__class__.__module__,
self.__class__.__name__
])
elif function != '<module>':
name = '.'.join([module, function])
else:
name = module
# Trick the logging module into reading file names and line numbers
# from the correct frame by monkey-patching logging.currentframe() with
# a function that returns the frame below the real calling frame (this
# indirection is necessary because the logging module will look one
# frame above whatever this function returns). Undo all this after
# logging our message, to avoid interfering with other loggers.
with _temporarily_set_logging_frame(frame_below):
logger = logging.getLogger(name)
logger.log(level, message.format(**scope), **kwargs)
finally:
try: del frame
except UnboundLocalError: pass | [
"def",
"_log",
"(",
"level",
",",
"message",
",",
"frame_depth",
"=",
"2",
",",
"*",
"*",
"kwargs",
")",
":",
"import",
"inspect",
"try",
":",
"# Inspect variables two frames up from where we currently are (by ",
"# default). One frame up is assumed to be one of the helper methods ",
"# defined in this module, so we aren't interested in that. Two frames ",
"# up should be the frame that's actually trying to log something.",
"frame",
"=",
"inspect",
".",
"stack",
"(",
")",
"[",
"frame_depth",
"]",
"[",
"0",
"]",
"frame_below",
"=",
"inspect",
".",
"stack",
"(",
")",
"[",
"frame_depth",
"-",
"1",
"]",
"[",
"0",
"]",
"# Collect all the variables in the scope of the calling code, so they ",
"# can be substituted into the message.",
"scope",
"=",
"{",
"}",
"scope",
".",
"update",
"(",
"frame",
".",
"f_globals",
")",
"scope",
".",
"update",
"(",
"frame",
".",
"f_locals",
")",
"# If the calling frame is inside a class (deduced based on the presence ",
"# of a 'self' variable), name the logger after that class. Otherwise ",
"# if the calling frame is inside a function, name the logger after that ",
"# function. Otherwise name it after the module of the calling scope.",
"self",
"=",
"frame",
".",
"f_locals",
".",
"get",
"(",
"'self'",
")",
"function",
"=",
"inspect",
".",
"getframeinfo",
"(",
"frame",
")",
".",
"function",
"module",
"=",
"frame",
".",
"f_globals",
"[",
"'__name__'",
"]",
"if",
"self",
"is",
"not",
"None",
":",
"name",
"=",
"'.'",
".",
"join",
"(",
"[",
"self",
".",
"__class__",
".",
"__module__",
",",
"self",
".",
"__class__",
".",
"__name__",
"]",
")",
"elif",
"function",
"!=",
"'<module>'",
":",
"name",
"=",
"'.'",
".",
"join",
"(",
"[",
"module",
",",
"function",
"]",
")",
"else",
":",
"name",
"=",
"module",
"# Trick the logging module into reading file names and line numbers ",
"# from the correct frame by monkey-patching logging.currentframe() with ",
"# a function that returns the frame below the real calling frame (this ",
"# indirection is necessary because the logging module will look one ",
"# frame above whatever this function returns). Undo all this after ",
"# logging our message, to avoid interfering with other loggers.",
"with",
"_temporarily_set_logging_frame",
"(",
"frame_below",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"name",
")",
"logger",
".",
"log",
"(",
"level",
",",
"message",
".",
"format",
"(",
"*",
"*",
"scope",
")",
",",
"*",
"*",
"kwargs",
")",
"finally",
":",
"try",
":",
"del",
"frame",
"except",
"UnboundLocalError",
":",
"pass"
] | Log the given message with the given log level using a logger named based
on the scope of the calling code. This saves you time because you will be
able to see where all your log messages are being generated from without
having to type anything. This function is meant to be called by one or
more wrapper functions, so the `frame_depth` argument is provided to
specify which scope should be used to name the logger. | [
"Log",
"the",
"given",
"message",
"with",
"the",
"given",
"log",
"level",
"using",
"a",
"logger",
"named",
"based",
"on",
"the",
"scope",
"of",
"the",
"calling",
"code",
".",
"This",
"saves",
"you",
"time",
"because",
"you",
"will",
"be",
"able",
"to",
"see",
"where",
"all",
"your",
"log",
"messages",
"are",
"being",
"generated",
"from",
"without",
"having",
"to",
"type",
"anything",
".",
"This",
"function",
"is",
"meant",
"to",
"be",
"called",
"by",
"one",
"or",
"more",
"wrapper",
"functions",
"so",
"the",
"frame_depth",
"argument",
"is",
"provided",
"to",
"specify",
"which",
"scope",
"should",
"be",
"used",
"to",
"name",
"the",
"logger",
"."
] | 3abf4a4680056d6d97f2a5988972eb9392756fb6 | https://github.com/kalekundert/nonstdlib/blob/3abf4a4680056d6d97f2a5988972eb9392756fb6/nonstdlib/debug.py#L145-L206 |
250,097 | rorr73/LifeSOSpy | lifesospy/baseunit.py | BaseUnit.on_switch_state_changed | def on_switch_state_changed(
self, func: Callable[['BaseUnit', SwitchNumber, Optional[bool]], None]):
"""
Define the switch state changed callback implementation.
Expected signature is:
switch_state_changed_callback(base_unit, switch_number, state)
base_unit: the device instance for this callback
switch_number: the switch whose state has changed
state: True if switch turned on, or False if switch turned off
"""
self._on_switch_state_changed = func | python | def on_switch_state_changed(
self, func: Callable[['BaseUnit', SwitchNumber, Optional[bool]], None]):
"""
Define the switch state changed callback implementation.
Expected signature is:
switch_state_changed_callback(base_unit, switch_number, state)
base_unit: the device instance for this callback
switch_number: the switch whose state has changed
state: True if switch turned on, or False if switch turned off
"""
self._on_switch_state_changed = func | [
"def",
"on_switch_state_changed",
"(",
"self",
",",
"func",
":",
"Callable",
"[",
"[",
"'BaseUnit'",
",",
"SwitchNumber",
",",
"Optional",
"[",
"bool",
"]",
"]",
",",
"None",
"]",
")",
":",
"self",
".",
"_on_switch_state_changed",
"=",
"func"
] | Define the switch state changed callback implementation.
Expected signature is:
switch_state_changed_callback(base_unit, switch_number, state)
base_unit: the device instance for this callback
switch_number: the switch whose state has changed
state: True if switch turned on, or False if switch turned off | [
"Define",
"the",
"switch",
"state",
"changed",
"callback",
"implementation",
"."
] | 62360fbab2e90bf04d52b547093bdab2d4e389b4 | https://github.com/rorr73/LifeSOSpy/blob/62360fbab2e90bf04d52b547093bdab2d4e389b4/lifesospy/baseunit.py#L267-L279 |
250,098 | rorr73/LifeSOSpy | lifesospy/baseunit.py | BaseUnit.start | def start(self) -> None:
"""
Start monitoring the base unit.
"""
self._shutdown = False
# Start listening (if server) / Open connection (if client)
if isinstance(self._protocol, Server):
self.create_task(self._async_listen)
elif isinstance(self._protocol, Client):
self.create_task(self._async_open)
else:
raise NotImplementedError | python | def start(self) -> None:
"""
Start monitoring the base unit.
"""
self._shutdown = False
# Start listening (if server) / Open connection (if client)
if isinstance(self._protocol, Server):
self.create_task(self._async_listen)
elif isinstance(self._protocol, Client):
self.create_task(self._async_open)
else:
raise NotImplementedError | [
"def",
"start",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_shutdown",
"=",
"False",
"# Start listening (if server) / Open connection (if client)",
"if",
"isinstance",
"(",
"self",
".",
"_protocol",
",",
"Server",
")",
":",
"self",
".",
"create_task",
"(",
"self",
".",
"_async_listen",
")",
"elif",
"isinstance",
"(",
"self",
".",
"_protocol",
",",
"Client",
")",
":",
"self",
".",
"create_task",
"(",
"self",
".",
"_async_open",
")",
"else",
":",
"raise",
"NotImplementedError"
] | Start monitoring the base unit. | [
"Start",
"monitoring",
"the",
"base",
"unit",
"."
] | 62360fbab2e90bf04d52b547093bdab2d4e389b4 | https://github.com/rorr73/LifeSOSpy/blob/62360fbab2e90bf04d52b547093bdab2d4e389b4/lifesospy/baseunit.py#L285-L298 |
250,099 | rorr73/LifeSOSpy | lifesospy/baseunit.py | BaseUnit.stop | def stop(self) -> None:
"""
Stop monitoring the base unit.
"""
self._shutdown = True
# Close connection if needed
self._protocol.close()
# Cancel any pending tasks
self.cancel_pending_tasks() | python | def stop(self) -> None:
"""
Stop monitoring the base unit.
"""
self._shutdown = True
# Close connection if needed
self._protocol.close()
# Cancel any pending tasks
self.cancel_pending_tasks() | [
"def",
"stop",
"(",
"self",
")",
"->",
"None",
":",
"self",
".",
"_shutdown",
"=",
"True",
"# Close connection if needed",
"self",
".",
"_protocol",
".",
"close",
"(",
")",
"# Cancel any pending tasks",
"self",
".",
"cancel_pending_tasks",
"(",
")"
] | Stop monitoring the base unit. | [
"Stop",
"monitoring",
"the",
"base",
"unit",
"."
] | 62360fbab2e90bf04d52b547093bdab2d4e389b4 | https://github.com/rorr73/LifeSOSpy/blob/62360fbab2e90bf04d52b547093bdab2d4e389b4/lifesospy/baseunit.py#L300-L311 |
Subsets and Splits