Dataset columns:
  identifier                       string, length 1 to 155
  parameters                       string, length 2 to 6.09k
  docstring                        string, length 11 to 63.4k
  docstring_summary                string, length 0 to 63.4k
  function                         string, length 29 to 99.8k
  function_tokens                  sequence
  start_point                      sequence
  end_point                        sequence
  language                         string class, 1 value
  docstring_language               string, length 2 to 7
  docstring_language_predictions   string, length 18 to 23
  is_langid_reliable               string class, 2 values
HttpResponseBase.set_cookie
(self, key, value='', max_age=None, expires=None, path='/', domain=None, secure=False, httponly=False, samesite=None)
Set a cookie.

``expires`` can be:
- a string in the correct format,
- a naive ``datetime.datetime`` object in UTC,
- an aware ``datetime.datetime`` object in any time zone.
If it is a ``datetime.datetime`` object then calculate ``max_age``.
Set a cookie.
def set_cookie(self, key, value='', max_age=None, expires=None, path='/',
               domain=None, secure=False, httponly=False, samesite=None):
    """
    Set a cookie.

    ``expires`` can be:
    - a string in the correct format,
    - a naive ``datetime.datetime`` object in UTC,
    - an aware ``datetime.datetime`` object in any time zone.
    If it is a ``datetime.datetime`` object then calculate ``max_age``.
    """
    self.cookies[key] = value
    if expires is not None:
        if isinstance(expires, datetime.datetime):
            if timezone.is_aware(expires):
                expires = timezone.make_naive(expires, timezone.utc)
            delta = expires - expires.utcnow()
            # Add one second so the date matches exactly (a fraction of
            # time gets lost between converting to a timedelta and
            # then the date string).
            delta = delta + datetime.timedelta(seconds=1)
            # Just set max_age - the max_age logic will set expires.
            expires = None
            max_age = max(0, delta.days * 86400 + delta.seconds)
        else:
            self.cookies[key]['expires'] = expires
    else:
        self.cookies[key]['expires'] = ''
    if max_age is not None:
        self.cookies[key]['max-age'] = int(max_age)
        # IE requires expires, so set it if hasn't been already.
        if not expires:
            self.cookies[key]['expires'] = http_date(time.time() + max_age)
    if path is not None:
        self.cookies[key]['path'] = path
    if domain is not None:
        self.cookies[key]['domain'] = domain
    if secure:
        self.cookies[key]['secure'] = True
    if httponly:
        self.cookies[key]['httponly'] = True
    if samesite:
        if samesite.lower() not in ('lax', 'none', 'strict'):
            raise ValueError('samesite must be "lax", "none", or "strict".')
        self.cookies[key]['samesite'] = samesite
[ "def", "set_cookie", "(", "self", ",", "key", ",", "value", "=", "''", ",", "max_age", "=", "None", ",", "expires", "=", "None", ",", "path", "=", "'/'", ",", "domain", "=", "None", ",", "secure", "=", "False", ",", "httponly", "=", "False", ",", "samesite", "=", "None", ")", ":", "self", ".", "cookies", "[", "key", "]", "=", "value", "if", "expires", "is", "not", "None", ":", "if", "isinstance", "(", "expires", ",", "datetime", ".", "datetime", ")", ":", "if", "timezone", ".", "is_aware", "(", "expires", ")", ":", "expires", "=", "timezone", ".", "make_naive", "(", "expires", ",", "timezone", ".", "utc", ")", "delta", "=", "expires", "-", "expires", ".", "utcnow", "(", ")", "# Add one second so the date matches exactly (a fraction of", "# time gets lost between converting to a timedelta and", "# then the date string).", "delta", "=", "delta", "+", "datetime", ".", "timedelta", "(", "seconds", "=", "1", ")", "# Just set max_age - the max_age logic will set expires.", "expires", "=", "None", "max_age", "=", "max", "(", "0", ",", "delta", ".", "days", "*", "86400", "+", "delta", ".", "seconds", ")", "else", ":", "self", ".", "cookies", "[", "key", "]", "[", "'expires'", "]", "=", "expires", "else", ":", "self", ".", "cookies", "[", "key", "]", "[", "'expires'", "]", "=", "''", "if", "max_age", "is", "not", "None", ":", "self", ".", "cookies", "[", "key", "]", "[", "'max-age'", "]", "=", "int", "(", "max_age", ")", "# IE requires expires, so set it if hasn't been already.", "if", "not", "expires", ":", "self", ".", "cookies", "[", "key", "]", "[", "'expires'", "]", "=", "http_date", "(", "time", ".", "time", "(", ")", "+", "max_age", ")", "if", "path", "is", "not", "None", ":", "self", ".", "cookies", "[", "key", "]", "[", "'path'", "]", "=", "path", "if", "domain", "is", "not", "None", ":", "self", ".", "cookies", "[", "key", "]", "[", "'domain'", "]", "=", "domain", "if", "secure", ":", "self", ".", "cookies", "[", "key", "]", "[", "'secure'", "]", "=", "True", "if", "httponly", ":", "self", ".", "cookies", "[", "key", "]", "[", "'httponly'", "]", "=", "True", "if", "samesite", ":", "if", "samesite", ".", "lower", "(", ")", "not", "in", "(", "'lax'", ",", "'none'", ",", "'strict'", ")", ":", "raise", "ValueError", "(", "'samesite must be \"lax\", \"none\", or \"strict\".'", ")", "self", ".", "cookies", "[", "key", "]", "[", "'samesite'", "]", "=", "samesite" ]
[ 191, 4 ]
[ 235, 52 ]
python
en
['en', 'error', 'th']
False
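A minimal usage sketch for HttpResponse.set_cookie from the row above, assuming Django is installed and settings are configured for standalone use; the cookie names and values are made up for illustration.

import datetime

from django.conf import settings
from django.http import HttpResponse

settings.configure()  # minimal standalone configuration (assumption)

response = HttpResponse("ok")

# max_age is in seconds; an `expires` date is also filled in for old IE.
response.set_cookie("session_hint", "abc123", max_age=3600, httponly=True)

# An aware datetime is normalized to UTC and converted into max_age internally.
expires = datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc)
response.set_cookie("promo", "1", expires=expires, samesite="Lax")

print(response.cookies["session_hint"]["max-age"])  # 3600
print(response.cookies["promo"]["samesite"])         # Lax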
HttpResponseBase.setdefault
(self, key, value)
Set a header unless it has already been set.
Set a header unless it has already been set.
def setdefault(self, key, value): """Set a header unless it has already been set.""" self.headers.setdefault(key, value)
[ "def", "setdefault", "(", "self", ",", "key", ",", "value", ")", ":", "self", ".", "headers", ".", "setdefault", "(", "key", ",", "value", ")" ]
[ 237, 4 ]
[ 239, 43 ]
python
en
['en', 'lb', 'en']
True
HttpResponseBase.make_bytes
(self, value)
Turn a value into a bytestring encoded in the output charset.
Turn a value into a bytestring encoded in the output charset.
def make_bytes(self, value):
    """Turn a value into a bytestring encoded in the output charset."""
    # Per PEP 3333, this response body must be bytes. To avoid returning
    # an instance of a subclass, this function returns `bytes(value)`.
    # This doesn't make a copy when `value` already contains bytes.

    # Handle string types -- we can't rely on force_bytes here because:
    # - Python attempts str conversion first
    # - when self._charset != 'utf-8' it re-encodes the content
    if isinstance(value, (bytes, memoryview)):
        return bytes(value)
    if isinstance(value, str):
        return bytes(value.encode(self.charset))
    # Handle non-string types.
    return str(value).encode(self.charset)
[ "def", "make_bytes", "(", "self", ",", "value", ")", ":", "# Per PEP 3333, this response body must be bytes. To avoid returning", "# an instance of a subclass, this function returns `bytes(value)`.", "# This doesn't make a copy when `value` already contains bytes.", "# Handle string types -- we can't rely on force_bytes here because:", "# - Python attempts str conversion first", "# - when self._charset != 'utf-8' it re-encodes the content", "if", "isinstance", "(", "value", ",", "(", "bytes", ",", "memoryview", ")", ")", ":", "return", "bytes", "(", "value", ")", "if", "isinstance", "(", "value", ",", "str", ")", ":", "return", "bytes", "(", "value", ".", "encode", "(", "self", ".", "charset", ")", ")", "# Handle non-string types.", "return", "str", "(", "value", ")", ".", "encode", "(", "self", ".", "charset", ")" ]
[ 261, 4 ]
[ 275, 46 ]
python
en
['en', 'en', 'en']
True
HttpResponse.serialize
(self)
Full HTTP message, including headers, as a bytestring.
Full HTTP message, including headers, as a bytestring.
def serialize(self): """Full HTTP message, including headers, as a bytestring.""" return self.serialize_headers() + b'\r\n\r\n' + self.content
[ "def", "serialize", "(", "self", ")", ":", "return", "self", ".", "serialize_headers", "(", ")", "+", "b'\\r\\n\\r\\n'", "+", "self", ".", "content" ]
[ 339, 4 ]
[ 341, 68 ]
python
en
['en', 'en', 'en']
True
FileResponse.set_headers
(self, filelike)
Set some common response headers (Content-Length, Content-Type, and Content-Disposition) based on the `filelike` response content.
Set some common response headers (Content-Length, Content-Type, and Content-Disposition) based on the `filelike` response content.
def set_headers(self, filelike):
    """
    Set some common response headers (Content-Length, Content-Type, and
    Content-Disposition) based on the `filelike` response content.
    """
    encoding_map = {
        'bzip2': 'application/x-bzip',
        'gzip': 'application/gzip',
        'xz': 'application/x-xz',
    }
    filename = getattr(filelike, 'name', None)
    filename = filename if (isinstance(filename, str) and filename) else self.filename
    if os.path.isabs(filename):
        self.headers['Content-Length'] = os.path.getsize(filelike.name)
    elif hasattr(filelike, 'getbuffer'):
        self.headers['Content-Length'] = filelike.getbuffer().nbytes

    if self.headers.get('Content-Type', '').startswith('text/html'):
        if filename:
            content_type, encoding = mimetypes.guess_type(filename)
            # Encoding isn't set to prevent browsers from automatically
            # uncompressing files.
            content_type = encoding_map.get(encoding, content_type)
            self.headers['Content-Type'] = content_type or 'application/octet-stream'
        else:
            self.headers['Content-Type'] = 'application/octet-stream'

    filename = self.filename or os.path.basename(filename)
    if filename:
        disposition = 'attachment' if self.as_attachment else 'inline'
        try:
            filename.encode('ascii')
            file_expr = 'filename="{}"'.format(filename)
        except UnicodeEncodeError:
            file_expr = "filename*=utf-8''{}".format(quote(filename))
        self.headers['Content-Disposition'] = '{}; {}'.format(disposition, file_expr)
    elif self.as_attachment:
        self.headers['Content-Disposition'] = 'attachment'
[ "def", "set_headers", "(", "self", ",", "filelike", ")", ":", "encoding_map", "=", "{", "'bzip2'", ":", "'application/x-bzip'", ",", "'gzip'", ":", "'application/gzip'", ",", "'xz'", ":", "'application/x-xz'", ",", "}", "filename", "=", "getattr", "(", "filelike", ",", "'name'", ",", "None", ")", "filename", "=", "filename", "if", "(", "isinstance", "(", "filename", ",", "str", ")", "and", "filename", ")", "else", "self", ".", "filename", "if", "os", ".", "path", ".", "isabs", "(", "filename", ")", ":", "self", ".", "headers", "[", "'Content-Length'", "]", "=", "os", ".", "path", ".", "getsize", "(", "filelike", ".", "name", ")", "elif", "hasattr", "(", "filelike", ",", "'getbuffer'", ")", ":", "self", ".", "headers", "[", "'Content-Length'", "]", "=", "filelike", ".", "getbuffer", "(", ")", ".", "nbytes", "if", "self", ".", "headers", ".", "get", "(", "'Content-Type'", ",", "''", ")", ".", "startswith", "(", "'text/html'", ")", ":", "if", "filename", ":", "content_type", ",", "encoding", "=", "mimetypes", ".", "guess_type", "(", "filename", ")", "# Encoding isn't set to prevent browsers from automatically", "# uncompressing files.", "content_type", "=", "encoding_map", ".", "get", "(", "encoding", ",", "content_type", ")", "self", ".", "headers", "[", "'Content-Type'", "]", "=", "content_type", "or", "'application/octet-stream'", "else", ":", "self", ".", "headers", "[", "'Content-Type'", "]", "=", "'application/octet-stream'", "filename", "=", "self", ".", "filename", "or", "os", ".", "path", ".", "basename", "(", "filename", ")", "if", "filename", ":", "disposition", "=", "'attachment'", "if", "self", ".", "as_attachment", "else", "'inline'", "try", ":", "filename", ".", "encode", "(", "'ascii'", ")", "file_expr", "=", "'filename=\"{}\"'", ".", "format", "(", "filename", ")", "except", "UnicodeEncodeError", ":", "file_expr", "=", "\"filename*=utf-8''{}\"", ".", "format", "(", "quote", "(", "filename", ")", ")", "self", ".", "headers", "[", "'Content-Disposition'", "]", "=", "'{}; {}'", ".", "format", "(", "disposition", ",", "file_expr", ")", "elif", "self", ".", "as_attachment", ":", "self", ".", "headers", "[", "'Content-Disposition'", "]", "=", "'attachment'" ]
[ 455, 4 ]
[ 492, 62 ]
python
en
['en', 'error', 'th']
False
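The Content-Disposition branch at the end of set_headers is worth seeing in isolation: ASCII filenames go into a plain filename= token, anything else falls back to the RFC 5987 filename*= form. Below is a small standalone sketch of that technique only; the helper name is made up and is not part of Django.

from urllib.parse import quote


def content_disposition(filename, as_attachment=False):
    # Hypothetical helper re-creating the header-building technique only.
    disposition = "attachment" if as_attachment else "inline"
    try:
        filename.encode("ascii")
        file_expr = 'filename="{}"'.format(filename)
    except UnicodeEncodeError:
        # Non-ASCII names use the RFC 5987 extended-parameter form.
        file_expr = "filename*=utf-8''{}".format(quote(filename))
    return "{}; {}".format(disposition, file_expr)


print(content_disposition("report.pdf", as_attachment=True))
# attachment; filename="report.pdf"
print(content_disposition("résumé.pdf"))
# inline; filename*=utf-8''r%C3%A9sum%C3%A9.pdf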
_unpack_args
(args, nargs_spec)
Given an iterable of arguments and an iterable of nargs specifications, it returns a tuple with all the unpacked arguments at the first index and all remaining arguments as the second. The nargs specification is the number of arguments that should be consumed or `-1` to indicate that this position should eat up all the remainders. Missing items are filled with `None`.
Given an iterable of arguments and an iterable of nargs specifications, it returns a tuple with all the unpacked arguments at the first index and all remaining arguments as the second.
def _unpack_args(args, nargs_spec):
    """Given an iterable of arguments and an iterable of nargs specifications,
    it returns a tuple with all the unpacked arguments at the first index
    and all remaining arguments as the second.

    The nargs specification is the number of arguments that should be consumed
    or `-1` to indicate that this position should eat up all the remainders.

    Missing items are filled with `None`.
    """
    args = deque(args)
    nargs_spec = deque(nargs_spec)
    rv = []
    spos = None

    def _fetch(c):
        try:
            if spos is None:
                return c.popleft()
            else:
                return c.pop()
        except IndexError:
            return None

    while nargs_spec:
        nargs = _fetch(nargs_spec)
        if nargs == 1:
            rv.append(_fetch(args))
        elif nargs > 1:
            x = [_fetch(args) for _ in range(nargs)]
            # If we're reversed, we're pulling in the arguments in reverse,
            # so we need to turn them around.
            if spos is not None:
                x.reverse()
            rv.append(tuple(x))
        elif nargs < 0:
            if spos is not None:
                raise TypeError('Cannot have two nargs < 0')
            spos = len(rv)
            rv.append(None)

    # spos is the position of the wildcard (star). If it's not `None`,
    # we fill it with the remainder.
    if spos is not None:
        rv[spos] = tuple(args)
        args = []
        rv[spos + 1:] = reversed(rv[spos + 1:])

    return tuple(rv), list(args)
[ "def", "_unpack_args", "(", "args", ",", "nargs_spec", ")", ":", "args", "=", "deque", "(", "args", ")", "nargs_spec", "=", "deque", "(", "nargs_spec", ")", "rv", "=", "[", "]", "spos", "=", "None", "def", "_fetch", "(", "c", ")", ":", "try", ":", "if", "spos", "is", "None", ":", "return", "c", ".", "popleft", "(", ")", "else", ":", "return", "c", ".", "pop", "(", ")", "except", "IndexError", ":", "return", "None", "while", "nargs_spec", ":", "nargs", "=", "_fetch", "(", "nargs_spec", ")", "if", "nargs", "==", "1", ":", "rv", ".", "append", "(", "_fetch", "(", "args", ")", ")", "elif", "nargs", ">", "1", ":", "x", "=", "[", "_fetch", "(", "args", ")", "for", "_", "in", "range", "(", "nargs", ")", "]", "# If we're reversed, we're pulling in the arguments in reverse,", "# so we need to turn them around.", "if", "spos", "is", "not", "None", ":", "x", ".", "reverse", "(", ")", "rv", ".", "append", "(", "tuple", "(", "x", ")", ")", "elif", "nargs", "<", "0", ":", "if", "spos", "is", "not", "None", ":", "raise", "TypeError", "(", "'Cannot have two nargs < 0'", ")", "spos", "=", "len", "(", "rv", ")", "rv", ".", "append", "(", "None", ")", "# spos is the position of the wildcard (star). If it's not `None`,", "# we fill it with the remainder.", "if", "spos", "is", "not", "None", ":", "rv", "[", "spos", "]", "=", "tuple", "(", "args", ")", "args", "=", "[", "]", "rv", "[", "spos", "+", "1", ":", "]", "=", "reversed", "(", "rv", "[", "spos", "+", "1", ":", "]", ")", "return", "tuple", "(", "rv", ")", ",", "list", "(", "args", ")" ]
[ 24, 0 ]
[ 72, 32 ]
python
en
['en', 'en', 'en']
True
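As a quick illustration of the nargs specification described above, here is a hedged call to the helper; note that _unpack_args is a private function in click.parser and its location may change between Click versions.

from click.parser import _unpack_args  # private helper; import path is an assumption

# First position takes 1 value, second eats the remainder (-1), third takes 1.
unpacked, leftover = _unpack_args(["a", "b", "c", "d", "e"], [1, -1, 1])
print(unpacked)   # ('a', ('b', 'c', 'd'), 'e')
print(leftover)   # []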
split_arg_string
(string)
Given an argument string this attempts to split it into small parts.
Given an argument string this attempts to split it into small parts.
def split_arg_string(string):
    """Given an argument string this attempts to split it into small parts."""
    rv = []
    for match in re.finditer(r"('([^'\\]*(?:\\.[^'\\]*)*)'"
                             r'|"([^"\\]*(?:\\.[^"\\]*)*)"'
                             r'|\S+)\s*', string, re.S):
        arg = match.group().strip()
        if arg[:1] == arg[-1:] and arg[:1] in '"\'':
            arg = arg[1:-1].encode('ascii', 'backslashreplace') \
                .decode('unicode-escape')
        try:
            arg = type(string)(arg)
        except UnicodeError:
            pass
        rv.append(arg)
    return rv
[ "def", "split_arg_string", "(", "string", ")", ":", "rv", "=", "[", "]", "for", "match", "in", "re", ".", "finditer", "(", "r\"('([^'\\\\]*(?:\\\\.[^'\\\\]*)*)'\"", "r'|\"([^\"\\\\]*(?:\\\\.[^\"\\\\]*)*)\"'", "r'|\\S+)\\s*'", ",", "string", ",", "re", ".", "S", ")", ":", "arg", "=", "match", ".", "group", "(", ")", ".", "strip", "(", ")", "if", "arg", "[", ":", "1", "]", "==", "arg", "[", "-", "1", ":", "]", "and", "arg", "[", ":", "1", "]", "in", "'\"\\''", ":", "arg", "=", "arg", "[", "1", ":", "-", "1", "]", ".", "encode", "(", "'ascii'", ",", "'backslashreplace'", ")", ".", "decode", "(", "'unicode-escape'", ")", "try", ":", "arg", "=", "type", "(", "string", ")", "(", "arg", ")", "except", "UnicodeError", ":", "pass", "rv", ".", "append", "(", "arg", ")", "return", "rv" ]
[ 97, 0 ]
[ 112, 13 ]
python
en
['en', 'en', 'en']
True
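A small hedged example of split_arg_string; in the Click version this row was extracted from it lives in click.parser, though newer releases moved it to click.shell_completion, so the import path is an assumption.

from click.parser import split_arg_string  # import path per this dataset row

print(split_arg_string('install "my package" --user'))
# ['install', 'my package', '--user']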
OptionParser.add_option
(self, opts, dest, action=None, nargs=1, const=None, obj=None)
Adds a new option named `dest` to the parser. The destination is not inferred (unlike with optparse) and needs to be explicitly provided. Action can be any of ``store``, ``store_const``, ``append``, ``append_const`` or ``count``. The `obj` can be used to identify the option in the order list that is returned from the parser.
Adds a new option named `dest` to the parser. The destination is not inferred (unlike with optparse) and needs to be explicitly provided. Action can be any of ``store``, ``store_const``, ``append``, ``append_const`` or ``count``.
def add_option(self, opts, dest, action=None, nargs=1, const=None, obj=None):
    """Adds a new option named `dest` to the parser.  The destination
    is not inferred (unlike with optparse) and needs to be explicitly
    provided.  Action can be any of ``store``, ``store_const``,
    ``append``, ``append_const`` or ``count``.

    The `obj` can be used to identify the option in the order list
    that is returned from the parser.
    """
    if obj is None:
        obj = dest
    opts = [normalize_opt(opt, self.ctx) for opt in opts]
    option = Option(opts, dest, action=action, nargs=nargs,
                    const=const, obj=obj)
    self._opt_prefixes.update(option.prefixes)
    for opt in option._short_opts:
        self._short_opt[opt] = option
    for opt in option._long_opts:
        self._long_opt[opt] = option
[ "def", "add_option", "(", "self", ",", "opts", ",", "dest", ",", "action", "=", "None", ",", "nargs", "=", "1", ",", "const", "=", "None", ",", "obj", "=", "None", ")", ":", "if", "obj", "is", "None", ":", "obj", "=", "dest", "opts", "=", "[", "normalize_opt", "(", "opt", ",", "self", ".", "ctx", ")", "for", "opt", "in", "opts", "]", "option", "=", "Option", "(", "opts", ",", "dest", ",", "action", "=", "action", ",", "nargs", "=", "nargs", ",", "const", "=", "const", ",", "obj", "=", "obj", ")", "self", ".", "_opt_prefixes", ".", "update", "(", "option", ".", "prefixes", ")", "for", "opt", "in", "option", ".", "_short_opts", ":", "self", ".", "_short_opt", "[", "opt", "]", "=", "option", "for", "opt", "in", "option", ".", "_long_opts", ":", "self", ".", "_long_opt", "[", "opt", "]", "=", "option" ]
[ 227, 4 ]
[ 246, 40 ]
python
en
['en', 'en', 'en']
True
OptionParser.add_argument
(self, dest, nargs=1, obj=None)
Adds a positional argument named `dest` to the parser. The `obj` can be used to identify the option in the order list that is returned from the parser.
Adds a positional argument named `dest` to the parser.
def add_argument(self, dest, nargs=1, obj=None):
    """Adds a positional argument named `dest` to the parser.

    The `obj` can be used to identify the option in the order list
    that is returned from the parser.
    """
    if obj is None:
        obj = dest
    self._args.append(Argument(dest=dest, nargs=nargs, obj=obj))
[ "def", "add_argument", "(", "self", ",", "dest", ",", "nargs", "=", "1", ",", "obj", "=", "None", ")", ":", "if", "obj", "is", "None", ":", "obj", "=", "dest", "self", ".", "_args", ".", "append", "(", "Argument", "(", "dest", "=", "dest", ",", "nargs", "=", "nargs", ",", "obj", "=", "obj", ")", ")" ]
[ 248, 4 ]
[ 256, 68 ]
python
en
['en', 'en', 'en']
True
OptionParser.parse_args
(self, args)
Parses positional arguments and returns ``(values, args, order)`` for the parsed options and arguments as well as the leftover arguments if there are any. The order is a list of objects as they appear on the command line. If arguments appear multiple times they will be memorized multiple times as well.
Parses positional arguments and returns ``(values, args, order)`` for the parsed options and arguments as well as the leftover arguments if there are any. The order is a list of objects as they appear on the command line. If arguments appear multiple times they will be memorized multiple times as well.
def parse_args(self, args):
    """Parses positional arguments and returns ``(values, args, order)``
    for the parsed options and arguments as well as the leftover
    arguments if there are any.  The order is a list of objects as they
    appear on the command line.  If arguments appear multiple times they
    will be memorized multiple times as well.
    """
    state = ParsingState(args)
    try:
        self._process_args_for_options(state)
        self._process_args_for_args(state)
    except UsageError:
        if self.ctx is None or not self.ctx.resilient_parsing:
            raise
    return state.opts, state.largs, state.order
[ "def", "parse_args", "(", "self", ",", "args", ")", ":", "state", "=", "ParsingState", "(", "args", ")", "try", ":", "self", ".", "_process_args_for_options", "(", "state", ")", "self", ".", "_process_args_for_args", "(", "state", ")", "except", "UsageError", ":", "if", "self", ".", "ctx", "is", "None", "or", "not", "self", ".", "ctx", ".", "resilient_parsing", ":", "raise", "return", "state", ".", "opts", ",", "state", ".", "largs", ",", "state", ".", "order" ]
[ 258, 4 ]
[ 272, 51 ]
python
en
['en', 'en', 'en']
True
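Putting add_option, add_argument and parse_args together, here is a hedged sketch of driving this internal parser directly. The signatures follow the rows above (Click 7-era); newer Click versions do not guarantee this private API.

from click.parser import OptionParser  # internal API; stability is an assumption

parser = OptionParser()
parser.add_option(["-v", "--verbose"], dest="verbose", action="store_const", const=True)
parser.add_option(["-n"], dest="count")
parser.add_argument(dest="src")

opts, leftover, order = parser.parse_args(["-v", "-n", "3", "input.txt"])
print(opts["verbose"], opts["count"], opts["src"])  # True 3 input.txt
print(leftover)                                     # []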
auto_decode
(data: bytes)
Check a bytes string for a BOM to correctly detect the encoding. Fall back to locale.getpreferredencoding(False), like open() does on Python 3.
Check a bytes string for a BOM to correctly detect the encoding.
def auto_decode(data: bytes) -> str:
    """Check a bytes string for a BOM to correctly detect the encoding

    Fallback to locale.getpreferredencoding(False) like open() on Python3"""
    for bom, encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(encoding)
    # Lets check the first two lines as in PEP263
    for line in data.split(b"\n")[:2]:
        if line[0:1] == b"#" and ENCODING_RE.search(line):
            result = ENCODING_RE.search(line)
            assert result is not None
            encoding = result.groups()[0].decode("ascii")
            return data.decode(encoding)
    return data.decode(
        locale.getpreferredencoding(False) or sys.getdefaultencoding(),
    )
[ "def", "auto_decode", "(", "data", ":", "bytes", ")", "->", "str", ":", "for", "bom", ",", "encoding", "in", "BOMS", ":", "if", "data", ".", "startswith", "(", "bom", ")", ":", "return", "data", "[", "len", "(", "bom", ")", ":", "]", ".", "decode", "(", "encoding", ")", "# Lets check the first two lines as in PEP263", "for", "line", "in", "data", ".", "split", "(", "b\"\\n\"", ")", "[", ":", "2", "]", ":", "if", "line", "[", "0", ":", "1", "]", "==", "b\"#\"", "and", "ENCODING_RE", ".", "search", "(", "line", ")", ":", "result", "=", "ENCODING_RE", ".", "search", "(", "line", ")", "assert", "result", "is", "not", "None", "encoding", "=", "result", ".", "groups", "(", ")", "[", "0", "]", ".", "decode", "(", "\"ascii\"", ")", "return", "data", ".", "decode", "(", "encoding", ")", "return", "data", ".", "decode", "(", "locale", ".", "getpreferredencoding", "(", "False", ")", "or", "sys", ".", "getdefaultencoding", "(", ")", ",", ")" ]
[ 19, 0 ]
[ 35, 5 ]
python
en
['en', 'en', 'en']
True
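Because auto_decode is a pip-internal helper, the BOM-then-PEP-263-then-locale technique is easier to try out as a self-contained sketch; the BOMS and ENCODING_RE constants below are local stand-ins for pip's module-level values, not imports from pip.

import codecs
import locale
import re
import sys

# Local stand-ins for pip's module-level BOMS / ENCODING_RE constants (assumption).
BOMS = [
    (codecs.BOM_UTF8, "utf-8"),
    (codecs.BOM_UTF16_LE, "utf-16-le"),
    (codecs.BOM_UTF16_BE, "utf-16-be"),
]
ENCODING_RE = re.compile(rb"coding[:=]\s*([-\w.]+)")


def auto_decode(data: bytes) -> str:
    # 1. A byte-order mark wins outright.
    for bom, encoding in BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(encoding)
    # 2. A PEP 263 style declaration in the first two comment lines.
    for line in data.split(b"\n")[:2]:
        if line[0:1] == b"#" and (m := ENCODING_RE.search(line)):
            return data.decode(m.groups()[0].decode("ascii"))
    # 3. Fall back to the locale's preferred encoding, like open() does.
    return data.decode(locale.getpreferredencoding(False) or sys.getdefaultencoding())


print(auto_decode(codecs.BOM_UTF8 + "café".encode("utf-8")))
print(auto_decode(b"# coding: latin-1\n" + "café".encode("latin-1")))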
generate_metadata
(build_env, backend)
Generate metadata using mechanisms described in PEP 517. Returns the generated metadata directory.
Generate metadata using mechanisms described in PEP 517.
def generate_metadata(build_env, backend):
    # type: (BuildEnvironment, Pep517HookCaller) -> str
    """Generate metadata using mechanisms described in PEP 517.

    Returns the generated metadata directory.
    """
    metadata_tmpdir = TempDirectory(
        kind="modern-metadata", globally_managed=True
    )

    metadata_dir = metadata_tmpdir.path

    with build_env:
        # Note that Pep517HookCaller implements a fallback for
        # prepare_metadata_for_build_wheel, so we don't have to
        # consider the possibility that this hook doesn't exist.
        runner = runner_with_spinner_message("Preparing wheel metadata")
        with backend.subprocess_runner(runner):
            distinfo_dir = backend.prepare_metadata_for_build_wheel(
                metadata_dir
            )

    return os.path.join(metadata_dir, distinfo_dir)
[ "def", "generate_metadata", "(", "build_env", ",", "backend", ")", ":", "# type: (BuildEnvironment, Pep517HookCaller) -> str", "metadata_tmpdir", "=", "TempDirectory", "(", "kind", "=", "\"modern-metadata\"", ",", "globally_managed", "=", "True", ")", "metadata_dir", "=", "metadata_tmpdir", ".", "path", "with", "build_env", ":", "# Note that Pep517HookCaller implements a fallback for", "# prepare_metadata_for_build_wheel, so we don't have to", "# consider the possibility that this hook doesn't exist.", "runner", "=", "runner_with_spinner_message", "(", "\"Preparing wheel metadata\"", ")", "with", "backend", ".", "subprocess_runner", "(", "runner", ")", ":", "distinfo_dir", "=", "backend", ".", "prepare_metadata_for_build_wheel", "(", "metadata_dir", ")", "return", "os", ".", "path", ".", "join", "(", "metadata_dir", ",", "distinfo_dir", ")" ]
[ 12, 0 ]
[ 34, 51 ]
python
en
['en', 'nl', 'en']
True
stringfilter
(func)
Decorator for filters which should only receive strings. The object passed as the first positional argument will be converted to a string.
Decorator for filters which should only receive strings. The object passed as the first positional argument will be converted to a string.
def stringfilter(func):
    """
    Decorator for filters which should only receive strings. The object
    passed as the first positional argument will be converted to a string.
    """
    def _dec(*args, **kwargs):
        args = list(args)
        args[0] = str(args[0])
        if (isinstance(args[0], SafeData) and
                getattr(_dec._decorated_function, 'is_safe', False)):
            return mark_safe(func(*args, **kwargs))
        return func(*args, **kwargs)

    # Include a reference to the real function (used to check original
    # arguments by the template parser, and to bear the 'is_safe' attribute
    # when multiple decorators are applied).
    _dec._decorated_function = getattr(func, '_decorated_function', func)

    return wraps(func)(_dec)
[ "def", "stringfilter", "(", "func", ")", ":", "def", "_dec", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "args", "=", "list", "(", "args", ")", "args", "[", "0", "]", "=", "str", "(", "args", "[", "0", "]", ")", "if", "(", "isinstance", "(", "args", "[", "0", "]", ",", "SafeData", ")", "and", "getattr", "(", "_dec", ".", "_decorated_function", ",", "'is_safe'", ",", "False", ")", ")", ":", "return", "mark_safe", "(", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "# Include a reference to the real function (used to check original", "# arguments by the template parser, and to bear the 'is_safe' attribute", "# when multiple decorators are applied).", "_dec", ".", "_decorated_function", "=", "getattr", "(", "func", ",", "'_decorated_function'", ",", "func", ")", "return", "wraps", "(", "func", ")", "(", "_dec", ")" ]
[ 34, 0 ]
[ 52, 28 ]
python
en
['en', 'error', 'th']
False
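A brief usage sketch of the decorator; it assumes Django is importable, and `shout` is a made-up filter for illustration, not part of Django.

from django.template.defaultfilters import stringfilter


@stringfilter
def shout(value):
    # `value` arrives as str even if the caller passed a non-string.
    return value.upper() + "!"


print(shout(42))        # 42!
print(shout("quiet"))   # QUIET!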
addslashes
(value)
Add slashes before quotes. Useful for escaping strings in CSV, for example. Less useful for escaping JavaScript; use the ``escapejs`` filter instead.
Add slashes before quotes. Useful for escaping strings in CSV, for example. Less useful for escaping JavaScript; use the ``escapejs`` filter instead.
def addslashes(value): """ Add slashes before quotes. Useful for escaping strings in CSV, for example. Less useful for escaping JavaScript; use the ``escapejs`` filter instead. """ return value.replace('\\', '\\\\').replace('"', '\\"').replace("'", "\\'")
[ "def", "addslashes", "(", "value", ")", ":", "return", "value", ".", "replace", "(", "'\\\\'", ",", "'\\\\\\\\'", ")", ".", "replace", "(", "'\"'", ",", "'\\\\\"'", ")", ".", "replace", "(", "\"'\"", ",", "\"\\\\'\"", ")" ]
[ 61, 0 ]
[ 67, 78 ]
python
en
['en', 'error', 'th']
False
capfirst
(value)
Capitalize the first character of the value.
Capitalize the first character of the value.
def capfirst(value): """Capitalize the first character of the value.""" return value and value[0].upper() + value[1:]
[ "def", "capfirst", "(", "value", ")", ":", "return", "value", "and", "value", "[", "0", "]", ".", "upper", "(", ")", "+", "value", "[", "1", ":", "]" ]
[ 72, 0 ]
[ 74, 49 ]
python
en
['en', 'en', 'en']
True
escapejs_filter
(value)
Hex encode characters for use in JavaScript strings.
Hex encode characters for use in JavaScript strings.
def escapejs_filter(value): """Hex encode characters for use in JavaScript strings.""" return escapejs(value)
[ "def", "escapejs_filter", "(", "value", ")", ":", "return", "escapejs", "(", "value", ")" ]
[ 79, 0 ]
[ 81, 26 ]
python
en
['en', 'en', 'en']
True
json_script
(value, element_id)
Output value JSON-encoded, wrapped in a <script type="application/json"> tag.
Output value JSON-encoded, wrapped in a <script type="application/json"> tag.
def json_script(value, element_id): """ Output value JSON-encoded, wrapped in a <script type="application/json"> tag. """ return _json_script(value, element_id)
[ "def", "json_script", "(", "value", ",", "element_id", ")", ":", "return", "_json_script", "(", "value", ",", "element_id", ")" ]
[ 85, 0 ]
[ 90, 42 ]
python
en
['en', 'error', 'th']
False
floatformat
(text, arg=-1)
Display a float to a specified number of decimal places.

If called without an argument, display the floating point number with one decimal place -- but only if there's a decimal place to be displayed:

* num1 = 34.23234
* num2 = 34.00000
* num3 = 34.26000
* {{ num1|floatformat }} displays "34.2"
* {{ num2|floatformat }} displays "34"
* {{ num3|floatformat }} displays "34.3"

If arg is positive, always display exactly arg number of decimal places:

* {{ num1|floatformat:3 }} displays "34.232"
* {{ num2|floatformat:3 }} displays "34.000"
* {{ num3|floatformat:3 }} displays "34.260"

If arg is negative, display arg number of decimal places -- but only if there are places to be displayed:

* {{ num1|floatformat:"-3" }} displays "34.232"
* {{ num2|floatformat:"-3" }} displays "34"
* {{ num3|floatformat:"-3" }} displays "34.260"

If arg has the 'g' suffix, force the result to be grouped by the THOUSAND_SEPARATOR for the active locale. When the active locale is en (English):

* {{ 6666.6666|floatformat:"2g" }} displays "6,666.67"
* {{ 10000|floatformat:"g" }} displays "10,000"

If the input float is infinity or NaN, display the string representation of that value.
Display a float to a specified number of decimal places.
def floatformat(text, arg=-1):
    """
    Display a float to a specified number of decimal places.

    If called without an argument, display the floating point number with one
    decimal place -- but only if there's a decimal place to be displayed:

    * num1 = 34.23234
    * num2 = 34.00000
    * num3 = 34.26000
    * {{ num1|floatformat }} displays "34.2"
    * {{ num2|floatformat }} displays "34"
    * {{ num3|floatformat }} displays "34.3"

    If arg is positive, always display exactly arg number of decimal places:

    * {{ num1|floatformat:3 }} displays "34.232"
    * {{ num2|floatformat:3 }} displays "34.000"
    * {{ num3|floatformat:3 }} displays "34.260"

    If arg is negative, display arg number of decimal places -- but only if
    there are places to be displayed:

    * {{ num1|floatformat:"-3" }} displays "34.232"
    * {{ num2|floatformat:"-3" }} displays "34"
    * {{ num3|floatformat:"-3" }} displays "34.260"

    If arg has the 'g' suffix, force the result to be grouped by the
    THOUSAND_SEPARATOR for the active locale. When the active locale is
    en (English):

    * {{ 6666.6666|floatformat:"2g" }} displays "6,666.67"
    * {{ 10000|floatformat:"g" }} displays "10,000"

    If the input float is infinity or NaN, display the string representation
    of that value.
    """
    force_grouping = False
    if isinstance(arg, str) and arg.endswith('g'):
        force_grouping = True
        arg = arg[:-1] or -1
    try:
        input_val = repr(text)
        d = Decimal(input_val)
    except InvalidOperation:
        try:
            d = Decimal(str(float(text)))
        except (ValueError, InvalidOperation, TypeError):
            return ''
    try:
        p = int(arg)
    except ValueError:
        return input_val

    try:
        m = int(d) - d
    except (ValueError, OverflowError, InvalidOperation):
        return input_val

    if not m and p < 0:
        return mark_safe(
            formats.number_format('%d' % (int(d)), 0, force_grouping=force_grouping),
        )

    exp = Decimal(1).scaleb(-abs(p))
    # Set the precision high enough to avoid an exception (#15789).
    tupl = d.as_tuple()
    units = len(tupl[1])
    units += -tupl[2] if m else tupl[2]
    prec = abs(p) + units + 1

    # Avoid conversion to scientific notation by accessing `sign`, `digits`,
    # and `exponent` from Decimal.as_tuple() directly.
    rounded_d = d.quantize(exp, ROUND_HALF_UP, Context(prec=prec))
    sign, digits, exponent = rounded_d.as_tuple()
    digits = [str(digit) for digit in reversed(digits)]
    while len(digits) <= abs(exponent):
        digits.append('0')
    digits.insert(-exponent, '.')
    if sign and rounded_d:
        digits.append('-')
    number = ''.join(reversed(digits))
    return mark_safe(
        formats.number_format(number, abs(p), force_grouping=force_grouping),
    )
[ "def", "floatformat", "(", "text", ",", "arg", "=", "-", "1", ")", ":", "force_grouping", "=", "False", "if", "isinstance", "(", "arg", ",", "str", ")", "and", "arg", ".", "endswith", "(", "'g'", ")", ":", "force_grouping", "=", "True", "arg", "=", "arg", "[", ":", "-", "1", "]", "or", "-", "1", "try", ":", "input_val", "=", "repr", "(", "text", ")", "d", "=", "Decimal", "(", "input_val", ")", "except", "InvalidOperation", ":", "try", ":", "d", "=", "Decimal", "(", "str", "(", "float", "(", "text", ")", ")", ")", "except", "(", "ValueError", ",", "InvalidOperation", ",", "TypeError", ")", ":", "return", "''", "try", ":", "p", "=", "int", "(", "arg", ")", "except", "ValueError", ":", "return", "input_val", "try", ":", "m", "=", "int", "(", "d", ")", "-", "d", "except", "(", "ValueError", ",", "OverflowError", ",", "InvalidOperation", ")", ":", "return", "input_val", "if", "not", "m", "and", "p", "<", "0", ":", "return", "mark_safe", "(", "formats", ".", "number_format", "(", "'%d'", "%", "(", "int", "(", "d", ")", ")", ",", "0", ",", "force_grouping", "=", "force_grouping", ")", ",", ")", "exp", "=", "Decimal", "(", "1", ")", ".", "scaleb", "(", "-", "abs", "(", "p", ")", ")", "# Set the precision high enough to avoid an exception (#15789).", "tupl", "=", "d", ".", "as_tuple", "(", ")", "units", "=", "len", "(", "tupl", "[", "1", "]", ")", "units", "+=", "-", "tupl", "[", "2", "]", "if", "m", "else", "tupl", "[", "2", "]", "prec", "=", "abs", "(", "p", ")", "+", "units", "+", "1", "# Avoid conversion to scientific notation by accessing `sign`, `digits`,", "# and `exponent` from Decimal.as_tuple() directly.", "rounded_d", "=", "d", ".", "quantize", "(", "exp", ",", "ROUND_HALF_UP", ",", "Context", "(", "prec", "=", "prec", ")", ")", "sign", ",", "digits", ",", "exponent", "=", "rounded_d", ".", "as_tuple", "(", ")", "digits", "=", "[", "str", "(", "digit", ")", "for", "digit", "in", "reversed", "(", "digits", ")", "]", "while", "len", "(", "digits", ")", "<=", "abs", "(", "exponent", ")", ":", "digits", ".", "append", "(", "'0'", ")", "digits", ".", "insert", "(", "-", "exponent", ",", "'.'", ")", "if", "sign", "and", "rounded_d", ":", "digits", ".", "append", "(", "'-'", ")", "number", "=", "''", ".", "join", "(", "reversed", "(", "digits", ")", ")", "return", "mark_safe", "(", "formats", ".", "number_format", "(", "number", ",", "abs", "(", "p", ")", ",", "force_grouping", "=", "force_grouping", ")", ",", ")" ]
[ 94, 0 ]
[ 178, 5 ]
python
en
['en', 'error', 'th']
False
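The core of floatformat is a Decimal quantize call with ROUND_HALF_UP. A standalone sketch of just that rounding step follows; the function name is illustrative, and the locale-aware grouping and negative-arg behaviour of the real filter are omitted.

from decimal import ROUND_HALF_UP, Decimal


def round_places(value, p):
    # Quantize to 10**-p with half-up rounding, as floatformat does internally.
    exp = Decimal(1).scaleb(-abs(p))   # p=2 -> Decimal('0.01')
    return Decimal(str(value)).quantize(exp, ROUND_HALF_UP)


print(round_places(34.23234, 2))   # 34.23
print(round_places(34.26, 1))      # 34.3
print(round_places(34.0, 3))       # 34.000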
iriencode
(value)
Escape an IRI value for use in a URL.
Escape an IRI value for use in a URL.
def iriencode(value): """Escape an IRI value for use in a URL.""" return iri_to_uri(value)
[ "def", "iriencode", "(", "value", ")", ":", "return", "iri_to_uri", "(", "value", ")" ]
[ 183, 0 ]
[ 185, 28 ]
python
en
['en', 'en', 'en']
True
linenumbers
(value, autoescape=True)
Display text with line numbers.
Display text with line numbers.
def linenumbers(value, autoescape=True):
    """Display text with line numbers."""
    lines = value.split('\n')
    # Find the maximum width of the line count, for use with zero padding
    # string format command
    width = str(len(str(len(lines))))
    if not autoescape or isinstance(value, SafeData):
        for i, line in enumerate(lines):
            lines[i] = ("%0" + width + "d. %s") % (i + 1, line)
    else:
        for i, line in enumerate(lines):
            lines[i] = ("%0" + width + "d. %s") % (i + 1, escape(line))
    return mark_safe('\n'.join(lines))
[ "def", "linenumbers", "(", "value", ",", "autoescape", "=", "True", ")", ":", "lines", "=", "value", ".", "split", "(", "'\\n'", ")", "# Find the maximum width of the line count, for use with zero padding", "# string format command", "width", "=", "str", "(", "len", "(", "str", "(", "len", "(", "lines", ")", ")", ")", ")", "if", "not", "autoescape", "or", "isinstance", "(", "value", ",", "SafeData", ")", ":", "for", "i", ",", "line", "in", "enumerate", "(", "lines", ")", ":", "lines", "[", "i", "]", "=", "(", "\"%0\"", "+", "width", "+", "\"d. %s\"", ")", "%", "(", "i", "+", "1", ",", "line", ")", "else", ":", "for", "i", ",", "line", "in", "enumerate", "(", "lines", ")", ":", "lines", "[", "i", "]", "=", "(", "\"%0\"", "+", "width", "+", "\"d. %s\"", ")", "%", "(", "i", "+", "1", ",", "escape", "(", "line", ")", ")", "return", "mark_safe", "(", "'\\n'", ".", "join", "(", "lines", ")", ")" ]
[ 190, 0 ]
[ 202, 38 ]
python
en
['en', 'en', 'en']
True
lower
(value)
Convert a string into all lowercase.
Convert a string into all lowercase.
def lower(value): """Convert a string into all lowercase.""" return value.lower()
[ "def", "lower", "(", "value", ")", ":", "return", "value", ".", "lower", "(", ")" ]
[ 207, 0 ]
[ 209, 24 ]
python
en
['en', 'en', 'en']
True
make_list
(value)
Return the value turned into a list. For an integer, it's a list of digits. For a string, it's a list of characters.
Return the value turned into a list.
def make_list(value): """ Return the value turned into a list. For an integer, it's a list of digits. For a string, it's a list of characters. """ return list(value)
[ "def", "make_list", "(", "value", ")", ":", "return", "list", "(", "value", ")" ]
[ 214, 0 ]
[ 221, 22 ]
python
en
['en', 'error', 'th']
False
slugify
(value)
Convert to ASCII. Convert spaces to hyphens. Remove characters that aren't alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip leading and trailing whitespace.
Convert to ASCII. Convert spaces to hyphens. Remove characters that aren't alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip leading and trailing whitespace.
def slugify(value): """ Convert to ASCII. Convert spaces to hyphens. Remove characters that aren't alphanumerics, underscores, or hyphens. Convert to lowercase. Also strip leading and trailing whitespace. """ return _slugify(value)
[ "def", "slugify", "(", "value", ")", ":", "return", "_slugify", "(", "value", ")" ]
[ 226, 0 ]
[ 232, 26 ]
python
en
['en', 'error', 'th']
False
stringformat
(value, arg)
Format the variable according to the arg, a string formatting specifier. This specifier uses Python string formatting syntax, with the exception that the leading "%" is dropped. See https://docs.python.org/library/stdtypes.html#printf-style-string-formatting for documentation of Python string formatting.
Format the variable according to the arg, a string formatting specifier.
def stringformat(value, arg):
    """
    Format the variable according to the arg, a string formatting specifier.

    This specifier uses Python string formatting syntax, with the exception
    that the leading "%" is dropped.

    See https://docs.python.org/library/stdtypes.html#printf-style-string-formatting
    for documentation of Python string formatting.
    """
    if isinstance(value, tuple):
        value = str(value)
    try:
        return ("%" + str(arg)) % value
    except (ValueError, TypeError):
        return ""
[ "def", "stringformat", "(", "value", ",", "arg", ")", ":", "if", "isinstance", "(", "value", ",", "tuple", ")", ":", "value", "=", "str", "(", "value", ")", "try", ":", "return", "(", "\"%\"", "+", "str", "(", "arg", ")", ")", "%", "value", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "\"\"" ]
[ 236, 0 ]
[ 251, 17 ]
python
en
['en', 'error', 'th']
False
title
(value)
Convert a string into titlecase.
Convert a string into titlecase.
def title(value):
    """Convert a string into titlecase."""
    t = re.sub("([a-z])'([A-Z])", lambda m: m[0].lower(), value.title())
    return re.sub(r'\d([A-Z])', lambda m: m[0].lower(), t)
[ "def", "title", "(", "value", ")", ":", "t", "=", "re", ".", "sub", "(", "\"([a-z])'([A-Z])\"", ",", "lambda", "m", ":", "m", "[", "0", "]", ".", "lower", "(", ")", ",", "value", ".", "title", "(", ")", ")", "return", "re", ".", "sub", "(", "r'\\d([A-Z])'", ",", "lambda", "m", ":", "m", "[", "0", "]", ".", "lower", "(", ")", ",", "t", ")" ]
[ 256, 0 ]
[ 259, 58 ]
python
en
['en', 'gl', 'en']
True
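The two regular expressions above undo str.title()'s over-capitalisation after apostrophes and digits. Calling the filter directly shows the effect; this assumes Django is importable, and the sample strings are made up.

from django.template.defaultfilters import title

print(title("joe's diner"))     # Joe's Diner
print(title("my 1st attempt"))  # My 1st Attempt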
truncatechars
(value, arg)
Truncate a string after `arg` number of characters.
Truncate a string after `arg` number of characters.
def truncatechars(value, arg):
    """Truncate a string after `arg` number of characters."""
    try:
        length = int(arg)
    except ValueError:  # Invalid literal for int().
        return value  # Fail silently.
    return Truncator(value).chars(length)
[ "def", "truncatechars", "(", "value", ",", "arg", ")", ":", "try", ":", "length", "=", "int", "(", "arg", ")", "except", "ValueError", ":", "# Invalid literal for int().", "return", "value", "# Fail silently.", "return", "Truncator", "(", "value", ")", ".", "chars", "(", "length", ")" ]
[ 264, 0 ]
[ 270, 41 ]
python
en
['en', 'en', 'en']
True
truncatechars_html
(value, arg)
Truncate HTML after `arg` number of chars. Preserve newlines in the HTML.
Truncate HTML after `arg` number of chars. Preserve newlines in the HTML.
def truncatechars_html(value, arg):
    """
    Truncate HTML after `arg` number of chars.
    Preserve newlines in the HTML.
    """
    try:
        length = int(arg)
    except ValueError:  # invalid literal for int()
        return value  # Fail silently.
    return Truncator(value).chars(length, html=True)
[ "def", "truncatechars_html", "(", "value", ",", "arg", ")", ":", "try", ":", "length", "=", "int", "(", "arg", ")", "except", "ValueError", ":", "# invalid literal for int()", "return", "value", "# Fail silently.", "return", "Truncator", "(", "value", ")", ".", "chars", "(", "length", ",", "html", "=", "True", ")" ]
[ 275, 0 ]
[ 284, 52 ]
python
en
['en', 'error', 'th']
False
truncatewords
(value, arg)
Truncate a string after `arg` number of words. Remove newlines within the string.
Truncate a string after `arg` number of words. Remove newlines within the string.
def truncatewords(value, arg):
    """
    Truncate a string after `arg` number of words.
    Remove newlines within the string.
    """
    try:
        length = int(arg)
    except ValueError:  # Invalid literal for int().
        return value  # Fail silently.
    return Truncator(value).words(length, truncate=' …')
[ "def", "truncatewords", "(", "value", ",", "arg", ")", ":", "try", ":", "length", "=", "int", "(", "arg", ")", "except", "ValueError", ":", "# Invalid literal for int().", "return", "value", "# Fail silently.", "return", "Truncator", "(", "value", ")", ".", "words", "(", "length", ",", "truncate", "=", "' …')", "" ]
[ 289, 0 ]
[ 298, 58 ]
python
en
['en', 'error', 'th']
False
truncatewords_html
(value, arg)
Truncate HTML after `arg` number of words. Preserve newlines in the HTML.
Truncate HTML after `arg` number of words. Preserve newlines in the HTML.
def truncatewords_html(value, arg):
    """
    Truncate HTML after `arg` number of words.
    Preserve newlines in the HTML.
    """
    try:
        length = int(arg)
    except ValueError:  # invalid literal for int()
        return value  # Fail silently.
    return Truncator(value).words(length, html=True, truncate=' …')
[ "def", "truncatewords_html", "(", "value", ",", "arg", ")", ":", "try", ":", "length", "=", "int", "(", "arg", ")", "except", "ValueError", ":", "# invalid literal for int()", "return", "value", "# Fail silently.", "return", "Truncator", "(", "value", ")", ".", "words", "(", "length", ",", "html", "=", "True", ",", "truncate", "=", "' …')", "" ]
[ 303, 0 ]
[ 312, 69 ]
python
en
['en', 'error', 'th']
False
upper
(value)
Convert a string into all uppercase.
Convert a string into all uppercase.
def upper(value): """Convert a string into all uppercase.""" return value.upper()
[ "def", "upper", "(", "value", ")", ":", "return", "value", ".", "upper", "(", ")" ]
[ 317, 0 ]
[ 319, 24 ]
python
en
['en', 'en', 'en']
True
urlencode
(value, safe=None)
Escape a value for use in a URL. The ``safe`` parameter determines the characters which should not be escaped by Python's quote() function. If not provided, use the default safe characters (but an empty string can be provided when *all* characters should be escaped).
Escape a value for use in a URL.
def urlencode(value, safe=None):
    """
    Escape a value for use in a URL.

    The ``safe`` parameter determines the characters which should not be
    escaped by Python's quote() function. If not provided, use the default safe
    characters (but an empty string can be provided when *all* characters
    should be escaped).
    """
    kwargs = {}
    if safe is not None:
        kwargs['safe'] = safe
    return quote(value, **kwargs)
[ "def", "urlencode", "(", "value", ",", "safe", "=", "None", ")", ":", "kwargs", "=", "{", "}", "if", "safe", "is", "not", "None", ":", "kwargs", "[", "'safe'", "]", "=", "safe", "return", "quote", "(", "value", ",", "*", "*", "kwargs", ")" ]
[ 324, 0 ]
[ 336, 33 ]
python
en
['en', 'error', 'th']
False
urlize
(value, autoescape=True)
Convert URLs in plain text into clickable links.
Convert URLs in plain text into clickable links.
def urlize(value, autoescape=True): """Convert URLs in plain text into clickable links.""" return mark_safe(_urlize(value, nofollow=True, autoescape=autoescape))
[ "def", "urlize", "(", "value", ",", "autoescape", "=", "True", ")", ":", "return", "mark_safe", "(", "_urlize", "(", "value", ",", "nofollow", "=", "True", ",", "autoescape", "=", "autoescape", ")", ")" ]
[ 341, 0 ]
[ 343, 74 ]
python
en
['en', 'lb', 'en']
True
urlizetrunc
(value, limit, autoescape=True)
Convert URLs into clickable links, truncating URLs to the given character limit, and adding 'rel=nofollow' attribute to discourage spamming. Argument: Length to truncate URLs to.
Convert URLs into clickable links, truncating URLs to the given character limit, and adding 'rel=nofollow' attribute to discourage spamming.
def urlizetrunc(value, limit, autoescape=True): """ Convert URLs into clickable links, truncating URLs to the given character limit, and adding 'rel=nofollow' attribute to discourage spamming. Argument: Length to truncate URLs to. """ return mark_safe(_urlize(value, trim_url_limit=int(limit), nofollow=True, autoescape=autoescape))
[ "def", "urlizetrunc", "(", "value", ",", "limit", ",", "autoescape", "=", "True", ")", ":", "return", "mark_safe", "(", "_urlize", "(", "value", ",", "trim_url_limit", "=", "int", "(", "limit", ")", ",", "nofollow", "=", "True", ",", "autoescape", "=", "autoescape", ")", ")" ]
[ 348, 0 ]
[ 355, 101 ]
python
en
['en', 'error', 'th']
False
wordcount
(value)
Return the number of words.
Return the number of words.
def wordcount(value): """Return the number of words.""" return len(value.split())
[ "def", "wordcount", "(", "value", ")", ":", "return", "len", "(", "value", ".", "split", "(", ")", ")" ]
[ 360, 0 ]
[ 362, 29 ]
python
en
['en', 'en', 'en']
True
wordwrap
(value, arg)
Wrap words at `arg` line length.
Wrap words at `arg` line length.
def wordwrap(value, arg): """Wrap words at `arg` line length.""" return wrap(value, int(arg))
[ "def", "wordwrap", "(", "value", ",", "arg", ")", ":", "return", "wrap", "(", "value", ",", "int", "(", "arg", ")", ")" ]
[ 367, 0 ]
[ 369, 32 ]
python
en
['en', 'en', 'en']
True
ljust
(value, arg)
Left-align the value in a field of a given width.
Left-align the value in a field of a given width.
def ljust(value, arg): """Left-align the value in a field of a given width.""" return value.ljust(int(arg))
[ "def", "ljust", "(", "value", ",", "arg", ")", ":", "return", "value", ".", "ljust", "(", "int", "(", "arg", ")", ")" ]
[ 374, 0 ]
[ 376, 32 ]
python
en
['en', 'en', 'en']
True
rjust
(value, arg)
Right-align the value in a field of a given width.
Right-align the value in a field of a given width.
def rjust(value, arg): """Right-align the value in a field of a given width.""" return value.rjust(int(arg))
[ "def", "rjust", "(", "value", ",", "arg", ")", ":", "return", "value", ".", "rjust", "(", "int", "(", "arg", ")", ")" ]
[ 381, 0 ]
[ 383, 32 ]
python
en
['en', 'en', 'en']
True
center
(value, arg)
Center the value in a field of a given width.
Center the value in a field of a given width.
def center(value, arg): """Center the value in a field of a given width.""" return value.center(int(arg))
[ "def", "center", "(", "value", ",", "arg", ")", ":", "return", "value", ".", "center", "(", "int", "(", "arg", ")", ")" ]
[ 388, 0 ]
[ 390, 33 ]
python
en
['en', 'en', 'en']
True
cut
(value, arg)
Remove all values of arg from the given string.
Remove all values of arg from the given string.
def cut(value, arg):
    """Remove all values of arg from the given string."""
    safe = isinstance(value, SafeData)
    value = value.replace(arg, '')
    if safe and arg != ';':
        return mark_safe(value)
    return value
[ "def", "cut", "(", "value", ",", "arg", ")", ":", "safe", "=", "isinstance", "(", "value", ",", "SafeData", ")", "value", "=", "value", ".", "replace", "(", "arg", ",", "''", ")", "if", "safe", "and", "arg", "!=", "';'", ":", "return", "mark_safe", "(", "value", ")", "return", "value" ]
[ 395, 0 ]
[ 401, 16 ]
python
en
['en', 'en', 'en']
True
escape_filter
(value)
Mark the value as a string that should be auto-escaped.
Mark the value as a string that should be auto-escaped.
def escape_filter(value): """Mark the value as a string that should be auto-escaped.""" return conditional_escape(value)
[ "def", "escape_filter", "(", "value", ")", ":", "return", "conditional_escape", "(", "value", ")" ]
[ 410, 0 ]
[ 412, 36 ]
python
en
['en', 'en', 'en']
True
force_escape
(value)
Escape a string's HTML. Return a new string containing the escaped characters (as opposed to "escape", which marks the content for later possible escaping).
Escape a string's HTML. Return a new string containing the escaped characters (as opposed to "escape", which marks the content for later possible escaping).
def force_escape(value): """ Escape a string's HTML. Return a new string containing the escaped characters (as opposed to "escape", which marks the content for later possible escaping). """ return escape(value)
[ "def", "force_escape", "(", "value", ")", ":", "return", "escape", "(", "value", ")" ]
[ 417, 0 ]
[ 423, 24 ]
python
en
['en', 'error', 'th']
False
linebreaks_filter
(value, autoescape=True)
Replace line breaks in plain text with appropriate HTML; a single newline becomes an HTML line break (``<br>``) and a new line followed by a blank line becomes a paragraph break (``</p>``).
Replace line breaks in plain text with appropriate HTML; a single newline becomes an HTML line break (``<br>``) and a new line followed by a blank line becomes a paragraph break (``</p>``).
def linebreaks_filter(value, autoescape=True):
    """
    Replace line breaks in plain text with appropriate HTML; a single
    newline becomes an HTML line break (``<br>``) and a new line
    followed by a blank line becomes a paragraph break (``</p>``).
    """
    autoescape = autoescape and not isinstance(value, SafeData)
    return mark_safe(linebreaks(value, autoescape))
[ "def", "linebreaks_filter", "(", "value", ",", "autoescape", "=", "True", ")", ":", "autoescape", "=", "autoescape", "and", "not", "isinstance", "(", "value", ",", "SafeData", ")", "return", "mark_safe", "(", "linebreaks", "(", "value", ",", "autoescape", ")", ")" ]
[ 428, 0 ]
[ 435, 51 ]
python
en
['en', 'error', 'th']
False
linebreaksbr
(value, autoescape=True)
Convert all newlines in a piece of plain text to HTML line breaks (``<br>``).
Convert all newlines in a piece of plain text to HTML line breaks (``<br>``).
def linebreaksbr(value, autoescape=True):
    """
    Convert all newlines in a piece of plain text to HTML line breaks
    (``<br>``).
    """
    autoescape = autoescape and not isinstance(value, SafeData)
    value = normalize_newlines(value)
    if autoescape:
        value = escape(value)
    return mark_safe(value.replace('\n', '<br>'))
[ "def", "linebreaksbr", "(", "value", ",", "autoescape", "=", "True", ")", ":", "autoescape", "=", "autoescape", "and", "not", "isinstance", "(", "value", ",", "SafeData", ")", "value", "=", "normalize_newlines", "(", "value", ")", "if", "autoescape", ":", "value", "=", "escape", "(", "value", ")", "return", "mark_safe", "(", "value", ".", "replace", "(", "'\\n'", ",", "'<br>'", ")", ")" ]
[ 440, 0 ]
[ 449, 49 ]
python
en
['en', 'error', 'th']
False
safe
(value)
Mark the value as a string that should not be auto-escaped.
Mark the value as a string that should not be auto-escaped.
def safe(value): """Mark the value as a string that should not be auto-escaped.""" return mark_safe(value)
[ "def", "safe", "(", "value", ")", ":", "return", "mark_safe", "(", "value", ")" ]
[ 454, 0 ]
[ 456, 27 ]
python
en
['en', 'en', 'en']
True
safeseq
(value)
A "safe" filter for sequences. Mark each element in the sequence, individually, as safe, after converting them to strings. Return a list with the results.
A "safe" filter for sequences. Mark each element in the sequence, individually, as safe, after converting them to strings. Return a list with the results.
def safeseq(value): """ A "safe" filter for sequences. Mark each element in the sequence, individually, as safe, after converting them to strings. Return a list with the results. """ return [mark_safe(obj) for obj in value]
[ "def", "safeseq", "(", "value", ")", ":", "return", "[", "mark_safe", "(", "obj", ")", "for", "obj", "in", "value", "]" ]
[ 460, 0 ]
[ 466, 44 ]
python
en
['en', 'error', 'th']
False
striptags
(value)
Strip all [X]HTML tags.
Strip all [X]HTML tags.
def striptags(value): """Strip all [X]HTML tags.""" return strip_tags(value)
[ "def", "striptags", "(", "value", ")", ":", "return", "strip_tags", "(", "value", ")" ]
[ 471, 0 ]
[ 473, 28 ]
python
en
['en', 'mt', 'en']
True
_property_resolver
(arg)
When arg is convertible to float, behave like operator.itemgetter(arg)
Otherwise, behave like Variable(arg).resolve

>>> _property_resolver(1)('abc')
'b'
>>> _property_resolver('1')('abc')
Traceback (most recent call last):
...
TypeError: string indices must be integers
>>> class Foo:
...     a = 42
...     b = 3.14
...     c = 'Hey!'
>>> _property_resolver('b')(Foo())
3.14
When arg is convertible to float, behave like operator.itemgetter(arg) Otherwise, behave like Variable(arg).resolve
def _property_resolver(arg):
    """
    When arg is convertible to float, behave like operator.itemgetter(arg)
    Otherwise, behave like Variable(arg).resolve

    >>> _property_resolver(1)('abc')
    'b'
    >>> _property_resolver('1')('abc')
    Traceback (most recent call last):
    ...
    TypeError: string indices must be integers
    >>> class Foo:
    ...     a = 42
    ...     b = 3.14
    ...     c = 'Hey!'
    >>> _property_resolver('b')(Foo())
    3.14
    """
    try:
        float(arg)
    except ValueError:
        return Variable(arg).resolve
    else:
        return itemgetter(arg)
[ "def", "_property_resolver", "(", "arg", ")", ":", "try", ":", "float", "(", "arg", ")", "except", "ValueError", ":", "return", "Variable", "(", "arg", ")", ".", "resolve", "else", ":", "return", "itemgetter", "(", "arg", ")" ]
[ 480, 0 ]
[ 503, 30 ]
python
en
['en', 'error', 'th']
False
dictsort
(value, arg)
Given a list of dicts, return that list sorted by the property given in the argument.
Given a list of dicts, return that list sorted by the property given in the argument.
def dictsort(value, arg):
    """
    Given a list of dicts, return that list sorted by the property given in
    the argument.
    """
    try:
        return sorted(value, key=_property_resolver(arg))
    except (TypeError, VariableDoesNotExist):
        return ''
[ "def", "dictsort", "(", "value", ",", "arg", ")", ":", "try", ":", "return", "sorted", "(", "value", ",", "key", "=", "_property_resolver", "(", "arg", ")", ")", "except", "(", "TypeError", ",", "VariableDoesNotExist", ")", ":", "return", "''" ]
[ 507, 0 ]
[ 515, 17 ]
python
en
['en', 'error', 'th']
False
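A short usage sketch for dictsort, assuming Django is importable; the sample list is made up. String keys resolve via Variable lookup, numeric keys via operator.itemgetter.

from django.template.defaultfilters import dictsort

people = [{"name": "Zoe", "age": 19}, {"name": "Ann", "age": 32}]
print(dictsort(people, "age"))
# [{'name': 'Zoe', 'age': 19}, {'name': 'Ann', 'age': 32}]
print(dictsort(people, "name"))
# [{'name': 'Ann', 'age': 32}, {'name': 'Zoe', 'age': 19}]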
dictsortreversed
(value, arg)
Given a list of dicts, return that list sorted in reverse order by the property given in the argument.
Given a list of dicts, return that list sorted in reverse order by the property given in the argument.
def dictsortreversed(value, arg):
    """
    Given a list of dicts, return that list sorted in reverse order by the
    property given in the argument.
    """
    try:
        return sorted(value, key=_property_resolver(arg), reverse=True)
    except (TypeError, VariableDoesNotExist):
        return ''
[ "def", "dictsortreversed", "(", "value", ",", "arg", ")", ":", "try", ":", "return", "sorted", "(", "value", ",", "key", "=", "_property_resolver", "(", "arg", ")", ",", "reverse", "=", "True", ")", "except", "(", "TypeError", ",", "VariableDoesNotExist", ")", ":", "return", "''" ]
[ 519, 0 ]
[ 527, 17 ]
python
en
['en', 'error', 'th']
False
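A small usage sketch for dictsort and dictsortreversed called directly as Python functions (assumes Django is installed; the sample data is made up for illustration).

from django.template.defaultfilters import dictsort, dictsortreversed

books = [
    {'title': 'B', 'year': 2001},
    {'title': 'A', 'year': 2010},
]
print(dictsort(books, 'title'))         # 'A' entry first, then 'B'
print(dictsortreversed(books, 'year'))  # 2010 entry first, then 2001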
first
(value)
Return the first item in a list.
Return the first item in a list.
def first(value): """Return the first item in a list.""" try: return value[0] except IndexError: return ''
[ "def", "first", "(", "value", ")", ":", "try", ":", "return", "value", "[", "0", "]", "except", "IndexError", ":", "return", "''" ]
[ 531, 0 ]
[ 536, 17 ]
python
en
['en', 'en', 'en']
True
join
(value, arg, autoescape=True)
Join a list with a string, like Python's ``str.join(list)``.
Join a list with a string, like Python's ``str.join(list)``.
def join(value, arg, autoescape=True): """Join a list with a string, like Python's ``str.join(list)``.""" try: if autoescape: value = [conditional_escape(v) for v in value] data = conditional_escape(arg).join(value) except TypeError: # Fail silently if arg isn't iterable. return value return mark_safe(data)
[ "def", "join", "(", "value", ",", "arg", ",", "autoescape", "=", "True", ")", ":", "try", ":", "if", "autoescape", ":", "value", "=", "[", "conditional_escape", "(", "v", ")", "for", "v", "in", "value", "]", "data", "=", "conditional_escape", "(", "arg", ")", ".", "join", "(", "value", ")", "except", "TypeError", ":", "# Fail silently if arg isn't iterable.", "return", "value", "return", "mark_safe", "(", "data", ")" ]
[ 540, 0 ]
[ 548, 26 ]
python
en
['en', 'en', 'en']
True
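A short sketch showing join with and without autoescaping, called directly (assumes Django is installed; the strings are illustrative).

from django.template.defaultfilters import join

items = ['<a>', 'b & c']
print(join(items, ' | '))                    # "&lt;a&gt; | b &amp; c" (values escaped)
print(join(items, ' | ', autoescape=False))  # "<a> | b & c"
print(join(42, ', '))                        # 42 - a non-iterable value is returned unchanged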
last
(value)
Return the last item in a list.
Return the last item in a list.
def last(value): """Return the last item in a list.""" try: return value[-1] except IndexError: return ''
[ "def", "last", "(", "value", ")", ":", "try", ":", "return", "value", "[", "-", "1", "]", "except", "IndexError", ":", "return", "''" ]
[ 552, 0 ]
[ 557, 17 ]
python
en
['en', 'en', 'en']
True
length
(value)
Return the length of the value - useful for lists.
Return the length of the value - useful for lists.
def length(value): """Return the length of the value - useful for lists.""" try: return len(value) except (ValueError, TypeError): return 0
[ "def", "length", "(", "value", ")", ":", "try", ":", "return", "len", "(", "value", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "0" ]
[ 561, 0 ]
[ 566, 16 ]
python
en
['en', 'en', 'en']
True
length_is
(value, arg)
Return a boolean of whether the value's length is the argument.
Return a boolean of whether the value's length is the argument.
def length_is(value, arg): """Return a boolean of whether the value's length is the argument.""" try: return len(value) == int(arg) except (ValueError, TypeError): return ''
[ "def", "length_is", "(", "value", ",", "arg", ")", ":", "try", ":", "return", "len", "(", "value", ")", "==", "int", "(", "arg", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "''" ]
[ 570, 0 ]
[ 575, 17 ]
python
en
['en', 'en', 'en']
True
random
(value)
Return a random item from the list.
Return a random item from the list.
def random(value): """Return a random item from the list.""" return random_module.choice(value)
[ "def", "random", "(", "value", ")", ":", "return", "random_module", ".", "choice", "(", "value", ")" ]
[ 579, 0 ]
[ 581, 38 ]
python
en
['en', 'mt', 'en']
True
slice_filter
(value, arg)
Return a slice of the list using the same syntax as Python's list slicing.
Return a slice of the list using the same syntax as Python's list slicing.
def slice_filter(value, arg): """ Return a slice of the list using the same syntax as Python's list slicing. """ try: bits = [] for x in str(arg).split(':'): if not x: bits.append(None) else: bits.append(int(x)) return value[slice(*bits)] except (ValueError, TypeError): return value
[ "def", "slice_filter", "(", "value", ",", "arg", ")", ":", "try", ":", "bits", "=", "[", "]", "for", "x", "in", "str", "(", "arg", ")", ".", "split", "(", "':'", ")", ":", "if", "not", "x", ":", "bits", ".", "append", "(", "None", ")", "else", ":", "bits", ".", "append", "(", "int", "(", "x", ")", ")", "return", "value", "[", "slice", "(", "*", "bits", ")", "]", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "value" ]
[ 585, 0 ]
[ 599, 20 ]
python
en
['en', 'error', 'th']
False
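A quick sketch of the slice filter (exposed in templates as "slice"; the underlying function is slice_filter) called directly, assuming Django is installed.

from django.template.defaultfilters import slice_filter

letters = ['a', 'b', 'c', 'd', 'e']
print(slice_filter(letters, ':3'))   # ['a', 'b', 'c']
print(slice_filter(letters, '1:4'))  # ['b', 'c', 'd']
print(slice_filter(letters, '::2'))  # ['a', 'c', 'e']
print(slice_filter(letters, 'bad'))  # original list - an invalid arg fails silently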
unordered_list
(value, autoescape=True)
Recursively take a self-nested list and return an HTML unordered list -- WITHOUT opening and closing <ul> tags. Assume the list is in the proper format. For example, if ``var`` contains: ``['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]``, then ``{{ var|unordered_list }}`` returns:: <li>States <ul> <li>Kansas <ul> <li>Lawrence</li> <li>Topeka</li> </ul> </li> <li>Illinois</li> </ul> </li>
Recursively take a self-nested list and return an HTML unordered list -- WITHOUT opening and closing <ul> tags.
def unordered_list(value, autoescape=True): """ Recursively take a self-nested list and return an HTML unordered list -- WITHOUT opening and closing <ul> tags. Assume the list is in the proper format. For example, if ``var`` contains: ``['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]``, then ``{{ var|unordered_list }}`` returns:: <li>States <ul> <li>Kansas <ul> <li>Lawrence</li> <li>Topeka</li> </ul> </li> <li>Illinois</li> </ul> </li> """ if autoescape: escaper = conditional_escape else: def escaper(x): return x def walk_items(item_list): item_iterator = iter(item_list) try: item = next(item_iterator) while True: try: next_item = next(item_iterator) except StopIteration: yield item, None break if isinstance(next_item, (list, tuple, types.GeneratorType)): try: iter(next_item) except TypeError: pass else: yield item, next_item item = next(item_iterator) continue yield item, None item = next_item except StopIteration: pass def list_formatter(item_list, tabs=1): indent = '\t' * tabs output = [] for item, children in walk_items(item_list): sublist = '' if children: sublist = '\n%s<ul>\n%s\n%s</ul>\n%s' % ( indent, list_formatter(children, tabs + 1), indent, indent) output.append('%s<li>%s%s</li>' % ( indent, escaper(item), sublist)) return '\n'.join(output) return mark_safe(list_formatter(value))
[ "def", "unordered_list", "(", "value", ",", "autoescape", "=", "True", ")", ":", "if", "autoescape", ":", "escaper", "=", "conditional_escape", "else", ":", "def", "escaper", "(", "x", ")", ":", "return", "x", "def", "walk_items", "(", "item_list", ")", ":", "item_iterator", "=", "iter", "(", "item_list", ")", "try", ":", "item", "=", "next", "(", "item_iterator", ")", "while", "True", ":", "try", ":", "next_item", "=", "next", "(", "item_iterator", ")", "except", "StopIteration", ":", "yield", "item", ",", "None", "break", "if", "isinstance", "(", "next_item", ",", "(", "list", ",", "tuple", ",", "types", ".", "GeneratorType", ")", ")", ":", "try", ":", "iter", "(", "next_item", ")", "except", "TypeError", ":", "pass", "else", ":", "yield", "item", ",", "next_item", "item", "=", "next", "(", "item_iterator", ")", "continue", "yield", "item", ",", "None", "item", "=", "next_item", "except", "StopIteration", ":", "pass", "def", "list_formatter", "(", "item_list", ",", "tabs", "=", "1", ")", ":", "indent", "=", "'\\t'", "*", "tabs", "output", "=", "[", "]", "for", "item", ",", "children", "in", "walk_items", "(", "item_list", ")", ":", "sublist", "=", "''", "if", "children", ":", "sublist", "=", "'\\n%s<ul>\\n%s\\n%s</ul>\\n%s'", "%", "(", "indent", ",", "list_formatter", "(", "children", ",", "tabs", "+", "1", ")", ",", "indent", ",", "indent", ")", "output", ".", "append", "(", "'%s<li>%s%s</li>'", "%", "(", "indent", ",", "escaper", "(", "item", ")", ",", "sublist", ")", ")", "return", "'\\n'", ".", "join", "(", "output", ")", "return", "mark_safe", "(", "list_formatter", "(", "value", ")", ")" ]
[ 603, 0 ]
[ 666, 43 ]
python
en
['en', 'error', 'th']
False
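A compact sketch of unordered_list on a nested list, assuming Django is installed; the result is the tab-indented <li>/<ul> markup shown in the docstring above, without the outer <ul> tags.

from django.template.defaultfilters import unordered_list

tree = ['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]
print(unordered_list(tree))
# prints the nested <li>States ... <li>Illinois</li> markup from the docstring example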
add
(value, arg)
Add the arg to the value.
Add the arg to the value.
def add(value, arg): """Add the arg to the value.""" try: return int(value) + int(arg) except (ValueError, TypeError): try: return value + arg except Exception: return ''
[ "def", "add", "(", "value", ",", "arg", ")", ":", "try", ":", "return", "int", "(", "value", ")", "+", "int", "(", "arg", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "try", ":", "return", "value", "+", "arg", "except", "Exception", ":", "return", "''" ]
[ 674, 0 ]
[ 682, 21 ]
python
en
['en', 'en', 'en']
True
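A tiny sketch of add showing the integer coercion first and then the generic "+" fallback (assumes Django is installed).

from django.template.defaultfilters import add

print(add('4', 3))       # 7 - both sides coerced to int
print(add([1, 2], [3]))  # [1, 2, 3] - falls back to Python's "+"
print(add('abc', 5))     # '' - neither coercion nor "+" works, so fail silently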
get_digit
(value, arg)
Given a whole number, return the requested digit of it, where 1 is the right-most digit, 2 is the second-right-most digit, etc. Return the original value for invalid input (if input or argument is not an integer, or if argument is less than 1). Otherwise, output is always an integer.
Given a whole number, return the requested digit of it, where 1 is the right-most digit, 2 is the second-right-most digit, etc. Return the original value for invalid input (if input or argument is not an integer, or if argument is less than 1). Otherwise, output is always an integer.
def get_digit(value, arg): """ Given a whole number, return the requested digit of it, where 1 is the right-most digit, 2 is the second-right-most digit, etc. Return the original value for invalid input (if input or argument is not an integer, or if argument is less than 1). Otherwise, output is always an integer. """ try: arg = int(arg) value = int(value) except ValueError: return value # Fail silently for an invalid argument if arg < 1: return value try: return int(str(value)[-arg]) except IndexError: return 0
[ "def", "get_digit", "(", "value", ",", "arg", ")", ":", "try", ":", "arg", "=", "int", "(", "arg", ")", "value", "=", "int", "(", "value", ")", "except", "ValueError", ":", "return", "value", "# Fail silently for an invalid argument", "if", "arg", "<", "1", ":", "return", "value", "try", ":", "return", "int", "(", "str", "(", "value", ")", "[", "-", "arg", "]", ")", "except", "IndexError", ":", "return", "0" ]
[ 686, 0 ]
[ 703, 16 ]
python
en
['en', 'error', 'th']
False
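A short sketch of get_digit, assuming Django is installed; the argument counts digits from the right, starting at 1.

from django.template.defaultfilters import get_digit

print(get_digit(123456789, 2))  # 8 - second digit from the right
print(get_digit(123456789, 1))  # 9
print(get_digit(42, 5))         # 0 - position past the left end of the number
print(get_digit('xyz', 2))      # 'xyz' - invalid input is returned unchanged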
date
(value, arg=None)
Format a date according to the given format.
Format a date according to the given format.
def date(value, arg=None): """Format a date according to the given format.""" if value in (None, ''): return '' try: return formats.date_format(value, arg) except AttributeError: try: return format(value, arg) except AttributeError: return ''
[ "def", "date", "(", "value", ",", "arg", "=", "None", ")", ":", "if", "value", "in", "(", "None", ",", "''", ")", ":", "return", "''", "try", ":", "return", "formats", ".", "date_format", "(", "value", ",", "arg", ")", "except", "AttributeError", ":", "try", ":", "return", "format", "(", "value", ",", "arg", ")", "except", "AttributeError", ":", "return", "''" ]
[ 711, 0 ]
[ 721, 21 ]
python
en
['en', 'en', 'en']
True
time
(value, arg=None)
Format a time according to the given format.
Format a time according to the given format.
def time(value, arg=None): """Format a time according to the given format.""" if value in (None, ''): return '' try: return formats.time_format(value, arg) except (AttributeError, TypeError): try: return time_format(value, arg) except (AttributeError, TypeError): return ''
[ "def", "time", "(", "value", ",", "arg", "=", "None", ")", ":", "if", "value", "in", "(", "None", ",", "''", ")", ":", "return", "''", "try", ":", "return", "formats", ".", "time_format", "(", "value", ",", "arg", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "try", ":", "return", "time_format", "(", "value", ",", "arg", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "return", "''" ]
[ 725, 0 ]
[ 735, 21 ]
python
en
['en', 'en', 'en']
True
timesince_filter
(value, arg=None)
Format a date as the time since that date (i.e. "4 days, 6 hours").
Format a date as the time since that date (i.e. "4 days, 6 hours").
def timesince_filter(value, arg=None): """Format a date as the time since that date (i.e. "4 days, 6 hours").""" if not value: return '' try: if arg: return timesince(value, arg) return timesince(value) except (ValueError, TypeError): return ''
[ "def", "timesince_filter", "(", "value", ",", "arg", "=", "None", ")", ":", "if", "not", "value", ":", "return", "''", "try", ":", "if", "arg", ":", "return", "timesince", "(", "value", ",", "arg", ")", "return", "timesince", "(", "value", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "''" ]
[ 739, 0 ]
[ 748, 17 ]
python
en
['en', 'en', 'en']
True
timeuntil_filter
(value, arg=None)
Format a date as the time until that date (i.e. "4 days, 6 hours").
Format a date as the time until that date (i.e. "4 days, 6 hours").
def timeuntil_filter(value, arg=None): """Format a date as the time until that date (i.e. "4 days, 6 hours").""" if not value: return '' try: return timeuntil(value, arg) except (ValueError, TypeError): return ''
[ "def", "timeuntil_filter", "(", "value", ",", "arg", "=", "None", ")", ":", "if", "not", "value", ":", "return", "''", "try", ":", "return", "timeuntil", "(", "value", ",", "arg", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "return", "''" ]
[ 752, 0 ]
[ 759, 17 ]
python
en
['en', 'en', 'en']
True
default
(value, arg)
If value is unavailable, use given default.
If value is unavailable, use given default.
def default(value, arg): """If value is unavailable, use given default.""" return value or arg
[ "def", "default", "(", "value", ",", "arg", ")", ":", "return", "value", "or", "arg" ]
[ 767, 0 ]
[ 769, 23 ]
python
en
['en', 'en', 'en']
True
default_if_none
(value, arg)
If value is None, use given default.
If value is None, use given default.
def default_if_none(value, arg): """If value is None, use given default.""" if value is None: return arg return value
[ "def", "default_if_none", "(", "value", ",", "arg", ")", ":", "if", "value", "is", "None", ":", "return", "arg", "return", "value" ]
[ 773, 0 ]
[ 777, 16 ]
python
en
['en', 'en', 'en']
True
divisibleby
(value, arg)
Return True if the value is divisible by the argument.
Return True if the value is divisible by the argument.
def divisibleby(value, arg): """Return True if the value is divisible by the argument.""" return int(value) % int(arg) == 0
[ "def", "divisibleby", "(", "value", ",", "arg", ")", ":", "return", "int", "(", "value", ")", "%", "int", "(", "arg", ")", "==", "0" ]
[ 781, 0 ]
[ 783, 37 ]
python
en
['en', 'en', 'en']
True
yesno
(value, arg=None)
Given a string mapping values for true, false, and (optionally) None, return one of those strings according to the value: ========== ====================== ================================== Value Argument Outputs ========== ====================== ================================== ``True`` ``"yeah,no,maybe"`` ``yeah`` ``False`` ``"yeah,no,maybe"`` ``no`` ``None`` ``"yeah,no,maybe"`` ``maybe`` ``None`` ``"yeah,no"`` ``"no"`` (converts None to False if no mapping for None is given. ========== ====================== ==================================
Given a string mapping values for true, false, and (optionally) None, return one of those strings according to the value:
def yesno(value, arg=None): """ Given a string mapping values for true, false, and (optionally) None, return one of those strings according to the value: ========== ====================== ================================== Value Argument Outputs ========== ====================== ================================== ``True`` ``"yeah,no,maybe"`` ``yeah`` ``False`` ``"yeah,no,maybe"`` ``no`` ``None`` ``"yeah,no,maybe"`` ``maybe`` ``None`` ``"yeah,no"`` ``"no"`` (converts None to False if no mapping for None is given. ========== ====================== ================================== """ if arg is None: # Translators: Please do not add spaces around commas. arg = gettext('yes,no,maybe') bits = arg.split(',') if len(bits) < 2: return value # Invalid arg. try: yes, no, maybe = bits except ValueError: # Unpack list of wrong size (no "maybe" value provided). yes, no, maybe = bits[0], bits[1], bits[1] if value is None: return maybe if value: return yes return no
[ "def", "yesno", "(", "value", ",", "arg", "=", "None", ")", ":", "if", "arg", "is", "None", ":", "# Translators: Please do not add spaces around commas.", "arg", "=", "gettext", "(", "'yes,no,maybe'", ")", "bits", "=", "arg", ".", "split", "(", "','", ")", "if", "len", "(", "bits", ")", "<", "2", ":", "return", "value", "# Invalid arg.", "try", ":", "yes", ",", "no", ",", "maybe", "=", "bits", "except", "ValueError", ":", "# Unpack list of wrong size (no \"maybe\" value provided).", "yes", ",", "no", ",", "maybe", "=", "bits", "[", "0", "]", ",", "bits", "[", "1", "]", ",", "bits", "[", "1", "]", "if", "value", "is", "None", ":", "return", "maybe", "if", "value", ":", "return", "yes", "return", "no" ]
[ 787, 0 ]
[ 817, 13 ]
python
en
['en', 'error', 'th']
False
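A brief sketch of yesno with an explicit mapping argument (assumes Django is installed; passing arg avoids the translated default mapping, so no settings are needed).

from django.template.defaultfilters import yesno

print(yesno(True, 'yeah,no,maybe'))   # yeah
print(yesno(False, 'yeah,no,maybe'))  # no
print(yesno(None, 'yeah,no,maybe'))   # maybe
print(yesno(None, 'yeah,no'))         # no - None falls back to the "no" value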
filesizeformat
(bytes_)
Format the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB, 102 bytes, etc.).
Format the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB, 102 bytes, etc.).
def filesizeformat(bytes_): """ Format the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB, 102 bytes, etc.). """ try: bytes_ = int(bytes_) except (TypeError, ValueError, UnicodeDecodeError): value = ngettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0} return avoid_wrapping(value) def filesize_number_format(value): return formats.number_format(round(value, 1), 1) KB = 1 << 10 MB = 1 << 20 GB = 1 << 30 TB = 1 << 40 PB = 1 << 50 negative = bytes_ < 0 if negative: bytes_ = -bytes_ # Allow formatting of negative numbers. if bytes_ < KB: value = ngettext("%(size)d byte", "%(size)d bytes", bytes_) % {'size': bytes_} elif bytes_ < MB: value = gettext("%s KB") % filesize_number_format(bytes_ / KB) elif bytes_ < GB: value = gettext("%s MB") % filesize_number_format(bytes_ / MB) elif bytes_ < TB: value = gettext("%s GB") % filesize_number_format(bytes_ / GB) elif bytes_ < PB: value = gettext("%s TB") % filesize_number_format(bytes_ / TB) else: value = gettext("%s PB") % filesize_number_format(bytes_ / PB) if negative: value = "-%s" % value return avoid_wrapping(value)
[ "def", "filesizeformat", "(", "bytes_", ")", ":", "try", ":", "bytes_", "=", "int", "(", "bytes_", ")", "except", "(", "TypeError", ",", "ValueError", ",", "UnicodeDecodeError", ")", ":", "value", "=", "ngettext", "(", "\"%(size)d byte\"", ",", "\"%(size)d bytes\"", ",", "0", ")", "%", "{", "'size'", ":", "0", "}", "return", "avoid_wrapping", "(", "value", ")", "def", "filesize_number_format", "(", "value", ")", ":", "return", "formats", ".", "number_format", "(", "round", "(", "value", ",", "1", ")", ",", "1", ")", "KB", "=", "1", "<<", "10", "MB", "=", "1", "<<", "20", "GB", "=", "1", "<<", "30", "TB", "=", "1", "<<", "40", "PB", "=", "1", "<<", "50", "negative", "=", "bytes_", "<", "0", "if", "negative", ":", "bytes_", "=", "-", "bytes_", "# Allow formatting of negative numbers.", "if", "bytes_", "<", "KB", ":", "value", "=", "ngettext", "(", "\"%(size)d byte\"", ",", "\"%(size)d bytes\"", ",", "bytes_", ")", "%", "{", "'size'", ":", "bytes_", "}", "elif", "bytes_", "<", "MB", ":", "value", "=", "gettext", "(", "\"%s KB\"", ")", "%", "filesize_number_format", "(", "bytes_", "/", "KB", ")", "elif", "bytes_", "<", "GB", ":", "value", "=", "gettext", "(", "\"%s MB\"", ")", "%", "filesize_number_format", "(", "bytes_", "/", "MB", ")", "elif", "bytes_", "<", "TB", ":", "value", "=", "gettext", "(", "\"%s GB\"", ")", "%", "filesize_number_format", "(", "bytes_", "/", "GB", ")", "elif", "bytes_", "<", "PB", ":", "value", "=", "gettext", "(", "\"%s TB\"", ")", "%", "filesize_number_format", "(", "bytes_", "/", "TB", ")", "else", ":", "value", "=", "gettext", "(", "\"%s PB\"", ")", "%", "filesize_number_format", "(", "bytes_", "/", "PB", ")", "if", "negative", ":", "value", "=", "\"-%s\"", "%", "value", "return", "avoid_wrapping", "(", "value", ")" ]
[ 825, 0 ]
[ 864, 32 ]
python
en
['en', 'error', 'th']
False
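A sketch for filesizeformat; because it goes through gettext/ngettext and number_format, Django settings must be configured first (a bare settings.configure() is enough for this illustration). Note that avoid_wrapping turns the space into a non-breaking space.

import django
from django.conf import settings

settings.configure()  # minimal settings so the translation/format helpers work
django.setup()

from django.template.defaultfilters import filesizeformat

print(filesizeformat(1023))       # "1023 bytes" (the space is a non-breaking space)
print(filesizeformat(1024 ** 2))  # "1.0 MB"
print(filesizeformat(-2048))      # "-2.0 KB" - negative sizes are formatted too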
pluralize
(value, arg='s')
Return a plural suffix if the value is not 1, '1', or an object of length 1. By default, use 's' as the suffix: * If value is 0, vote{{ value|pluralize }} display "votes". * If value is 1, vote{{ value|pluralize }} display "vote". * If value is 2, vote{{ value|pluralize }} display "votes". If an argument is provided, use that string instead: * If value is 0, class{{ value|pluralize:"es" }} display "classes". * If value is 1, class{{ value|pluralize:"es" }} display "class". * If value is 2, class{{ value|pluralize:"es" }} display "classes". If the provided argument contains a comma, use the text before the comma for the singular case and the text after the comma for the plural case: * If value is 0, cand{{ value|pluralize:"y,ies" }} display "candies". * If value is 1, cand{{ value|pluralize:"y,ies" }} display "candy". * If value is 2, cand{{ value|pluralize:"y,ies" }} display "candies".
Return a plural suffix if the value is not 1, '1', or an object of length 1. By default, use 's' as the suffix:
def pluralize(value, arg='s'): """ Return a plural suffix if the value is not 1, '1', or an object of length 1. By default, use 's' as the suffix: * If value is 0, vote{{ value|pluralize }} display "votes". * If value is 1, vote{{ value|pluralize }} display "vote". * If value is 2, vote{{ value|pluralize }} display "votes". If an argument is provided, use that string instead: * If value is 0, class{{ value|pluralize:"es" }} display "classes". * If value is 1, class{{ value|pluralize:"es" }} display "class". * If value is 2, class{{ value|pluralize:"es" }} display "classes". If the provided argument contains a comma, use the text before the comma for the singular case and the text after the comma for the plural case: * If value is 0, cand{{ value|pluralize:"y,ies" }} display "candies". * If value is 1, cand{{ value|pluralize:"y,ies" }} display "candy". * If value is 2, cand{{ value|pluralize:"y,ies" }} display "candies". """ if ',' not in arg: arg = ',' + arg bits = arg.split(',') if len(bits) > 2: return '' singular_suffix, plural_suffix = bits[:2] try: return singular_suffix if float(value) == 1 else plural_suffix except ValueError: # Invalid string that's not a number. pass except TypeError: # Value isn't a string or a number; maybe it's a list? try: return singular_suffix if len(value) == 1 else plural_suffix except TypeError: # len() of unsized object. pass return ''
[ "def", "pluralize", "(", "value", ",", "arg", "=", "'s'", ")", ":", "if", "','", "not", "in", "arg", ":", "arg", "=", "','", "+", "arg", "bits", "=", "arg", ".", "split", "(", "','", ")", "if", "len", "(", "bits", ")", ">", "2", ":", "return", "''", "singular_suffix", ",", "plural_suffix", "=", "bits", "[", ":", "2", "]", "try", ":", "return", "singular_suffix", "if", "float", "(", "value", ")", "==", "1", "else", "plural_suffix", "except", "ValueError", ":", "# Invalid string that's not a number.", "pass", "except", "TypeError", ":", "# Value isn't a string or a number; maybe it's a list?", "try", ":", "return", "singular_suffix", "if", "len", "(", "value", ")", "==", "1", "else", "plural_suffix", "except", "TypeError", ":", "# len() of unsized object.", "pass", "return", "''" ]
[ 868, 0 ]
[ 906, 13 ]
python
en
['en', 'error', 'th']
False
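A small sketch of pluralize covering the default suffix, a custom suffix, and the comma (singular,plural) form (assumes Django is installed).

from django.template.defaultfilters import pluralize

print('vote' + pluralize(1))           # vote
print('vote' + pluralize(2))           # votes
print('class' + pluralize(2, 'es'))    # classes
print('cand' + pluralize(1, 'y,ies'))  # candy
print('cand' + pluralize(0, 'y,ies'))  # candies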
phone2numeric_filter
(value)
Take a phone number and convert it into its numerical equivalent.
Take a phone number and convert it into its numerical equivalent.
def phone2numeric_filter(value): """Take a phone number and converts it in to its numerical equivalent.""" return phone2numeric(value)
[ "def", "phone2numeric_filter", "(", "value", ")", ":", "return", "phone2numeric", "(", "value", ")" ]
[ 910, 0 ]
[ 912, 31 ]
python
en
['en', 'en', 'en']
True
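A one-line sketch of phone2numeric_filter (assumes Django is installed; the vanity number is illustrative).

from django.template.defaultfilters import phone2numeric_filter

print(phone2numeric_filter('1-800-COLLECT'))  # 1-800-2655328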
pprint
(value)
A wrapper around pprint.pprint -- for debugging, really.
A wrapper around pprint.pprint -- for debugging, really.
def pprint(value): """A wrapper around pprint.pprint -- for debugging, really.""" try: return pformat(value) except Exception as e: return "Error in formatting: %s: %s" % (e.__class__.__name__, e)
[ "def", "pprint", "(", "value", ")", ":", "try", ":", "return", "pformat", "(", "value", ")", "except", "Exception", "as", "e", ":", "return", "\"Error in formatting: %s: %s\"", "%", "(", "e", ".", "__class__", ".", "__name__", ",", "e", ")" ]
[ 916, 0 ]
[ 921, 72 ]
python
en
['en', 'en', 'en']
True
glibc_version_string
()
Returns glibc version string, or None if not using glibc.
Returns glibc version string, or None if not using glibc.
def glibc_version_string(): # type: () -> Optional[str] "Returns glibc version string, or None if not using glibc." return glibc_version_string_confstr() or glibc_version_string_ctypes()
[ "def", "glibc_version_string", "(", ")", ":", "# type: () -> Optional[str]", "return", "glibc_version_string_confstr", "(", ")", "or", "glibc_version_string_ctypes", "(", ")" ]
[ 8, 0 ]
[ 11, 74 ]
python
en
['en', 'en', 'en']
True
glibc_version_string_confstr
()
Primary implementation of glibc_version_string using os.confstr.
Primary implementation of glibc_version_string using os.confstr.
def glibc_version_string_confstr(): # type: () -> Optional[str] "Primary implementation of glibc_version_string using os.confstr." # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely # to be broken or missing. This strategy is used in the standard library # platform module: # https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183 if sys.platform == "win32": return None try: # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17": _, version = os.confstr("CS_GNU_LIBC_VERSION").split() except (AttributeError, OSError, ValueError): # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)... return None return version
[ "def", "glibc_version_string_confstr", "(", ")", ":", "# type: () -> Optional[str]", "# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely", "# to be broken or missing. This strategy is used in the standard library", "# platform module:", "# https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183", "if", "sys", ".", "platform", "==", "\"win32\"", ":", "return", "None", "try", ":", "# os.confstr(\"CS_GNU_LIBC_VERSION\") returns a string like \"glibc 2.17\":", "_", ",", "version", "=", "os", ".", "confstr", "(", "\"CS_GNU_LIBC_VERSION\"", ")", ".", "split", "(", ")", "except", "(", "AttributeError", ",", "OSError", ",", "ValueError", ")", ":", "# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...", "return", "None", "return", "version" ]
[ 14, 0 ]
[ 29, 18 ]
python
en
['en', 'en', 'en']
True
glibc_version_string_ctypes
()
Fallback implementation of glibc_version_string using ctypes.
Fallback implementation of glibc_version_string using ctypes.
def glibc_version_string_ctypes(): # type: () -> Optional[str] "Fallback implementation of glibc_version_string using ctypes." try: import ctypes except ImportError: return None # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen # manpage says, "If filename is NULL, then the returned handle is for the # main program". This way we can let the linker do the work to figure out # which libc our process is actually using. process_namespace = ctypes.CDLL(None) try: gnu_get_libc_version = process_namespace.gnu_get_libc_version except AttributeError: # Symbol doesn't exist -> therefore, we are not linked to # glibc. return None # Call gnu_get_libc_version, which returns a string like "2.5" gnu_get_libc_version.restype = ctypes.c_char_p version_str = gnu_get_libc_version() # py2 / py3 compatibility: if not isinstance(version_str, str): version_str = version_str.decode("ascii") return version_str
[ "def", "glibc_version_string_ctypes", "(", ")", ":", "# type: () -> Optional[str]", "try", ":", "import", "ctypes", "except", "ImportError", ":", "return", "None", "# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen", "# manpage says, \"If filename is NULL, then the returned handle is for the", "# main program\". This way we can let the linker do the work to figure out", "# which libc our process is actually using.", "process_namespace", "=", "ctypes", ".", "CDLL", "(", "None", ")", "try", ":", "gnu_get_libc_version", "=", "process_namespace", ".", "gnu_get_libc_version", "except", "AttributeError", ":", "# Symbol doesn't exist -> therefore, we are not linked to", "# glibc.", "return", "None", "# Call gnu_get_libc_version, which returns a string like \"2.5\"", "gnu_get_libc_version", ".", "restype", "=", "ctypes", ".", "c_char_p", "version_str", "=", "gnu_get_libc_version", "(", ")", "# py2 / py3 compatibility:", "if", "not", "isinstance", "(", "version_str", ",", "str", ")", ":", "version_str", "=", "version_str", ".", "decode", "(", "\"ascii\"", ")", "return", "version_str" ]
[ 32, 0 ]
[ 60, 22 ]
python
en
['en', 'en', 'en']
True
libc_ver
()
Try to determine the glibc version Returns a tuple of strings (lib, version) which default to empty strings in case the lookup fails.
Try to determine the glibc version
def libc_ver(): # type: () -> Tuple[str, str] """Try to determine the glibc version Returns a tuple of strings (lib, version) which default to empty strings in case the lookup fails. """ glibc_version = glibc_version_string() if glibc_version is None: return ("", "") else: return ("glibc", glibc_version)
[ "def", "libc_ver", "(", ")", ":", "# type: () -> Tuple[str, str]", "glibc_version", "=", "glibc_version_string", "(", ")", "if", "glibc_version", "is", "None", ":", "return", "(", "\"\"", ",", "\"\"", ")", "else", ":", "return", "(", "\"glibc\"", ",", "glibc_version", ")" ]
[ 80, 0 ]
[ 91, 39 ]
python
en
['en', 'en', 'en']
True
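A standalone sketch that inlines the primary detection strategy used above: os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.31" on glibc-based systems and is unavailable elsewhere.

import os

try:
    # On glibc-based Linux this yields e.g. ("glibc", "2.31"); split off the version.
    _, glibc_version = os.confstr("CS_GNU_LIBC_VERSION").split()
except (AttributeError, OSError, ValueError):
    # os.confstr missing (Windows), the name unsupported (macOS/musl), or a bad value.
    glibc_version = None

print(glibc_version)  # e.g. "2.31" on glibc Linux, None otherwise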
git_changed_recipes
(git_rev='HEAD@{1}', stop_rev=None, git_root='.')
Get the list of files changed in a git revision and return a list of package directories that have been modified. git_rev: if stop_rev is not provided, this represents the changes introduced by the given git rev. It is equivalent to git_rev=SOME_REV@{1} and stop_rev=SOME_REV stop_rev: when provided, this is the end of a range of revisions to consider. git_rev becomes the start revision. Note that the start revision is *one before* the actual start of examining commits for changes. In other words: git_rev=SOME_REV@{1} and stop_rev=SOME_REV => only SOME_REV git_rev=SOME_REV@{2} and stop_rev=SOME_REV => two commits, SOME_REV and the one before it
Get the list of files changed in a git revision and return a list of package directories that have been modified.
def git_changed_recipes(git_rev='HEAD@{1}', stop_rev=None, git_root='.'): """ Get the list of files changed in a git revision and return a list of package directories that have been modified. git_rev: if stop_rev is not provided, this represents the changes introduced by the given git rev. It is equivalent to git_rev=SOME_REV@{1} and stop_rev=SOME_REV stop_rev: when provided, this is the end of a range of revisions to consider. git_rev becomes the start revision. Note that the start revision is *one before* the actual start of examining commits for changes. In other words: git_rev=SOME_REV@{1} and stop_rev=SOME_REV => only SOME_REV git_rev=SOME_REV@{2} and stop_rev=SOME_REV => two commits, SOME_REV and the one before it """ changed_files = _git_changed_files(git_rev, stop_rev, git_root) recipe_dirs = _get_base_folders(git_root, changed_files) changed_submodules = git_changed_submodules(git_rev, stop_rev, git_root) new_submodules = git_new_submodules(git_rev, stop_rev, git_root) renamed_folders = git_renamed_folders(git_rev, stop_rev, git_root) return recipe_dirs + changed_submodules + new_submodules + renamed_folders
[ "def", "git_changed_recipes", "(", "git_rev", "=", "'HEAD@{1}'", ",", "stop_rev", "=", "None", ",", "git_root", "=", "'.'", ")", ":", "changed_files", "=", "_git_changed_files", "(", "git_rev", ",", "stop_rev", ",", "git_root", ")", "recipe_dirs", "=", "_get_base_folders", "(", "git_root", ",", "changed_files", ")", "changed_submodules", "=", "git_changed_submodules", "(", "git_rev", ",", "stop_rev", ",", "git_root", ")", "new_submodules", "=", "git_new_submodules", "(", "git_rev", ",", "stop_rev", ",", "git_root", ")", "renamed_folders", "=", "git_renamed_folders", "(", "git_rev", ",", "stop_rev", ",", "git_root", ")", "return", "recipe_dirs", "+", "changed_submodules", "+", "new_submodules", "+", "renamed_folders" ]
[ 142, 0 ]
[ 165, 78 ]
python
en
['en', 'error', 'th']
False
match_peer_job
(target_matchspec, other_m, this_m=None)
target_matchspec comes from the recipe. target_variant is the variant from the recipe whose deps we are matching. m is the peer job, which must satisfy conda and also have matching keys for any keys that are shared between target_variant and m.config.variant
target_matchspec comes from the recipe. target_variant is the variant from the recipe whose deps we are matching. m is the peer job, which must satisfy conda and also have matching keys for any keys that are shared between target_variant and m.config.variant
def match_peer_job(target_matchspec, other_m, this_m=None): """target_matchspec comes from the recipe. target_variant is the variant from the recipe whose deps we are matching. m is the peer job, which must satisfy conda and also have matching keys for any keys that are shared between target_variant and m.config.variant""" match_dict = {'name': other_m.name(), 'version': other_m.version(), 'build': _fix_any(other_m.build_id(), other_m.config), } if conda_interface.conda_43: match_dict = conda_interface.Dist(name=match_dict['name'], dist_name='-'.join((match_dict['name'], match_dict['version'], match_dict['build'])), version=match_dict['version'], build_string=match_dict['build'], build_number=int(other_m.build_number() or 0), channel=None) matchspec_matches = target_matchspec.match(match_dict) variant_matches = True if this_m: other_m_used_vars = other_m.get_used_loop_vars() for v in this_m.get_used_loop_vars(): if v in other_m_used_vars: variant_matches &= this_m.config.variant[v] == other_m.config.variant[v] return matchspec_matches and variant_matches
[ "def", "match_peer_job", "(", "target_matchspec", ",", "other_m", ",", "this_m", "=", "None", ")", ":", "match_dict", "=", "{", "'name'", ":", "other_m", ".", "name", "(", ")", ",", "'version'", ":", "other_m", ".", "version", "(", ")", ",", "'build'", ":", "_fix_any", "(", "other_m", ".", "build_id", "(", ")", ",", "other_m", ".", "config", ")", ",", "}", "if", "conda_interface", ".", "conda_43", ":", "match_dict", "=", "conda_interface", ".", "Dist", "(", "name", "=", "match_dict", "[", "'name'", "]", ",", "dist_name", "=", "'-'", ".", "join", "(", "(", "match_dict", "[", "'name'", "]", ",", "match_dict", "[", "'version'", "]", ",", "match_dict", "[", "'build'", "]", ")", ")", ",", "version", "=", "match_dict", "[", "'version'", "]", ",", "build_string", "=", "match_dict", "[", "'build'", "]", ",", "build_number", "=", "int", "(", "other_m", ".", "build_number", "(", ")", "or", "0", ")", ",", "channel", "=", "None", ")", "matchspec_matches", "=", "target_matchspec", ".", "match", "(", "match_dict", ")", "variant_matches", "=", "True", "if", "this_m", ":", "other_m_used_vars", "=", "other_m", ".", "get_used_loop_vars", "(", ")", "for", "v", "in", "this_m", ".", "get_used_loop_vars", "(", ")", ":", "if", "v", "in", "other_m_used_vars", ":", "variant_matches", "&=", "this_m", ".", "config", ".", "variant", "[", "v", "]", "==", "other_m", ".", "config", ".", "variant", "[", "v", "]", "return", "matchspec_matches", "and", "variant_matches" ]
[ 248, 0 ]
[ 272, 48 ]
python
en
['en', 'en', 'en']
True
add_intradependencies
(graph)
ensure that downstream packages wait for upstream build/test (not use existing available packages)
ensure that downstream packages wait for upstream build/test (not use existing available packages)
def add_intradependencies(graph): """ensure that downstream packages wait for upstream build/test (not use existing available packages)""" for node in graph.nodes(): if 'meta' not in graph.nodes[node]: continue # get build dependencies m = graph.nodes[node]['meta'] # this is pretty hard. Realistically, we would want to know # what the build and host platforms are on the build machine. # However, all we know right now is what machine we're actually # on (the one calculating the graph). test_requires = m.meta.get('test', {}).get('requires', []) log.info("node: {}".format(node)) log.info(" build: {}".format(m.ms_depends('build'))) log.info(" host: {}".format(m.ms_depends('host'))) log.info(" run: {}".format(m.ms_depends('run'))) log.info(" test: {}".format(test_requires)) deps = set(m.ms_depends('build') + m.ms_depends('host') + m.ms_depends('run') + [conda_interface.MatchSpec(dep) for dep in test_requires or []]) for dep in deps: name_matches = (n for n in graph.nodes() if graph.nodes[n]['meta'].name() == dep.name) for matching_node in name_matches: # are any of these build dependencies also nodes in our graph? if (match_peer_job(conda_interface.MatchSpec(dep), graph.nodes[matching_node]['meta'], m) and (node, matching_node) not in graph.edges()): # add edges if they don't already exist graph.add_edge(node, matching_node)
[ "def", "add_intradependencies", "(", "graph", ")", ":", "for", "node", "in", "graph", ".", "nodes", "(", ")", ":", "if", "'meta'", "not", "in", "graph", ".", "nodes", "[", "node", "]", ":", "continue", "# get build dependencies", "m", "=", "graph", ".", "nodes", "[", "node", "]", "[", "'meta'", "]", "# this is pretty hard. Realistically, we would want to know", "# what the build and host platforms are on the build machine.", "# However, all we know right now is what machine we're actually", "# on (the one calculating the graph).", "test_requires", "=", "m", ".", "meta", ".", "get", "(", "'test'", ",", "{", "}", ")", ".", "get", "(", "'requires'", ",", "[", "]", ")", "log", ".", "info", "(", "\"node: {}\"", ".", "format", "(", "node", ")", ")", "log", ".", "info", "(", "\" build: {}\"", ".", "format", "(", "m", ".", "ms_depends", "(", "'build'", ")", ")", ")", "log", ".", "info", "(", "\" host: {}\"", ".", "format", "(", "m", ".", "ms_depends", "(", "'host'", ")", ")", ")", "log", ".", "info", "(", "\" run: {}\"", ".", "format", "(", "m", ".", "ms_depends", "(", "'run'", ")", ")", ")", "log", ".", "info", "(", "\" test: {}\"", ".", "format", "(", "test_requires", ")", ")", "deps", "=", "set", "(", "m", ".", "ms_depends", "(", "'build'", ")", "+", "m", ".", "ms_depends", "(", "'host'", ")", "+", "m", ".", "ms_depends", "(", "'run'", ")", "+", "[", "conda_interface", ".", "MatchSpec", "(", "dep", ")", "for", "dep", "in", "test_requires", "or", "[", "]", "]", ")", "for", "dep", "in", "deps", ":", "name_matches", "=", "(", "n", "for", "n", "in", "graph", ".", "nodes", "(", ")", "if", "graph", ".", "nodes", "[", "n", "]", "[", "'meta'", "]", ".", "name", "(", ")", "==", "dep", ".", "name", ")", "for", "matching_node", "in", "name_matches", ":", "# are any of these build dependencies also nodes in our graph?", "if", "(", "match_peer_job", "(", "conda_interface", ".", "MatchSpec", "(", "dep", ")", ",", "graph", ".", "nodes", "[", "matching_node", "]", "[", "'meta'", "]", ",", "m", ")", "and", "(", "node", ",", "matching_node", ")", "not", "in", "graph", ".", "edges", "(", ")", ")", ":", "# add edges if they don't already exist", "graph", ".", "add_edge", "(", "node", ",", "matching_node", ")" ]
[ 275, 0 ]
[ 308, 55 ]
python
en
['en', 'en', 'en']
True
collapse_subpackage_nodes
(graph)
Collapse all subpackage nodes into their parent recipe node We get one node per output, but a given recipe can have multiple outputs. It's important for dependency ordering in the graph that the outputs exist independently, but once those dependencies are established, we need to collapse subpackages down to a single job for the top-level recipe.
Collapse all subpackage nodes into their parent recipe node
def collapse_subpackage_nodes(graph): """Collapse all subpackage nodes into their parent recipe node We get one node per output, but a given recipe can have multiple outputs. It's important for dependency ordering in the graph that the outputs exist independently, but once those dependencies are established, we need to collapse subpackages down to a single job for the top-level recipe.""" # group nodes by their recipe path first, then within those groups by their variant node_groups = {} for node in graph.nodes(): if 'meta' in graph.nodes[node]: meta = graph.nodes[node]['meta'] meta_path = meta.meta_path or meta.meta['extra']['parent_recipe']['path'] master = False master_meta = MetaData(meta_path, config=meta.config) if master_meta.name() == meta.name(): master = True group = node_groups.get(meta_path, {}) subgroup = group.get(HashableDict(meta.config.variant), {}) if master: if 'master' in subgroup: raise ValueError("tried to set more than one node in a group as master") subgroup['master'] = node else: sps = subgroup.get('subpackages', []) sps.append(node) subgroup['subpackages'] = sps group[HashableDict(meta.config.variant)] = subgroup node_groups[meta_path] = group for recipe_path, group in node_groups.items(): for variant, subgroup in group.items(): # if no node is the top-level recipe (only outputs, no top-level output), need to obtain # package/name from recipe given by common recipe path. subpackages = subgroup.get('subpackages') if 'master' not in subgroup: sp0 = graph.nodes[subpackages[0]] master_meta = MetaData(recipe_path, config=sp0['meta'].config) worker = sp0['worker'] master_key = package_key(master_meta, worker['label']) graph.add_node(master_key, meta=master_meta, worker=worker) master = graph.nodes[master_key] else: master = subgroup['master'] master_key = package_key(graph.nodes[master]['meta'], graph.nodes[master]['worker']['label']) # fold in dependencies for all of the other subpackages within a group. This is just # the intersection of the edges between all nodes. Store this on the "master" node. if subpackages: remap_edges = [edge for edge in graph.edges() if edge[1] in subpackages] for edge in remap_edges: # make sure not to add references to yourself if edge[0] != master_key: graph.add_edge(edge[0], master_key) graph.remove_edge(*edge) # remove nodes that have been folded into master nodes for subnode in subpackages: graph.remove_node(subnode)
[ "def", "collapse_subpackage_nodes", "(", "graph", ")", ":", "# group nodes by their recipe path first, then within those groups by their variant", "node_groups", "=", "{", "}", "for", "node", "in", "graph", ".", "nodes", "(", ")", ":", "if", "'meta'", "in", "graph", ".", "nodes", "[", "node", "]", ":", "meta", "=", "graph", ".", "nodes", "[", "node", "]", "[", "'meta'", "]", "meta_path", "=", "meta", ".", "meta_path", "or", "meta", ".", "meta", "[", "'extra'", "]", "[", "'parent_recipe'", "]", "[", "'path'", "]", "master", "=", "False", "master_meta", "=", "MetaData", "(", "meta_path", ",", "config", "=", "meta", ".", "config", ")", "if", "master_meta", ".", "name", "(", ")", "==", "meta", ".", "name", "(", ")", ":", "master", "=", "True", "group", "=", "node_groups", ".", "get", "(", "meta_path", ",", "{", "}", ")", "subgroup", "=", "group", ".", "get", "(", "HashableDict", "(", "meta", ".", "config", ".", "variant", ")", ",", "{", "}", ")", "if", "master", ":", "if", "'master'", "in", "subgroup", ":", "raise", "ValueError", "(", "\"tried to set more than one node in a group as master\"", ")", "subgroup", "[", "'master'", "]", "=", "node", "else", ":", "sps", "=", "subgroup", ".", "get", "(", "'subpackages'", ",", "[", "]", ")", "sps", ".", "append", "(", "node", ")", "subgroup", "[", "'subpackages'", "]", "=", "sps", "group", "[", "HashableDict", "(", "meta", ".", "config", ".", "variant", ")", "]", "=", "subgroup", "node_groups", "[", "meta_path", "]", "=", "group", "for", "recipe_path", ",", "group", "in", "node_groups", ".", "items", "(", ")", ":", "for", "variant", ",", "subgroup", "in", "group", ".", "items", "(", ")", ":", "# if no node is the top-level recipe (only outputs, no top-level output), need to obtain", "# package/name from recipe given by common recipe path.", "subpackages", "=", "subgroup", ".", "get", "(", "'subpackages'", ")", "if", "'master'", "not", "in", "subgroup", ":", "sp0", "=", "graph", ".", "nodes", "[", "subpackages", "[", "0", "]", "]", "master_meta", "=", "MetaData", "(", "recipe_path", ",", "config", "=", "sp0", "[", "'meta'", "]", ".", "config", ")", "worker", "=", "sp0", "[", "'worker'", "]", "master_key", "=", "package_key", "(", "master_meta", ",", "worker", "[", "'label'", "]", ")", "graph", ".", "add_node", "(", "master_key", ",", "meta", "=", "master_meta", ",", "worker", "=", "worker", ")", "master", "=", "graph", ".", "nodes", "[", "master_key", "]", "else", ":", "master", "=", "subgroup", "[", "'master'", "]", "master_key", "=", "package_key", "(", "graph", ".", "nodes", "[", "master", "]", "[", "'meta'", "]", ",", "graph", ".", "nodes", "[", "master", "]", "[", "'worker'", "]", "[", "'label'", "]", ")", "# fold in dependencies for all of the other subpackages within a group. This is just", "# the intersection of the edges between all nodes. Store this on the \"master\" node.", "if", "subpackages", ":", "remap_edges", "=", "[", "edge", "for", "edge", "in", "graph", ".", "edges", "(", ")", "if", "edge", "[", "1", "]", "in", "subpackages", "]", "for", "edge", "in", "remap_edges", ":", "# make sure not to add references to yourself", "if", "edge", "[", "0", "]", "!=", "master_key", ":", "graph", ".", "add_edge", "(", "edge", "[", "0", "]", ",", "master_key", ")", "graph", ".", "remove_edge", "(", "*", "edge", ")", "# remove nodes that have been folded into master nodes", "for", "subnode", "in", "subpackages", ":", "graph", ".", "remove_node", "(", "subnode", ")" ]
[ 311, 0 ]
[ 370, 46 ]
python
en
['en', 'en', 'en']
True
construct_graph
(recipes_dir, worker, run, conda_resolve, folders=(), git_rev=None, stop_rev=None, matrix_base_dir=None, config=None, finalize=False)
Construct a directed graph of dependencies from a directory of recipes run: whether to use build or run/test requirements for the graph. Avoids cycles. values: 'build' or 'test'. Actually, only 'build' matters - otherwise, it's run/test for any other value.
Construct a directed graph of dependencies from a directory of recipes
def construct_graph(recipes_dir, worker, run, conda_resolve, folders=(), git_rev=None, stop_rev=None, matrix_base_dir=None, config=None, finalize=False): ''' Construct a directed graph of dependencies from a directory of recipes run: whether to use build or run/test requirements for the graph. Avoids cycles. values: 'build' or 'test'. Actually, only 'build' matters - otherwise, it's run/test for any other value. ''' matrix_base_dir = matrix_base_dir or recipes_dir if not os.path.isabs(recipes_dir): recipes_dir = os.path.normpath(os.path.join(os.getcwd(), recipes_dir)) assert os.path.isdir(recipes_dir) if not folders: if not git_rev: git_rev = 'HEAD' folders = git_changed_recipes(git_rev, stop_rev=stop_rev, git_root=recipes_dir) graph = nx.DiGraph() for folder in folders: recipe_dir = os.path.join(recipes_dir, folder) if not os.path.isdir(recipe_dir): raise ValueError("Specified folder {} does not exist".format(recipe_dir)) add_recipe_to_graph(recipe_dir, graph, run, worker, conda_resolve, recipes_dir, config=config, finalize=finalize) add_intradependencies(graph) collapse_subpackage_nodes(graph) return graph
[ "def", "construct_graph", "(", "recipes_dir", ",", "worker", ",", "run", ",", "conda_resolve", ",", "folders", "=", "(", ")", ",", "git_rev", "=", "None", ",", "stop_rev", "=", "None", ",", "matrix_base_dir", "=", "None", ",", "config", "=", "None", ",", "finalize", "=", "False", ")", ":", "matrix_base_dir", "=", "matrix_base_dir", "or", "recipes_dir", "if", "not", "os", ".", "path", ".", "isabs", "(", "recipes_dir", ")", ":", "recipes_dir", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "getcwd", "(", ")", ",", "recipes_dir", ")", ")", "assert", "os", ".", "path", ".", "isdir", "(", "recipes_dir", ")", "if", "not", "folders", ":", "if", "not", "git_rev", ":", "git_rev", "=", "'HEAD'", "folders", "=", "git_changed_recipes", "(", "git_rev", ",", "stop_rev", "=", "stop_rev", ",", "git_root", "=", "recipes_dir", ")", "graph", "=", "nx", ".", "DiGraph", "(", ")", "for", "folder", "in", "folders", ":", "recipe_dir", "=", "os", ".", "path", ".", "join", "(", "recipes_dir", ",", "folder", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "recipe_dir", ")", ":", "raise", "ValueError", "(", "\"Specified folder {} does not exist\"", ".", "format", "(", "recipe_dir", ")", ")", "add_recipe_to_graph", "(", "recipe_dir", ",", "graph", ",", "run", ",", "worker", ",", "conda_resolve", ",", "recipes_dir", ",", "config", "=", "config", ",", "finalize", "=", "finalize", ")", "add_intradependencies", "(", "graph", ")", "collapse_subpackage_nodes", "(", "graph", ")", "return", "graph" ]
[ 373, 0 ]
[ 404, 16 ]
python
en
['en', 'error', 'th']
False
_installable
(name, version, build_string, config, conda_resolve)
Can Conda install the package we need?
Can Conda install the package we need?
def _installable(name, version, build_string, config, conda_resolve): """Can Conda install the package we need?""" ms = conda_interface.MatchSpec(" ".join([name, _fix_any(version, config), _fix_any(build_string, config)])) installable = conda_resolve.find_matches(ms) if not installable: log.warn("Dependency {name}, version {ver} is not installable from your " "channels: {channels} with subdir {subdir}. Seeing if we can build it..." .format(name=name, ver=version, channels=config.channel_urls, subdir=config.host_subdir)) return installable
[ "def", "_installable", "(", "name", ",", "version", ",", "build_string", ",", "config", ",", "conda_resolve", ")", ":", "ms", "=", "conda_interface", ".", "MatchSpec", "(", "\" \"", ".", "join", "(", "[", "name", ",", "_fix_any", "(", "version", ",", "config", ")", ",", "_fix_any", "(", "build_string", ",", "config", ")", "]", ")", ")", "installable", "=", "conda_resolve", ".", "find_matches", "(", "ms", ")", "if", "not", "installable", ":", "log", ".", "warn", "(", "\"Dependency {name}, version {ver} is not installable from your \"", "\"channels: {channels} with subdir {subdir}. Seeing if we can build it...\"", ".", "format", "(", "name", "=", "name", ",", "ver", "=", "version", ",", "channels", "=", "config", ".", "channel_urls", ",", "subdir", "=", "config", ".", "host_subdir", ")", ")", "return", "installable" ]
[ 413, 0 ]
[ 423, 22 ]
python
en
['en', 'en', 'en']
True
_buildable
(name, version, recipes_dir, worker, config, finalize)
Does the recipe that we have available produce the package we need?
Does the recipe that we have available produce the package we need?
def _buildable(name, version, recipes_dir, worker, config, finalize): """Does the recipe that we have available produce the package we need?""" possible_dirs = os.listdir(recipes_dir) packagename_re = re.compile(r'%s(?:\-[0-9]+[\.0-9\_\-a-zA-Z]*)?$' % name) likely_dirs = (dirname for dirname in possible_dirs if (os.path.isdir(os.path.join(recipes_dir, dirname)) and packagename_re.match(dirname))) metadata_tuples = [m for path in likely_dirs for (m, _, _) in _get_or_render_metadata(os.path.join(recipes_dir, path), worker, finalize=finalize)] # this is our target match ms = conda_interface.MatchSpec(" ".join([name, _fix_any(version, config)])) available = False for m in metadata_tuples: available = match_peer_job(ms, m) if available: break return m.meta_path if available else False
[ "def", "_buildable", "(", "name", ",", "version", ",", "recipes_dir", ",", "worker", ",", "config", ",", "finalize", ")", ":", "possible_dirs", "=", "os", ".", "listdir", "(", "recipes_dir", ")", "packagename_re", "=", "re", ".", "compile", "(", "r'%s(?:\\-[0-9]+[\\.0-9\\_\\-a-zA-Z]*)?$'", "%", "name", ")", "likely_dirs", "=", "(", "dirname", "for", "dirname", "in", "possible_dirs", "if", "(", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "recipes_dir", ",", "dirname", ")", ")", "and", "packagename_re", ".", "match", "(", "dirname", ")", ")", ")", "metadata_tuples", "=", "[", "m", "for", "path", "in", "likely_dirs", "for", "(", "m", ",", "_", ",", "_", ")", "in", "_get_or_render_metadata", "(", "os", ".", "path", ".", "join", "(", "recipes_dir", ",", "path", ")", ",", "worker", ",", "finalize", "=", "finalize", ")", "]", "# this is our target match", "ms", "=", "conda_interface", ".", "MatchSpec", "(", "\" \"", ".", "join", "(", "[", "name", ",", "_fix_any", "(", "version", ",", "config", ")", "]", ")", ")", "available", "=", "False", "for", "m", "in", "metadata_tuples", ":", "available", "=", "match_peer_job", "(", "ms", ",", "m", ")", "if", "available", ":", "break", "return", "m", ".", "meta_path", "if", "available", "else", "False" ]
[ 426, 0 ]
[ 444, 46 ]
python
en
['en', 'en', 'en']
True
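The non-obvious part of _buildable is the directory-matching regex; a standalone sketch of how it behaves on a few hypothetical folder names (the names are made up for illustration).

import re

name = 'numpy'
packagename_re = re.compile(r'%s(?:\-[0-9]+[\.0-9\_\-a-zA-Z]*)?$' % name)

for dirname in ['numpy', 'numpy-1.16.4', 'numpy-1.16.4-py37', 'numpy-devel', 'numpystuff']:
    print(dirname, bool(packagename_re.match(dirname)))
# numpy             True  - bare recipe folder
# numpy-1.16.4      True  - version-suffixed folder
# numpy-1.16.4-py37 True  - further suffixes are allowed by the character class
# numpy-devel       False - the suffix must start with a digit
# numpystuff        False - no separator before the extra text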
add_dependency_nodes_and_edges
(node, graph, run, worker, conda_resolve, recipes_dir=None, finalize=False, config=None)
add build nodes for any upstream deps that are not yet installable. Changes graph in place.
add build nodes for any upstream deps that are not yet installable
def add_dependency_nodes_and_edges(node, graph, run, worker, conda_resolve, recipes_dir=None, finalize=False, config=None): '''add build nodes for any upstream deps that are not yet installable changes graph in place. ''' metadata = graph.nodes[node]['meta'] # for plain test runs, ignore build reqs. deps = get_run_test_deps(metadata) recipes_dir = recipes_dir or os.getcwd() # cross: need to distinguish between build_subdir (build reqs) and host_subdir if run == 'build': deps.update(get_build_deps(metadata)) for dep, (version, build_str) in deps.items(): # we don't need worker info in _installable because it is already part of conda_resolve if not _installable(dep, version, build_str, metadata.config, conda_resolve): recipe_dir = _buildable(dep, version, recipes_dir, worker, metadata.config, finalize=finalize) if not recipe_dir: continue # raise ValueError("Dependency {} is not installable, and recipe (if " # " available) can't produce desired version ({})." # .format(dep, version)) dep_name = add_recipe_to_graph(recipe_dir, graph, 'build', worker, conda_resolve, recipes_dir, config=config, finalize=finalize) if not dep_name: raise ValueError("Tried to build recipe {0} as dependency, which is skipped " "in meta.yaml".format(recipe_dir)) graph.add_edge(node, dep_name)
[ "def", "add_dependency_nodes_and_edges", "(", "node", ",", "graph", ",", "run", ",", "worker", ",", "conda_resolve", ",", "recipes_dir", "=", "None", ",", "finalize", "=", "False", ",", "config", "=", "None", ")", ":", "metadata", "=", "graph", ".", "nodes", "[", "node", "]", "[", "'meta'", "]", "# for plain test runs, ignore build reqs.", "deps", "=", "get_run_test_deps", "(", "metadata", ")", "recipes_dir", "=", "recipes_dir", "or", "os", ".", "getcwd", "(", ")", "# cross: need to distinguish between build_subdir (build reqs) and host_subdir", "if", "run", "==", "'build'", ":", "deps", ".", "update", "(", "get_build_deps", "(", "metadata", ")", ")", "for", "dep", ",", "(", "version", ",", "build_str", ")", "in", "deps", ".", "items", "(", ")", ":", "# we don't need worker info in _installable because it is already part of conda_resolve", "if", "not", "_installable", "(", "dep", ",", "version", ",", "build_str", ",", "metadata", ".", "config", ",", "conda_resolve", ")", ":", "recipe_dir", "=", "_buildable", "(", "dep", ",", "version", ",", "recipes_dir", ",", "worker", ",", "metadata", ".", "config", ",", "finalize", "=", "finalize", ")", "if", "not", "recipe_dir", ":", "continue", "# raise ValueError(\"Dependency {} is not installable, and recipe (if \"", "# \" available) can't produce desired version ({}).\"", "# .format(dep, version))", "dep_name", "=", "add_recipe_to_graph", "(", "recipe_dir", ",", "graph", ",", "'build'", ",", "worker", ",", "conda_resolve", ",", "recipes_dir", ",", "config", "=", "config", ",", "finalize", "=", "finalize", ")", "if", "not", "dep_name", ":", "raise", "ValueError", "(", "\"Tried to build recipe {0} as dependency, which is skipped \"", "\"in meta.yaml\"", ".", "format", "(", "recipe_dir", ")", ")", "graph", ".", "add_edge", "(", "node", ",", "dep_name", ")" ]
[ 447, 0 ]
[ 477, 42 ]
python
en
['en', 'en', 'en']
True
expand_run
(graph, conda_resolve, worker, run, steps=0, max_downstream=5, recipes_dir=None, matrix_base_dir=None, finalize=False)
Apply the build label to any nodes that need (re)building or testing. "need rebuilding" means both packages that our target package depends on, but are not yet built, as well as packages that depend on our target package. For the latter, you can specify how many dependencies deep (steps) to follow that chain, since it can be quite large. If steps is -1, all downstream dependencies are rebuilt or retested
Apply the build label to any nodes that need (re)building or testing.
def expand_run(graph, conda_resolve, worker, run, steps=0, max_downstream=5, recipes_dir=None,
               matrix_base_dir=None, finalize=False):
    """Apply the build label to any nodes that need (re)building or testing.

    "need rebuilding" means both packages that our target package depends on,
    but are not yet built, as well as packages that depend on our target package.
    For the latter, you can specify how many dependencies deep (steps) to follow
    that chain, since it can be quite large.

    If steps is -1, all downstream dependencies are rebuilt or retested
    """
    downstream = 0
    initial_nodes = len(graph.nodes())

    # for build, we get test automatically.  Give people the max_downstream in terms
    #    of packages, not tasks
    # if run == 'build':
    #     max_downstream *= 2

    def expand_step(task_graph, full_graph, downstream):
        for node in task_graph.nodes():
            for predecessor in full_graph.predecessors(node):
                if max_downstream < 0 or (downstream - initial_nodes) < max_downstream:
                    add_recipe_to_graph(
                        os.path.dirname(full_graph.nodes[predecessor]['meta'].meta_path),
                        task_graph, run=run, worker=worker, conda_resolve=conda_resolve,
                        recipes_dir=recipes_dir, finalize=finalize)
                    downstream += 1
        return len(graph.nodes())

    # starting from our initial collection of dirty nodes, trace the tree down to packages
    #    that depend on the dirty nodes.  These packages may need to be rebuilt, or perhaps
    #    just tested.  The 'run' argument determines which.
    if steps != 0:
        if not recipes_dir:
            raise ValueError("recipes_dir is necessary if steps != 0.  "
                             "Please pass it as an argument.")
        # here we need to fully populate a graph that has the right build or run/test deps.
        #    We don't create this elsewhere because it is unnecessary and costly.

        # get all immediate subdirectories
        other_top_dirs = [d for d in os.listdir(recipes_dir)
                          if os.path.isdir(os.path.join(recipes_dir, d)) and
                          not d.startswith('.')]
        recipe_dirs = []
        for recipe_dir in other_top_dirs:
            try:
                find_recipe(os.path.join(recipes_dir, recipe_dir))
                recipe_dirs.append(recipe_dir)
            except IOError:
                pass

        # constructing the graph for build will automatically also include the test deps
        full_graph = construct_graph(recipes_dir, worker, 'build', folders=recipe_dirs,
                                     matrix_base_dir=matrix_base_dir, conda_resolve=conda_resolve)

        if steps >= 0:
            for step in range(steps):
                downstream = expand_step(graph, full_graph, downstream)
        else:
            while True:
                nodes = graph.nodes()
                downstream = expand_step(graph, full_graph, downstream)
                if nodes == graph.nodes():
                    break
[ "def", "expand_run", "(", "graph", ",", "conda_resolve", ",", "worker", ",", "run", ",", "steps", "=", "0", ",", "max_downstream", "=", "5", ",", "recipes_dir", "=", "None", ",", "matrix_base_dir", "=", "None", ",", "finalize", "=", "False", ")", ":", "downstream", "=", "0", "initial_nodes", "=", "len", "(", "graph", ".", "nodes", "(", ")", ")", "# for build, we get test automatically. Give people the max_downstream in terms", "# of packages, not tasks", "# if run == 'build':", "# max_downstream *= 2", "def", "expand_step", "(", "task_graph", ",", "full_graph", ",", "downstream", ")", ":", "for", "node", "in", "task_graph", ".", "nodes", "(", ")", ":", "for", "predecessor", "in", "full_graph", ".", "predecessors", "(", "node", ")", ":", "if", "max_downstream", "<", "0", "or", "(", "downstream", "-", "initial_nodes", ")", "<", "max_downstream", ":", "add_recipe_to_graph", "(", "os", ".", "path", ".", "dirname", "(", "full_graph", ".", "nodes", "[", "predecessor", "]", "[", "'meta'", "]", ".", "meta_path", ")", ",", "task_graph", ",", "run", "=", "run", ",", "worker", "=", "worker", ",", "conda_resolve", "=", "conda_resolve", ",", "recipes_dir", "=", "recipes_dir", ",", "finalize", "=", "finalize", ")", "downstream", "+=", "1", "return", "len", "(", "graph", ".", "nodes", "(", ")", ")", "# starting from our initial collection of dirty nodes, trace the tree down to packages", "# that depend on the dirty nodes. These packages may need to be rebuilt, or perhaps", "# just tested. The 'run' argument determines which.", "if", "steps", "!=", "0", ":", "if", "not", "recipes_dir", ":", "raise", "ValueError", "(", "\"recipes_dir is necessary if steps != 0. \"", "\"Please pass it as an argument.\"", ")", "# here we need to fully populate a graph that has the right build or run/test deps.", "# We don't create this elsewhere because it is unnecessary and costly.", "# get all immediate subdirectories", "other_top_dirs", "=", "[", "d", "for", "d", "in", "os", ".", "listdir", "(", "recipes_dir", ")", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "recipes_dir", ",", "d", ")", ")", "and", "not", "d", ".", "startswith", "(", "'.'", ")", "]", "recipe_dirs", "=", "[", "]", "for", "recipe_dir", "in", "other_top_dirs", ":", "try", ":", "find_recipe", "(", "os", ".", "path", ".", "join", "(", "recipes_dir", ",", "recipe_dir", ")", ")", "recipe_dirs", ".", "append", "(", "recipe_dir", ")", "except", "IOError", ":", "pass", "# constructing the graph for build will automatically also include the test deps", "full_graph", "=", "construct_graph", "(", "recipes_dir", ",", "worker", ",", "'build'", ",", "folders", "=", "recipe_dirs", ",", "matrix_base_dir", "=", "matrix_base_dir", ",", "conda_resolve", "=", "conda_resolve", ")", "if", "steps", ">=", "0", ":", "for", "step", "in", "range", "(", "steps", ")", ":", "downstream", "=", "expand_step", "(", "graph", ",", "full_graph", ",", "downstream", ")", "else", ":", "while", "True", ":", "nodes", "=", "graph", ".", "nodes", "(", ")", "downstream", "=", "expand_step", "(", "graph", ",", "full_graph", ",", "downstream", ")", "if", "nodes", "==", "graph", ".", "nodes", "(", ")", ":", "break" ]
[ 485, 0 ]
[ 550, 25 ]
python
en
['en', 'en', 'en']
True
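A minimal sketch of the downstream-expansion idea in expand_run, using a toy networkx graph in place of the real task graph. The node names and the stand-alone expand_once helper are illustrative assumptions, and the cap is simplified to a per-call count rather than the cross-call (downstream - initial_nodes) check above.

import networkx as nx

# Toy task graph: edges point from a package to the things it depends on,
# so the predecessors of a node are its downstream consumers.
full_graph = nx.DiGraph()
full_graph.add_edge('build-b', 'build-a')   # b depends on a
full_graph.add_edge('build-c', 'build-b')   # c depends on b

task_graph = nx.DiGraph()
task_graph.add_node('build-a')              # the "dirty" starting node

def expand_once(task_graph, full_graph, max_downstream=5):
    """Pull each consumer of an already-selected node into the task graph,
    up to max_downstream additions per pass (a simplified version of expand_step)."""
    added = 0
    for node in list(task_graph.nodes()):
        for consumer in full_graph.predecessors(node):
            if consumer not in task_graph and (max_downstream < 0 or added < max_downstream):
                task_graph.add_edge(consumer, node)
                added += 1
    return added

expand_once(task_graph, full_graph)          # adds build-b
expand_once(task_graph, full_graph)          # adds build-c
print(sorted(task_graph.nodes()))            # ['build-a', 'build-b', 'build-c']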
order_build
(graph)
Assumes that packages are in graph. Builds a temporary graph of relevant nodes and returns its topological sort. Relevant nodes are selected in a breadth-first traversal sourced at each pkg in packages.
Assumes that packages are in graph. Builds a temporary graph of relevant nodes and returns its topological sort.
def order_build(graph):
    '''
    Assumes that packages are in graph.  Builds a temporary graph of relevant nodes
    and returns its topological sort.

    Relevant nodes are selected in a breadth-first traversal sourced at each pkg in packages.
    '''
    reorder_cyclical_test_dependencies(graph)
    try:
        order = list(nx.topological_sort(graph))
        order.reverse()
    except nx.exception.NetworkXUnfeasible:
        raise ValueError("Cycles detected in graph: %s",
                         nx.find_cycle(graph, orientation='reverse'))

    return order
[ "def", "order_build", "(", "graph", ")", ":", "reorder_cyclical_test_dependencies", "(", "graph", ")", "try", ":", "order", "=", "list", "(", "nx", ".", "topological_sort", "(", "graph", ")", ")", "order", ".", "reverse", "(", ")", "except", "nx", ".", "exception", ".", "NetworkXUnfeasible", ":", "raise", "ValueError", "(", "\"Cycles detected in graph: %s\"", ",", "nx", ".", "find_cycle", "(", "graph", ",", "orientation", "=", "'reverse'", ")", ")", "return", "order" ]
[ 553, 0 ]
[ 569, 16 ]
python
en
['en', 'error', 'th']
False
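A small sketch of why the topological sort is reversed in order_build: with edges pointing from a package to its dependency (the convention used when edges are added above), reversing the sort yields a dependencies-first build order. The node names here are made up for illustration.

import networkx as nx

g = nx.DiGraph()
g.add_edge('build-app', 'build-lib')        # app depends on lib
g.add_edge('build-lib', 'build-compiler')   # lib depends on compiler

order = list(nx.topological_sort(g))
order.reverse()
print(order)  # ['build-compiler', 'build-lib', 'build-app'] -- dependencies first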
reorder_cyclical_test_dependencies
(graph)
By default, we make things that depend on earlier outputs for build wait for tests of the earlier thing to pass. However, circular dependencies spread across run/test and build/host can make this approach incorrect. For example:

A <-- B : B depends on A at build time
B <-- A : A depends on B at run time.

We can build A before B, but we cannot test A until B is built. To resolve this, we must reorder the graph edges:

build A <-- test A <--> build B <-- test B

must become:

build A <-- build B <-- test A <-- test B
By default, we make things that depend on earlier outputs for build wait for tests of the earlier thing to pass. However, circular dependencies spread across run/test and build/host can make this approach incorrect. For example:
def reorder_cyclical_test_dependencies(graph):
    """By default, we make things that depend on earlier outputs for build wait for tests of
    the earlier thing to pass.  However, circular dependencies spread across run/test and
    build/host can make this approach incorrect.  For example:

    A <-- B : B depends on A at build time
    B <-- A : A depends on B at run time.

    We can build A before B, but we cannot test A until B is built.  To resolve this, we must
    reorder the graph edges:

    build A <-- test A <--> build B <-- test B

    must become:

    build A <-- build B <-- test A <-- test B
    """
    # find all test nodes with edges to build nodes
    test_nodes = [node for node in graph.nodes() if node.startswith('test-')]
    edges_from_test_to_build = [edge for edge in graph.edges()
                                if edge[0] in test_nodes and edge[1].startswith('build-')]
    # find any of their inverses.  Entries here are of the form (test-A, build-B)
    circular_deps = [edge for edge in edges_from_test_to_build
                     if (edge[1], edge[0]) in graph.edges()]
    for (testA, buildB) in circular_deps:
        # remove build B dependence on test A
        graph.remove_edge(testA, buildB)
        # remove test B dependence on build B
        testB = buildB.replace('build-', 'test-', 1)
        graph.remove_edge(buildB, testB)
        # Add test B dependence on test A
        graph.add_edge(testA, testB)
        # make sure that test A still depends on build B
        assert (buildB, testA) in graph.edges()
[ "def", "reorder_cyclical_test_dependencies", "(", "graph", ")", ":", "# find all test nodes with edges to build nodes", "test_nodes", "=", "[", "node", "for", "node", "in", "graph", ".", "nodes", "(", ")", "if", "node", ".", "startswith", "(", "'test-'", ")", "]", "edges_from_test_to_build", "=", "[", "edge", "for", "edge", "in", "graph", ".", "edges", "(", ")", "if", "edge", "[", "0", "]", "in", "test_nodes", "and", "edge", "[", "1", "]", ".", "startswith", "(", "'build-'", ")", "]", "# find any of their inverses. Entries here are of the form (test-A, build-B)", "circular_deps", "=", "[", "edge", "for", "edge", "in", "edges_from_test_to_build", "if", "(", "edge", "[", "1", "]", ",", "edge", "[", "0", "]", ")", "in", "graph", ".", "edges", "(", ")", "]", "for", "(", "testA", ",", "buildB", ")", "in", "circular_deps", ":", "# remove build B dependence on test A", "graph", ".", "remove_edge", "(", "testA", ",", "buildB", ")", "# remove test B dependence on build B", "testB", "=", "buildB", ".", "replace", "(", "'build-'", ",", "'test-'", ",", "1", ")", "graph", ".", "remove_edge", "(", "buildB", ",", "testB", ")", "# Add test B dependence on test A", "graph", ".", "add_edge", "(", "testA", ",", "testB", ")", "# make sure that test A still depends on build B", "assert", "(", "buildB", ",", "testA", ")", "in", "graph", ".", "edges", "(", ")" ]
[ 572, 0 ]
[ 607, 47 ]
python
en
['en', 'en', 'en']
True
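A quick demonstration of the rewiring on a three-edge toy graph, assuming reorder_cyclical_test_dependencies above is in scope. The edges below match the circular pattern the function looks for; after the pass, the (test-A, build-B) and (build-B, test-B) edges are gone and test-B is chained behind test-A instead.

import networkx as nx

g = nx.DiGraph()
g.add_edges_from([
    ('test-A', 'build-B'),   # the edge treated as "build B waits on test A"
    ('build-B', 'test-A'),   # its inverse: the run-time link between A and B
    ('build-B', 'test-B'),   # test B's link to its own build
])

reorder_cyclical_test_dependencies(g)

print(sorted(g.edges()))
# [('build-B', 'test-A'), ('test-A', 'test-B')]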
_ConfigName
(context)
Return the short config name.
Return the short config name.
def _ConfigName(context):
  """Return the short config name."""
  return '{}-config'.format(context.env['deployment'])
[ "def", "_ConfigName", "(", "context", ")", ":", "return", "'{}-config'", ".", "format", "(", "context", ".", "env", "[", "'deployment'", "]", ")" ]
[ 63, 0 ]
[ 65, 54 ]
python
en
['en', 'en', 'en']
True
_ConfigUrl
(context)
Returns the full URL to the config, including hostname.
Returns the full URL to the config, including hostname.
def _ConfigUrl(context):
  """Returns the full URL to the config, including hostname."""
  return '{endpoint}/projects/{project}/configs/{config}'.format(
      endpoint=RTC_ENDPOINT,
      project=context.env['project'],
      config=_ConfigName(context))
[ "def", "_ConfigUrl", "(", "context", ")", ":", "return", "'{endpoint}/projects/{project}/configs/{config}'", ".", "format", "(", "endpoint", "=", "RTC_ENDPOINT", ",", "project", "=", "context", ".", "env", "[", "'project'", "]", ",", "config", "=", "_ConfigName", "(", "context", ")", ")" ]
[ 68, 0 ]
[ 73, 34 ]
python
en
['en', 'en', 'en']
True
_WaiterName
(context)
Returns the short waiter name.
Returns the short waiter name.
def _WaiterName(context):
  """Returns the short waiter name."""
  # This name is only used for the DM manifest entry. The actual waiter name
  # within RuntimeConfig is static, as it is scoped to the config resource.
  return '{}-software'.format(context.env['deployment'])
[ "def", "_WaiterName", "(", "context", ")", ":", "# This name is only used for the DM manifest entry. The actual waiter name", "# within RuntimeConfig is static, as it is scoped to the config resource.", "return", "'{}-software'", ".", "format", "(", "context", ".", "env", "[", "'deployment'", "]", ")" ]
[ 76, 0 ]
[ 80, 56 ]
python
en
['en', 'no', 'en']
True
_Timeout
(context)
Returns the timeout property or a default value if unspecified.
Returns the timeout property or a default value if unspecified.
def _Timeout(context):
  """Returns the timeout property or a default value if unspecified."""
  timeout = context.properties.get('timeout', DEFAULT_TIMEOUT)
  try:
    return str(int(timeout))
  except ValueError:
    raise PropertyError('Invalid timeout value: {}'.format(timeout))
[ "def", "_Timeout", "(", "context", ")", ":", "timeout", "=", "context", ".", "properties", ".", "get", "(", "'timeout'", ",", "DEFAULT_TIMEOUT", ")", "try", ":", "return", "str", "(", "int", "(", "timeout", ")", ")", "except", "ValueError", ":", "raise", "PropertyError", "(", "'Invalid timeout value: {}'", ".", "format", "(", "timeout", ")", ")" ]
[ 83, 0 ]
[ 89, 68 ]
python
en
['en', 'en', 'en']
True
_SuccessNumber
(context)
Returns the successNumber property or a default value if unspecified.
Returns the successNumber property or a default value if unspecified.
def _SuccessNumber(context):
  """Returns the successNumber property or a default value if unspecified."""
  number = context.properties.get('successNumber', DEFAULT_SUCCESS_NUMBER)
  try:
    number = int(number)
    if number < 1:
      raise PropertyError('successNumber value must be greater than 0.')
    return number
  except ValueError:
    raise PropertyError('Invalid successNumber value: {}'.format(number))
[ "def", "_SuccessNumber", "(", "context", ")", ":", "number", "=", "context", ".", "properties", ".", "get", "(", "'successNumber'", ",", "DEFAULT_SUCCESS_NUMBER", ")", "try", ":", "number", "=", "int", "(", "number", ")", "if", "number", "<", "1", ":", "raise", "PropertyError", "(", "'successNumber value must be greater than 0.'", ")", "return", "number", "except", "ValueError", ":", "raise", "PropertyError", "(", "'Invalid successNumber value: {}'", ".", "format", "(", "number", ")", ")" ]
[ 92, 0 ]
[ 101, 73 ]
python
en
['en', 'en', 'en']
True
_FailureNumber
(context)
Returns the failureNumber property or a default value if unspecified.
Returns the failureNumber property or a default value if unspecified.
def _FailureNumber(context):
  """Returns the failureNumber property or a default value if unspecified."""
  number = context.properties.get('failureNumber', DEFAULT_FAILURE_NUMBER)
  try:
    number = int(number)
    if number < 1:
      raise PropertyError('failureNumber value must be greater than 0.')
    return number
  except ValueError:
    raise PropertyError('Invalid failureNumber value: {}'.format(number))
[ "def", "_FailureNumber", "(", "context", ")", ":", "number", "=", "context", ".", "properties", ".", "get", "(", "'failureNumber'", ",", "DEFAULT_FAILURE_NUMBER", ")", "try", ":", "number", "=", "int", "(", "number", ")", "if", "number", "<", "1", ":", "raise", "PropertyError", "(", "'failureNumber value must be greater than 0.'", ")", "return", "number", "except", "ValueError", ":", "raise", "PropertyError", "(", "'Invalid failureNumber value: {}'", ".", "format", "(", "number", ")", ")" ]
[ 104, 0 ]
[ 113, 73 ]
python
en
['en', 'en', 'en']
True
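_Timeout, _SuccessNumber, and _FailureNumber above share one validation pattern: read a property with a default, coerce it to an int, and reject bad values with PropertyError. A minimal sketch of that pattern in isolation; the stand-in PropertyError, the dict-based properties argument, and the _positive_int_property helper are assumptions for illustration only.

class PropertyError(Exception):
    """Stand-in for the template's property validation error."""

def _positive_int_property(properties, name, default):
    """Read a property with a default, coerce to int, and require it to be >= 1."""
    value = properties.get(name, default)
    try:
        value = int(value)
    except ValueError:
        raise PropertyError('Invalid {} value: {}'.format(name, value))
    if value < 1:
        raise PropertyError('{} value must be greater than 0.'.format(name))
    return value

print(_positive_int_property({'successNumber': '2'}, 'successNumber', 1))  # 2
print(_positive_int_property({}, 'failureNumber', 1))                      # 1 (the default)
# _positive_int_property({'successNumber': 0}, 'successNumber', 1) raises PropertyError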
_WaiterDependsOn
(context)
Returns the waiterDependsOn property or an empty list if unspecified.
Returns the waiterDependsOn property or an empty list if unspecified.
def _WaiterDependsOn(context):
  """Returns the waiterDependsOn property or an empty list if unspecified."""
  depends_on = context.properties.get('waiterDependsOn', [])
  if not isinstance(depends_on, list):
    raise PropertyError('waiterDependsOn must be a list: {}'.format(depends_on))
  for item in depends_on:
    if not isinstance(item, str):
      raise PropertyError(
          'waiterDependsOn must be a list of strings: {}'.format(depends_on))
  return depends_on
[ "def", "_WaiterDependsOn", "(", "context", ")", ":", "depends_on", "=", "context", ".", "properties", ".", "get", "(", "'waiterDependsOn'", ",", "[", "]", ")", "if", "not", "isinstance", "(", "depends_on", ",", "list", ")", ":", "raise", "PropertyError", "(", "'waiterDependsOn must be a list: {}'", ".", "format", "(", "depends_on", ")", ")", "for", "item", "in", "depends_on", ":", "if", "not", "isinstance", "(", "item", ",", "str", ")", ":", "raise", "PropertyError", "(", "'waiterDependsOn must be a list of strings: {}'", ".", "format", "(", "depends_on", ")", ")", "return", "depends_on" ]
[ 116, 0 ]
[ 127, 19 ]
python
en
['en', 'en', 'en']
True
_RuntimeConfig
(context)
Constructs a RuntimeConfig resource.
Constructs a RuntimeConfig resource.
def _RuntimeConfig(context):
  """Constructs a RuntimeConfig resource."""
  deployment_name = context.env['deployment']
  return {
      'name': _ConfigName(context),
      'type': 'runtimeconfig.v1beta1.config',
      'properties': {
          'config': _ConfigName(context),
          'description': ('Holds software readiness status '
                          'for deployment {}').format(deployment_name),
      },
  }
[ "def", "_RuntimeConfig", "(", "context", ")", ":", "deployment_name", "=", "context", ".", "env", "[", "'deployment'", "]", "return", "{", "'name'", ":", "_ConfigName", "(", "context", ")", ",", "'type'", ":", "'runtimeconfig.v1beta1.config'", ",", "'properties'", ":", "{", "'config'", ":", "_ConfigName", "(", "context", ")", ",", "'description'", ":", "(", "'Holds software readiness status '", "'for deployment {}'", ")", ".", "format", "(", "deployment_name", ")", ",", "}", ",", "}" ]
[ 130, 0 ]
[ 142, 3 ]
python
en
['en', 'en', 'en']
True
_Waiter
(context)
Constructs a waiter resource.
Constructs a waiter resource.
def _Waiter(context):
  """Constructs a waiter resource."""
  waiter_timeout = _Timeout(context)
  return {
      'name': _WaiterName(context),
      'type': 'runtimeconfig.v1beta1.waiter',
      'metadata': {
          'dependsOn': _WaiterDependsOn(context),
      },
      'properties': {
          'parent': '$(ref.{}.name)'.format(_ConfigName(context)),
          'waiter': 'software',
          'timeout': '{}s'.format(waiter_timeout),
          'success': {
              'cardinality': {
                  'number': _SuccessNumber(context),
                  'path': '{}/success'.format(STATUS_PATH),
              },
          },
          'failure': {
              'cardinality': {
                  'number': _FailureNumber(context),
                  'path': '{}/failure'.format(STATUS_PATH),
              },
          },
      },
  }
[ "def", "_Waiter", "(", "context", ")", ":", "waiter_timeout", "=", "_Timeout", "(", "context", ")", "return", "{", "'name'", ":", "_WaiterName", "(", "context", ")", ",", "'type'", ":", "'runtimeconfig.v1beta1.waiter'", ",", "'metadata'", ":", "{", "'dependsOn'", ":", "_WaiterDependsOn", "(", "context", ")", ",", "}", ",", "'properties'", ":", "{", "'parent'", ":", "'$(ref.{}.name)'", ".", "format", "(", "_ConfigName", "(", "context", ")", ")", ",", "'waiter'", ":", "'software'", ",", "'timeout'", ":", "'{}s'", ".", "format", "(", "waiter_timeout", ")", ",", "'success'", ":", "{", "'cardinality'", ":", "{", "'number'", ":", "_SuccessNumber", "(", "context", ")", ",", "'path'", ":", "'{}/success'", ".", "format", "(", "STATUS_PATH", ")", ",", "}", ",", "}", ",", "'failure'", ":", "{", "'cardinality'", ":", "{", "'number'", ":", "_FailureNumber", "(", "context", ")", ",", "'path'", ":", "'{}/failure'", ".", "format", "(", "STATUS_PATH", ")", ",", "}", ",", "}", ",", "}", ",", "}" ]
[ 145, 0 ]
[ 172, 3 ]
python
en
['en', 'en', 'en']
True
GenerateConfig
(context)
Entry function to generate the DM config.
Entry function to generate the DM config.
def GenerateConfig(context):
  """Entry function to generate the DM config."""
  content = {
      'resources': [
          _RuntimeConfig(context),
          _Waiter(context),
      ],
      'outputs': [
          {
              'name': 'config-url',
              'value': _ConfigUrl(context)
          },
          {
              'name': 'variable-path',
              'value': STATUS_PATH
          },
      ]
  }
  return yaml.safe_dump(content)
[ "def", "GenerateConfig", "(", "context", ")", ":", "content", "=", "{", "'resources'", ":", "[", "_RuntimeConfig", "(", "context", ")", ",", "_Waiter", "(", "context", ")", ",", "]", ",", "'outputs'", ":", "[", "{", "'name'", ":", "'config-url'", ",", "'value'", ":", "_ConfigUrl", "(", "context", ")", "}", ",", "{", "'name'", ":", "'variable-path'", ",", "'value'", ":", "STATUS_PATH", "}", ",", "]", "}", "return", "yaml", ".", "safe_dump", "(", "content", ")" ]
[ 175, 0 ]
[ 193, 32 ]
python
en
['en', 'en', 'en']
True
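A rough sketch of how Deployment Manager exercises this template: it supplies a context carrying env and properties, and GenerateConfig returns YAML. The FakeContext class and the property values below are illustrative assumptions, and the call assumes the template module (with its RTC_ENDPOINT, STATUS_PATH, and default constants) is importable; in a real deployment the context object is provided by Deployment Manager itself.

class FakeContext(object):
    """Minimal stand-in for the Deployment Manager context object."""
    def __init__(self, deployment, project, properties=None):
        self.env = {'deployment': deployment, 'project': project}
        self.properties = properties or {}

ctx = FakeContext('my-vm', 'my-project', {'timeout': 300, 'successNumber': 1})
print(GenerateConfig(ctx))
# Expected shape (exact values depend on the module's constants):
#   resources: a runtimeconfig.v1beta1.config named my-vm-config and a
#              runtimeconfig.v1beta1.waiter named my-vm-software
#   outputs:   config-url and variable-path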
SVD.__init__
(self, train_file=None, test_file=None, output_file=None, factors=10, sep='\t', output_sep='\t', random_seed=None)
Matrix Factorization for rating prediction Matrix factorization models map both users and items to a joint latent factor space of dimensionality f, such that user-item interactions are modeled as inner products in that space. Usage:: >> MatrixFactorization(train, test).compute() :param train_file: File which contains the train set. This file needs to have at least 3 columns (user item feedback_value). :type train_file: str :param test_file: File which contains the test set. This file needs to have at least 3 columns (user item feedback_value). :type test_file: str, default None :param output_file: File with dir to write the final predictions :type output_file: str, default None :param factors: Number of latent factors per user/item :type factors: int, default 10 :param sep: Delimiter for input files :type sep: str, default '\t' :param output_sep: Delimiter for output file :type output_sep: str, default '\t' :param random_seed: Number of seed. Lock random numbers for reproducibility of experiments. :type random_seed: int, default None
Matrix Factorization for rating prediction
def __init__(self, train_file=None, test_file=None, output_file=None, factors=10, sep='\t',
             output_sep='\t', random_seed=None):
    """
    Matrix Factorization for rating prediction

    Matrix factorization models map both users and items to a joint latent factor space of
    dimensionality f, such that user-item interactions are modeled as inner products in that space.

    Usage::

        >> MatrixFactorization(train, test).compute()

    :param train_file: File which contains the train set. This file needs to have at least 3
    columns (user item feedback_value).
    :type train_file: str

    :param test_file: File which contains the test set. This file needs to have at least 3
    columns (user item feedback_value).
    :type test_file: str, default None

    :param output_file: File with dir to write the final predictions
    :type output_file: str, default None

    :param factors: Number of latent factors per user/item
    :type factors: int, default 10

    :param sep: Delimiter for input files
    :type sep: str, default '\t'

    :param output_sep: Delimiter for output file
    :type output_sep: str, default '\t'

    :param random_seed: Number of seed. Lock random numbers for reproducibility of experiments.
    :type random_seed: int, default None
    """

    super(SVD, self).__init__(train_file=train_file, test_file=test_file,
                              output_file=output_file, sep=sep, output_sep=output_sep)

    self.recommender_name = 'SVD'
    self.factors = factors

    if random_seed is not None:
        np.random.seed(random_seed)

    # internal vars
    self.feedback_triples = None
    self.prediction_matrix = None
[ "def", "__init__", "(", "self", ",", "train_file", "=", "None", ",", "test_file", "=", "None", ",", "output_file", "=", "None", ",", "factors", "=", "10", ",", "sep", "=", "'\\t'", ",", "output_sep", "=", "'\\t'", ",", "random_seed", "=", "None", ")", ":", "super", "(", "SVD", ",", "self", ")", ".", "__init__", "(", "train_file", "=", "train_file", ",", "test_file", "=", "test_file", ",", "output_file", "=", "output_file", ",", "sep", "=", "sep", ",", "output_sep", "=", "output_sep", ")", "self", ".", "recommender_name", "=", "'SVD'", "self", ".", "factors", "=", "factors", "if", "random_seed", "is", "not", "None", ":", "np", ".", "random", ".", "seed", "(", "random_seed", ")", "# internal vars", "self", ".", "feedback_triples", "=", "None", "self", ".", "prediction_matrix", "=", "None" ]
[ 25, 4 ]
[ 72, 37 ]
python
en
['en', 'error', 'th']
False
SVD.init_model
(self)
Method to treat and initialize the model
Method to treat and initialize the model
def init_model(self):
    """
    Method to treat and initialize the model
    """

    self.feedback_triples = []

    # Map interaction with ids
    for user in self.train_set['feedback']:
        for item in self.train_set['feedback'][user]:
            self.feedback_triples.append((self.user_to_user_id[user],
                                          self.item_to_item_id[item],
                                          self.train_set['feedback'][user][item]))

    self.create_matrix()
[ "def", "init_model", "(", "self", ")", ":", "self", ".", "feedback_triples", "=", "[", "]", "# Map interaction with ids", "for", "user", "in", "self", ".", "train_set", "[", "'feedback'", "]", ":", "for", "item", "in", "self", ".", "train_set", "[", "'feedback'", "]", "[", "user", "]", ":", "self", ".", "feedback_triples", ".", "append", "(", "(", "self", ".", "user_to_user_id", "[", "user", "]", ",", "self", ".", "item_to_item_id", "[", "item", "]", ",", "self", ".", "train_set", "[", "'feedback'", "]", "[", "user", "]", "[", "item", "]", ")", ")", "self", ".", "create_matrix", "(", ")" ]
[ 74, 4 ]
[ 88, 28 ]
python
en
['en', 'error', 'th']
False
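init_model collects (user_id, item_id, rating) triples and then calls create_matrix, which is not shown in this row. Below is one plausible sketch of that step, filling a users-by-items matrix with mean-centred ratings; this is an assumption about what create_matrix might do, not the library's actual code.

import numpy as np

def create_matrix_sketch(feedback_triples, n_users, n_items, mean_value):
    """Illustrative only: build a dense users x items matrix of mean-centred ratings."""
    matrix = np.zeros((n_users, n_items))
    for user_id, item_id, rating in feedback_triples:
        matrix[user_id, item_id] = rating - mean_value
    return matrix

triples = [(0, 0, 5.0), (0, 1, 3.0), (1, 0, 4.0)]
mean = np.mean([r for _, _, r in triples])
print(create_matrix_sketch(triples, n_users=2, n_items=2, mean_value=mean))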
SVD.fit
(self)
This method performs Singular Value Decomposition over the training data.
This method performs Singular Value Decomposition over the training data.
def fit(self):
    """
    This method performs Singular Value Decomposition over the training data.
    """

    u, s, vt = svds(self.matrix, k=self.factors)
    s_diagonal_matrix = np.diag(s)
    self.prediction_matrix = np.dot(np.dot(u, s_diagonal_matrix), vt)
[ "def", "fit", "(", "self", ")", ":", "u", ",", "s", ",", "vt", "=", "svds", "(", "self", ".", "matrix", ",", "k", "=", "self", ".", "factors", ")", "s_diagonal_matrix", "=", "np", ".", "diag", "(", "s", ")", "self", ".", "prediction_matrix", "=", "np", ".", "dot", "(", "np", ".", "dot", "(", "u", ",", "s_diagonal_matrix", ")", ",", "vt", ")" ]
[ 90, 4 ]
[ 98, 73 ]
python
en
['en', 'error', 'th']
False
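A self-contained sketch of what fit computes: a rank-k reconstruction of the rating matrix via truncated SVD. The toy matrix and k=2 are illustrative; in the class above, self.matrix and self.factors play these roles.

import numpy as np
from scipy.sparse.linalg import svds

# Toy mean-centred rating matrix (users x items).
matrix = np.array([[ 1.0, -0.5,  0.0],
                   [ 0.5,  0.0, -1.0],
                   [-1.0,  0.5,  1.0],
                   [ 0.0, -1.0,  0.5]])

k = 2  # number of latent factors, as in self.factors
u, s, vt = svds(matrix, k=k)
prediction_matrix = np.dot(np.dot(u, np.diag(s)), vt)

print(prediction_matrix.shape)                      # (4, 3)
print(np.abs(matrix - prediction_matrix).max())     # error of the rank-2 approximation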
SVD.predict_score
(self, u, i, cond=True)
Method to predict a single score for a pair (user, item)

:param u: User ID
:type u: int

:param i: Item ID
:type i: int

:param cond: Use max and min values of train set to limit score
:type cond: bool, default True

:return: Score generated for the pair (user, item)
:rtype: float
Method to predict a single score for a pair (user, item)
def predict_score(self, u, i, cond=True):
    """
    Method to predict a single score for a pair (user, item)

    :param u: User ID
    :type u: int

    :param i: Item ID
    :type i: int

    :param cond: Use max and min values of train set to limit score
    :type cond: bool, default True

    :return: Score generated for the pair (user, item)
    :rtype: float
    """

    rui = self.train_set["mean_value"] + self.prediction_matrix[u][i]

    if cond:
        if rui > self.train_set["max_value"]:
            rui = self.train_set["max_value"]
        elif rui < self.train_set["min_value"]:
            rui = self.train_set["min_value"]

    return rui
[ "def", "predict_score", "(", "self", ",", "u", ",", "i", ",", "cond", "=", "True", ")", ":", "rui", "=", "self", ".", "train_set", "[", "\"mean_value\"", "]", "+", "self", ".", "prediction_matrix", "[", "u", "]", "[", "i", "]", "if", "cond", ":", "if", "rui", ">", "self", ".", "train_set", "[", "\"max_value\"", "]", ":", "rui", "=", "self", ".", "train_set", "[", "\"max_value\"", "]", "elif", "rui", "<", "self", ".", "train_set", "[", "\"min_value\"", "]", ":", "rui", "=", "self", ".", "train_set", "[", "\"min_value\"", "]", "return", "rui" ]
[ 100, 4 ]
[ 126, 18 ]
python
en
['en', 'error', 'th']
False
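The conditional clamping in predict_score is equivalent to clipping the raw prediction to the training-set rating range. A one-line equivalent with numpy, using made-up bounds for illustration:

import numpy as np

min_value, max_value, mean_value = 1.0, 5.0, 3.5   # illustrative train-set statistics
raw = mean_value + 2.1                              # mean + low-rank term, as in predict_score
print(float(np.clip(raw, min_value, max_value)))    # 5.0 -- clipped to max_value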