Dataset schema (column name, type, observed length range):

nwo                 stringlengths    5 to 86
sha                 stringlengths    40 to 40
path                stringlengths    4 to 189
language            stringclasses    1 value
identifier          stringlengths    1 to 94
parameters          stringlengths    2 to 4.03k
argument_list       stringclasses    1 value
return_statement    stringlengths    0 to 11.5k
docstring           stringlengths    1 to 33.2k
docstring_summary   stringlengths    0 to 5.15k
docstring_tokens    list
function            stringlengths    34 to 151k
function_tokens     list
url                 stringlengths    90 to 278
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/distutils/archive_util.py
python
_get_uid
(name)
return None
Returns an uid, given a user name.
def _get_uid(name):
    """Returns an uid, given a user name."""
    if getpwnam is None or name is None:
        return None
    try:
        result = getpwnam(name)
    except KeyError:
        result = None
    if result is not None:
        return result[2]
    return None
[ "def", "_get_uid", "(", "name", ")", ":", "if", "getpwnam", "is", "None", "or", "name", "is", "None", ":", "return", "None", "try", ":", "result", "=", "getpwnam", "(", "name", ")", "except", "KeyError", ":", "result", "=", "None", "if", "result", "is", "not", "None", ":", "return", "result", "[", "2", "]", "return", "None" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/distutils/archive_util.py#L39-L49
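As an aside (not part of the record above): a minimal sketch of the lookup this helper wraps, using the standard pwd module directly on a POSIX system; the "root" user name is just an illustrative input.

```python
import pwd

try:
    uid = pwd.getpwnam("root")[2]  # pw_uid is field 2 of the passwd entry
except KeyError:
    uid = None  # unknown user name
print(uid)  # typically 0 for root
```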
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/stats/stats.py
python
ranksums
(x, y)
return RanksumsResult(z, prob)
Compute the Wilcoxon rank-sum statistic for two samples.

The Wilcoxon rank-sum test tests the null hypothesis that two sets of
measurements are drawn from the same distribution. The alternative
hypothesis is that values in one sample are more likely to be larger
than the values in the other sample. This test should be used to
compare two samples from continuous distributions. It does not handle
ties between measurements in x and y. For tie-handling and an optional
continuity correction see `scipy.stats.mannwhitneyu`.

Parameters
----------
x,y : array_like
    The data from the two samples

Returns
-------
statistic : float
    The test statistic under the large-sample approximation that the
    rank sum statistic is normally distributed
pvalue : float
    The two-sided p-value of the test

References
----------
.. [1] http://en.wikipedia.org/wiki/Wilcoxon_rank-sum_test
Compute the Wilcoxon rank-sum statistic for two samples.
[ "Compute", "the", "Wilcoxon", "rank", "-", "sum", "statistic", "for", "two", "samples", "." ]
def ranksums(x, y):
    """
    Compute the Wilcoxon rank-sum statistic for two samples.

    The Wilcoxon rank-sum test tests the null hypothesis that two sets
    of measurements are drawn from the same distribution. The alternative
    hypothesis is that values in one sample are more likely to be
    larger than the values in the other sample.

    This test should be used to compare two samples from continuous
    distributions. It does not handle ties between measurements
    in x and y. For tie-handling and an optional continuity correction
    see `scipy.stats.mannwhitneyu`.

    Parameters
    ----------
    x,y : array_like
        The data from the two samples

    Returns
    -------
    statistic : float
        The test statistic under the large-sample approximation that the
        rank sum statistic is normally distributed
    pvalue : float
        The two-sided p-value of the test

    References
    ----------
    .. [1] http://en.wikipedia.org/wiki/Wilcoxon_rank-sum_test
    """
    x, y = map(np.asarray, (x, y))
    n1 = len(x)
    n2 = len(y)
    alldata = np.concatenate((x, y))
    ranked = rankdata(alldata)
    x = ranked[:n1]
    s = np.sum(x, axis=0)
    expected = n1 * (n1+n2+1) / 2.0
    z = (s - expected) / np.sqrt(n1*n2*(n1+n2+1)/12.0)
    prob = 2 * distributions.norm.sf(abs(z))
    return RanksumsResult(z, prob)
[ "def", "ranksums", "(", "x", ",", "y", ")", ":", "x", ",", "y", "=", "map", "(", "np", ".", "asarray", ",", "(", "x", ",", "y", ")", ")", "n1", "=", "len", "(", "x", ")", "n2", "=", "len", "(", "y", ")", "alldata", "=", "np", ".", "concatenate", "(", "(", "x", ",", "y", ")", ")", "ranked", "=", "rankdata", "(", "alldata", ")", "x", "=", "ranked", "[", ":", "n1", "]", "s", "=", "np", ".", "sum", "(", "x", ",", "axis", "=", "0", ")", "expected", "=", "n1", "*", "(", "n1", "+", "n2", "+", "1", ")", "/", "2.0", "z", "=", "(", "s", "-", "expected", ")", "/", "np", ".", "sqrt", "(", "n1", "*", "n2", "*", "(", "n1", "+", "n2", "+", "1", ")", "/", "12.0", ")", "prob", "=", "2", "*", "distributions", ".", "norm", ".", "sf", "(", "abs", "(", "z", ")", ")", "return", "RanksumsResult", "(", "z", ",", "prob", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/stats/stats.py#L4705-L4748
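For illustration (not part of the record): a small usage sketch of this function through the public scipy.stats API, assuming SciPy and NumPy are installed; the sample data is made up.

```python
import numpy as np
from scipy.stats import ranksums

rng = np.random.default_rng(0)
x = rng.normal(loc=0.0, size=50)   # sample 1
y = rng.normal(loc=0.5, size=50)   # sample 2, drawn from a shifted distribution
stat, pvalue = ranksums(x, y)      # z statistic and two-sided p-value
print(stat, pvalue)
```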
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/AWSPythonSDK/1.5.8/docutils/utils/math/math2html.py
python
FilePosition.current
(self)
return self.reader.currentline()[self.pos]
Return the current character, assuming we are not out.
def current(self):
    "Return the current character, assuming we are not out."
    if self.pos == len(self.reader.currentline()):
        return '\n'
    if self.pos > len(self.reader.currentline()):
        Trace.error('Out of the line ' + self.reader.currentline()
                    + ': ' + unicode(self.pos))
        return '*'
    return self.reader.currentline()[self.pos]
[ "def", "current", "(", "self", ")", ":", "if", "self", ".", "pos", "==", "len", "(", "self", ".", "reader", ".", "currentline", "(", ")", ")", ":", "return", "'\\n'", "if", "self", ".", "pos", ">", "len", "(", "self", ".", "reader", ".", "currentline", "(", ")", ")", ":", "Trace", ".", "error", "(", "'Out of the line '", "+", "self", ".", "reader", ".", "currentline", "(", ")", "+", "': '", "+", "unicode", "(", "self", ".", "pos", ")", ")", "return", "'*'", "return", "self", ".", "reader", ".", "currentline", "(", ")", "[", "self", ".", "pos", "]" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/docutils/utils/math/math2html.py#L2156-L2163
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/pickletools.py
python
read_bytes8
(f)
r""" >>> import io, struct, sys >>> read_bytes8(io.BytesIO(b"\x00\x00\x00\x00\x00\x00\x00\x00abc")) b'' >>> read_bytes8(io.BytesIO(b"\x03\x00\x00\x00\x00\x00\x00\x00abcdef")) b'abc' >>> bigsize8 = struct.pack("<Q", sys.maxsize//3) >>> read_bytes8(io.BytesIO(bigsize8 + b"abcdef")) #doctest: +ELLIPSIS Traceback (most recent call last): ... ValueError: expected ... bytes in a bytes8, but only 6 remain
r""" >>> import io, struct, sys >>> read_bytes8(io.BytesIO(b"\x00\x00\x00\x00\x00\x00\x00\x00abc")) b'' >>> read_bytes8(io.BytesIO(b"\x03\x00\x00\x00\x00\x00\x00\x00abcdef")) b'abc' >>> bigsize8 = struct.pack("<Q", sys.maxsize//3) >>> read_bytes8(io.BytesIO(bigsize8 + b"abcdef")) #doctest: +ELLIPSIS Traceback (most recent call last): ... ValueError: expected ... bytes in a bytes8, but only 6 remain
[ "r", ">>>", "import", "io", "struct", "sys", ">>>", "read_bytes8", "(", "io", ".", "BytesIO", "(", "b", "\\", "x00", "\\", "x00", "\\", "x00", "\\", "x00", "\\", "x00", "\\", "x00", "\\", "x00", "\\", "x00abc", "))", "b", ">>>", "read_bytes8", "(", "io", ".", "BytesIO", "(", "b", "\\", "x03", "\\", "x00", "\\", "x00", "\\", "x00", "\\", "x00", "\\", "x00", "\\", "x00", "\\", "x00abcdef", "))", "b", "abc", ">>>", "bigsize8", "=", "struct", ".", "pack", "(", "<Q", "sys", ".", "maxsize", "//", "3", ")", ">>>", "read_bytes8", "(", "io", ".", "BytesIO", "(", "bigsize8", "+", "b", "abcdef", "))", "#doctest", ":", "+", "ELLIPSIS", "Traceback", "(", "most", "recent", "call", "last", ")", ":", "...", "ValueError", ":", "expected", "...", "bytes", "in", "a", "bytes8", "but", "only", "6", "remain" ]
def read_bytes8(f):
    r"""
    >>> import io, struct, sys
    >>> read_bytes8(io.BytesIO(b"\x00\x00\x00\x00\x00\x00\x00\x00abc"))
    b''
    >>> read_bytes8(io.BytesIO(b"\x03\x00\x00\x00\x00\x00\x00\x00abcdef"))
    b'abc'
    >>> bigsize8 = struct.pack("<Q", sys.maxsize//3)
    >>> read_bytes8(io.BytesIO(bigsize8 + b"abcdef")) #doctest: +ELLIPSIS
    Traceback (most recent call last):
    ...
    ValueError: expected ... bytes in a bytes8, but only 6 remain
    """
    n = read_uint8(f)
    assert n >= 0
    if n > sys.maxsize:
        raise ValueError("bytes8 byte count > sys.maxsize: %d" % n)
    data = f.read(n)
    if len(data) == n:
        return data
    raise ValueError("expected %d bytes in a bytes8, but only %d remain" %
                     (n, len(data)))
[ "def", "read_bytes8", "(", "f", ")", ":", "n", "=", "read_uint8", "(", "f", ")", "assert", "n", ">=", "0", "if", "n", ">", "sys", ".", "maxsize", ":", "raise", "ValueError", "(", "\"bytes8 byte count > sys.maxsize: %d\"", "%", "n", ")", "data", "=", "f", ".", "read", "(", "n", ")", "if", "len", "(", "data", ")", "==", "n", ":", "return", "data", "raise", "ValueError", "(", "\"expected %d bytes in a bytes8, but only %d remain\"", "%", "(", "n", ",", "len", "(", "data", ")", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/pickletools.py#L534-L556
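A short sketch of the wire format this reader consumes: an 8-byte little-endian unsigned length prefix followed by the payload. Not part of the record; the payload bytes are made up.

```python
import io
import struct

payload = b"hello"
frame = struct.pack("<Q", len(payload)) + payload  # length prefix, then data
# read_bytes8 would consume the prefix and return the payload:
# read_bytes8(io.BytesIO(frame)) -> b"hello"
print(frame)
```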
yyzybb537/libgo
4af17b7c67643c4d54aa354dcc77963ea07847d0
third_party/boost.context/tools/build/src/build/generators.py
python
Generator.clone
(self, new_id, new_toolset_properties)
return self.__class__ (new_id,
                       self.composing_,
                       self.source_types_,
                       self.target_types_and_names_,
                       # Note: this does not remove any subfeatures of <toolset>
                       # which might cause problems
                       property.change (self.requirements_, '<toolset>') + new_toolset_properties)
Returns another generator which differs from $(self) in:
- id
- value to <toolset> feature in properties
def clone (self, new_id, new_toolset_properties):
    """ Returns another generator which differers from $(self) in
        - id
        - value to <toolset> feature in properties
    """
    assert isinstance(new_id, basestring)
    assert is_iterable_typed(new_toolset_properties, basestring)
    return self.__class__ (new_id,
                           self.composing_,
                           self.source_types_,
                           self.target_types_and_names_,
                           # Note: this does not remove any subfeatures of <toolset>
                           # which might cause problems
                           property.change (self.requirements_, '<toolset>') + new_toolset_properties)
[ "def", "clone", "(", "self", ",", "new_id", ",", "new_toolset_properties", ")", ":", "assert", "isinstance", "(", "new_id", ",", "basestring", ")", "assert", "is_iterable_typed", "(", "new_toolset_properties", ",", "basestring", ")", "return", "self", ".", "__class__", "(", "new_id", ",", "self", ".", "composing_", ",", "self", ".", "source_types_", ",", "self", ".", "target_types_and_names_", ",", "# Note: this does not remove any subfeatures of <toolset>", "# which might cause problems", "property", ".", "change", "(", "self", ".", "requirements_", ",", "'<toolset>'", ")", "+", "new_toolset_properties", ")" ]
https://github.com/yyzybb537/libgo/blob/4af17b7c67643c4d54aa354dcc77963ea07847d0/third_party/boost.context/tools/build/src/build/generators.py#L230-L243
pytorch/pytorch
7176c92687d3cc847cc046bf002269c6949a21c2
benchmarks/distributed/rpc/rl/agent.py
python
Policy.__init__
(self, in_features, nlayers, out_features)
r""" Inits policy class Args: in_features (int): Number of input features the model takes nlayers (int): Number of layers in the model out_features (int): Number of features the model outputs
r""" Inits policy class Args: in_features (int): Number of input features the model takes nlayers (int): Number of layers in the model out_features (int): Number of features the model outputs
[ "r", "Inits", "policy", "class", "Args", ":", "in_features", "(", "int", ")", ":", "Number", "of", "input", "features", "the", "model", "takes", "nlayers", "(", "int", ")", ":", "Number", "of", "layers", "in", "the", "model", "out_features", "(", "int", ")", ":", "Number", "of", "features", "the", "model", "outputs" ]
def __init__(self, in_features, nlayers, out_features):
    r"""
    Inits policy class
    Args:
        in_features (int): Number of input features the model takes
        nlayers (int): Number of layers in the model
        out_features (int): Number of features the model outputs
    """
    super(Policy, self).__init__()
    self.model = nn.Sequential(
        nn.Flatten(1, -1),
        nn.Linear(in_features, out_features),
        *[nn.Linear(out_features, out_features) for _ in range(nlayers)]
    )
    self.dim = 0
[ "def", "__init__", "(", "self", ",", "in_features", ",", "nlayers", ",", "out_features", ")", ":", "super", "(", "Policy", ",", "self", ")", ".", "__init__", "(", ")", "self", ".", "model", "=", "nn", ".", "Sequential", "(", "nn", ".", "Flatten", "(", "1", ",", "-", "1", ")", ",", "nn", ".", "Linear", "(", "in_features", ",", "out_features", ")", ",", "*", "[", "nn", ".", "Linear", "(", "out_features", ",", "out_features", ")", "for", "_", "in", "range", "(", "nlayers", ")", "]", ")", "self", ".", "dim", "=", "0" ]
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/benchmarks/distributed/rpc/rl/agent.py#L17-L32
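A minimal sketch, assuming a working PyTorch install, of the layer stack this constructor builds; the sizes are arbitrary examples, not values from the benchmark.

```python
import torch
import torch.nn as nn

in_features, nlayers, out_features = 16, 2, 4
model = nn.Sequential(
    nn.Flatten(1, -1),                     # flatten everything after the batch dim
    nn.Linear(in_features, out_features),  # input projection
    *[nn.Linear(out_features, out_features) for _ in range(nlayers)],
)
print(model(torch.zeros(1, 4, 4)).shape)  # torch.Size([1, 4])
```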
Tencent/CMONGO
c40380caa14e05509f46993aa8b8da966b09b0b5
src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Subst.py
python
scons_subst
(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None)
return result
Expand a string or list containing construction variable substitutions. This is the work-horse function for substitutions in file names and the like. The companion scons_subst_list() function (below) handles separating command lines into lists of arguments, so see that function if that's what you're looking for.
Expand a string or list containing construction variable substitutions.
[ "Expand", "a", "string", "or", "list", "containing", "construction", "variable", "substitutions", "." ]
def scons_subst(strSubst, env, mode=SUBST_RAW, target=None, source=None, gvars={}, lvars={}, conv=None):
    """Expand a string or list containing construction variable
    substitutions.

    This is the work-horse function for substitutions in file names
    and the like. The companion scons_subst_list() function (below)
    handles separating command lines into lists of arguments, so see
    that function if that's what you're looking for.
    """
    if isinstance(strSubst, str) and strSubst.find('$') < 0:
        return strSubst

    class StringSubber(object):
        """A class to construct the results of a scons_subst() call.

        This binds a specific construction environment, mode, target and
        source with two methods (substitute() and expand()) that handle
        the expansion.
        """
        def __init__(self, env, mode, conv, gvars):
            self.env = env
            self.mode = mode
            self.conv = conv
            self.gvars = gvars

        def expand(self, s, lvars):
            """Expand a single "token" as necessary, returning an
            appropriate string containing the expansion.

            This handles expanding different types of things (strings,
            lists, callables) appropriately. It calls the wrapper
            substitute() method to re-expand things as necessary, so that
            the results of expansions of side-by-side strings still get
            re-evaluated separately, not smushed together.
            """
            if is_String(s):
                try:
                    s0, s1 = s[:2]
                except (IndexError, ValueError):
                    return s
                if s0 != '$':
                    return s
                if s1 == '$':
                    return '$'
                elif s1 in '()':
                    return s
                else:
                    key = s[1:]
                    if key[0] == '{' or key.find('.') >= 0:
                        if key[0] == '{':
                            key = key[1:-1]
                        try:
                            s = eval(key, self.gvars, lvars)
                        except KeyboardInterrupt:
                            raise
                        except Exception, e:
                            if e.__class__ in AllowableExceptions:
                                return ''
                            raise_exception(e, lvars['TARGETS'], s)
                    else:
                        if key in lvars:
                            s = lvars[key]
                        elif key in self.gvars:
                            s = self.gvars[key]
                        elif not NameError in AllowableExceptions:
                            raise_exception(NameError(key), lvars['TARGETS'], s)
                        else:
                            return ''

                    # Before re-expanding the result, handle
                    # recursive expansion by copying the local
                    # variable dictionary and overwriting a null
                    # string for the value of the variable name
                    # we just expanded.
                    #
                    # This could potentially be optimized by only
                    # copying lvars when s contains more expansions,
                    # but lvars is usually supposed to be pretty
                    # small, and deeply nested variable expansions
                    # are probably more the exception than the norm,
                    # so it should be tolerable for now.
                    lv = lvars.copy()
                    var = key.split('.')[0]
                    lv[var] = ''
                    return self.substitute(s, lv)
            elif is_Sequence(s):
                def func(l, conv=self.conv, substitute=self.substitute, lvars=lvars):
                    return conv(substitute(l, lvars))
                return list(map(func, s))
            elif callable(s):
                try:
                    s = s(target=lvars['TARGETS'],
                          source=lvars['SOURCES'],
                          env=self.env,
                          for_signature=(self.mode != SUBST_CMD))
                except TypeError:
                    # This probably indicates that it's a callable
                    # object that doesn't match our calling arguments
                    # (like an Action).
                    if self.mode == SUBST_RAW:
                        return s
                    s = self.conv(s)
                return self.substitute(s, lvars)
            elif s is None:
                return ''
            else:
                return s

        def substitute(self, args, lvars):
            """Substitute expansions in an argument or list of arguments.

            This serves as a wrapper for splitting up a string into
            separate tokens.
            """
            if is_String(args) and not isinstance(args, CmdStringHolder):
                args = str(args)        # In case it's a UserString.
                try:
                    def sub_match(match):
                        return self.conv(self.expand(match.group(1), lvars))
                    result = _dollar_exps.sub(sub_match, args)
                except TypeError:
                    # If the internal conversion routine doesn't return
                    # strings (it could be overridden to return Nodes, for
                    # example), then the 1.5.2 re module will throw this
                    # exception. Back off to a slower, general-purpose
                    # algorithm that works for all data types.
                    args = _separate_args.findall(args)
                    result = []
                    for a in args:
                        result.append(self.conv(self.expand(a, lvars)))
                    if len(result) == 1:
                        result = result[0]
                    else:
                        result = ''.join(map(str, result))
                return result
            else:
                return self.expand(args, lvars)

    if conv is None:
        conv = _strconv[mode]

    # Doing this every time is a bit of a waste, since the Executor
    # has typically already populated the OverrideEnvironment with
    # $TARGET/$SOURCE variables. We're keeping this (for now), though,
    # because it supports existing behavior that allows us to call
    # an Action directly with an arbitrary target+source pair, which
    # we use in Tool/tex.py to handle calling $BIBTEX when necessary.
    # If we dropped that behavior (or found another way to cover it),
    # we could get rid of this call completely and just rely on the
    # Executor setting the variables.
    if 'TARGET' not in lvars:
        d = subst_dict(target, source)
        if d:
            lvars = lvars.copy()
            lvars.update(d)

    # We're (most likely) going to eval() things. If Python doesn't
    # find a __builtins__ value in the global dictionary used for eval(),
    # it copies the current global values for you. Avoid this by
    # setting it explicitly and then deleting, so we don't pollute the
    # construction environment Dictionary(ies) that are typically used
    # for expansion.
    gvars['__builtins__'] = __builtins__

    ss = StringSubber(env, mode, conv, gvars)
    result = ss.substitute(strSubst, lvars)

    try:
        del gvars['__builtins__']
    except KeyError:
        pass

    if is_String(result):
        # Remove $(-$) pairs and any stuff in between,
        # if that's appropriate.
        remove = _regex_remove[mode]
        if remove:
            result = remove.sub('', result)
        if mode != SUBST_RAW:
            # Compress strings of white space characters into
            # a single space.
            result = _space_sep.sub(' ', result).strip()
    elif is_Sequence(result):
        remove = _list_remove[mode]
        if remove:
            result = remove(result)

    return result
[ "def", "scons_subst", "(", "strSubst", ",", "env", ",", "mode", "=", "SUBST_RAW", ",", "target", "=", "None", ",", "source", "=", "None", ",", "gvars", "=", "{", "}", ",", "lvars", "=", "{", "}", ",", "conv", "=", "None", ")", ":", "if", "isinstance", "(", "strSubst", ",", "str", ")", "and", "strSubst", ".", "find", "(", "'$'", ")", "<", "0", ":", "return", "strSubst", "class", "StringSubber", "(", "object", ")", ":", "\"\"\"A class to construct the results of a scons_subst() call.\n\n This binds a specific construction environment, mode, target and\n source with two methods (substitute() and expand()) that handle\n the expansion.\n \"\"\"", "def", "__init__", "(", "self", ",", "env", ",", "mode", ",", "conv", ",", "gvars", ")", ":", "self", ".", "env", "=", "env", "self", ".", "mode", "=", "mode", "self", ".", "conv", "=", "conv", "self", ".", "gvars", "=", "gvars", "def", "expand", "(", "self", ",", "s", ",", "lvars", ")", ":", "\"\"\"Expand a single \"token\" as necessary, returning an\n appropriate string containing the expansion.\n\n This handles expanding different types of things (strings,\n lists, callables) appropriately. It calls the wrapper\n substitute() method to re-expand things as necessary, so that\n the results of expansions of side-by-side strings still get\n re-evaluated separately, not smushed together.\n \"\"\"", "if", "is_String", "(", "s", ")", ":", "try", ":", "s0", ",", "s1", "=", "s", "[", ":", "2", "]", "except", "(", "IndexError", ",", "ValueError", ")", ":", "return", "s", "if", "s0", "!=", "'$'", ":", "return", "s", "if", "s1", "==", "'$'", ":", "return", "'$'", "elif", "s1", "in", "'()'", ":", "return", "s", "else", ":", "key", "=", "s", "[", "1", ":", "]", "if", "key", "[", "0", "]", "==", "'{'", "or", "key", ".", "find", "(", "'.'", ")", ">=", "0", ":", "if", "key", "[", "0", "]", "==", "'{'", ":", "key", "=", "key", "[", "1", ":", "-", "1", "]", "try", ":", "s", "=", "eval", "(", "key", ",", "self", ".", "gvars", ",", "lvars", ")", "except", "KeyboardInterrupt", ":", "raise", "except", "Exception", ",", "e", ":", "if", "e", ".", "__class__", "in", "AllowableExceptions", ":", "return", "''", "raise_exception", "(", "e", ",", "lvars", "[", "'TARGETS'", "]", ",", "s", ")", "else", ":", "if", "key", "in", "lvars", ":", "s", "=", "lvars", "[", "key", "]", "elif", "key", "in", "self", ".", "gvars", ":", "s", "=", "self", ".", "gvars", "[", "key", "]", "elif", "not", "NameError", "in", "AllowableExceptions", ":", "raise_exception", "(", "NameError", "(", "key", ")", ",", "lvars", "[", "'TARGETS'", "]", ",", "s", ")", "else", ":", "return", "''", "# Before re-expanding the result, handle", "# recursive expansion by copying the local", "# variable dictionary and overwriting a null", "# string for the value of the variable name", "# we just expanded.", "#", "# This could potentially be optimized by only", "# copying lvars when s contains more expansions,", "# but lvars is usually supposed to be pretty", "# small, and deeply nested variable expansions", "# are probably more the exception than the norm,", "# so it should be tolerable for now.", "lv", "=", "lvars", ".", "copy", "(", ")", "var", "=", "key", ".", "split", "(", "'.'", ")", "[", "0", "]", "lv", "[", "var", "]", "=", "''", "return", "self", ".", "substitute", "(", "s", ",", "lv", ")", "elif", "is_Sequence", "(", "s", ")", ":", "def", "func", "(", "l", ",", "conv", "=", "self", ".", "conv", ",", "substitute", "=", "self", ".", "substitute", ",", "lvars", "=", "lvars", ")", ":", "return", "conv", "(", 
"substitute", "(", "l", ",", "lvars", ")", ")", "return", "list", "(", "map", "(", "func", ",", "s", ")", ")", "elif", "callable", "(", "s", ")", ":", "try", ":", "s", "=", "s", "(", "target", "=", "lvars", "[", "'TARGETS'", "]", ",", "source", "=", "lvars", "[", "'SOURCES'", "]", ",", "env", "=", "self", ".", "env", ",", "for_signature", "=", "(", "self", ".", "mode", "!=", "SUBST_CMD", ")", ")", "except", "TypeError", ":", "# This probably indicates that it's a callable", "# object that doesn't match our calling arguments", "# (like an Action).", "if", "self", ".", "mode", "==", "SUBST_RAW", ":", "return", "s", "s", "=", "self", ".", "conv", "(", "s", ")", "return", "self", ".", "substitute", "(", "s", ",", "lvars", ")", "elif", "s", "is", "None", ":", "return", "''", "else", ":", "return", "s", "def", "substitute", "(", "self", ",", "args", ",", "lvars", ")", ":", "\"\"\"Substitute expansions in an argument or list of arguments.\n\n This serves as a wrapper for splitting up a string into\n separate tokens.\n \"\"\"", "if", "is_String", "(", "args", ")", "and", "not", "isinstance", "(", "args", ",", "CmdStringHolder", ")", ":", "args", "=", "str", "(", "args", ")", "# In case it's a UserString.", "try", ":", "def", "sub_match", "(", "match", ")", ":", "return", "self", ".", "conv", "(", "self", ".", "expand", "(", "match", ".", "group", "(", "1", ")", ",", "lvars", ")", ")", "result", "=", "_dollar_exps", ".", "sub", "(", "sub_match", ",", "args", ")", "except", "TypeError", ":", "# If the internal conversion routine doesn't return", "# strings (it could be overridden to return Nodes, for", "# example), then the 1.5.2 re module will throw this", "# exception. Back off to a slower, general-purpose", "# algorithm that works for all data types.", "args", "=", "_separate_args", ".", "findall", "(", "args", ")", "result", "=", "[", "]", "for", "a", "in", "args", ":", "result", ".", "append", "(", "self", ".", "conv", "(", "self", ".", "expand", "(", "a", ",", "lvars", ")", ")", ")", "if", "len", "(", "result", ")", "==", "1", ":", "result", "=", "result", "[", "0", "]", "else", ":", "result", "=", "''", ".", "join", "(", "map", "(", "str", ",", "result", ")", ")", "return", "result", "else", ":", "return", "self", ".", "expand", "(", "args", ",", "lvars", ")", "if", "conv", "is", "None", ":", "conv", "=", "_strconv", "[", "mode", "]", "# Doing this every time is a bit of a waste, since the Executor", "# has typically already populated the OverrideEnvironment with", "# $TARGET/$SOURCE variables. We're keeping this (for now), though,", "# because it supports existing behavior that allows us to call", "# an Action directly with an arbitrary target+source pair, which", "# we use in Tool/tex.py to handle calling $BIBTEX when necessary.", "# If we dropped that behavior (or found another way to cover it),", "# we could get rid of this call completely and just rely on the", "# Executor setting the variables.", "if", "'TARGET'", "not", "in", "lvars", ":", "d", "=", "subst_dict", "(", "target", ",", "source", ")", "if", "d", ":", "lvars", "=", "lvars", ".", "copy", "(", ")", "lvars", ".", "update", "(", "d", ")", "# We're (most likely) going to eval() things. If Python doesn't", "# find a __builtins__ value in the global dictionary used for eval(),", "# it copies the current global values for you. 
Avoid this by", "# setting it explicitly and then deleting, so we don't pollute the", "# construction environment Dictionary(ies) that are typically used", "# for expansion.", "gvars", "[", "'__builtins__'", "]", "=", "__builtins__", "ss", "=", "StringSubber", "(", "env", ",", "mode", ",", "conv", ",", "gvars", ")", "result", "=", "ss", ".", "substitute", "(", "strSubst", ",", "lvars", ")", "try", ":", "del", "gvars", "[", "'__builtins__'", "]", "except", "KeyError", ":", "pass", "if", "is_String", "(", "result", ")", ":", "# Remove $(-$) pairs and any stuff in between,", "# if that's appropriate.", "remove", "=", "_regex_remove", "[", "mode", "]", "if", "remove", ":", "result", "=", "remove", ".", "sub", "(", "''", ",", "result", ")", "if", "mode", "!=", "SUBST_RAW", ":", "# Compress strings of white space characters into", "# a single space.", "result", "=", "_space_sep", ".", "sub", "(", "' '", ",", "result", ")", ".", "strip", "(", ")", "elif", "is_Sequence", "(", "result", ")", ":", "remove", "=", "_list_remove", "[", "mode", "]", "if", "remove", ":", "result", "=", "remove", "(", "result", ")", "return", "result" ]
https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/Subst.py#L393-L580
Tencent/CMONGO
c40380caa14e05509f46993aa8b8da966b09b0b5
buildscripts/resmokelib/core/programs.py
python
mongod_program
(logger, executable=None, process_kwargs=None, **kwargs)
return _process.Process(logger, args, **process_kwargs)
Returns a Process instance that starts a mongod executable with arguments constructed from 'kwargs'.
def mongod_program(logger, executable=None, process_kwargs=None, **kwargs):
    """
    Returns a Process instance that starts a mongod executable with
    arguments constructed from 'kwargs'.
    """
    executable = utils.default_if_none(executable, config.DEFAULT_MONGOD_EXECUTABLE)
    args = [executable]

    # Apply the --setParameter command line argument. Command line options to resmoke.py override
    # the YAML configuration.
    suite_set_parameters = kwargs.pop("set_parameters", {})
    if config.MONGOD_SET_PARAMETERS is not None:
        suite_set_parameters.update(utils.load_yaml(config.MONGOD_SET_PARAMETERS))
    _apply_set_parameters(args, suite_set_parameters)

    shortcut_opts = {
        "nojournal": config.NO_JOURNAL,
        "nopreallocj": config.NO_PREALLOC_JOURNAL,
        "storageEngine": config.STORAGE_ENGINE,
        "wiredTigerCollectionConfigString": config.WT_COLL_CONFIG,
        "wiredTigerEngineConfigString": config.WT_ENGINE_CONFIG,
        "wiredTigerIndexConfigString": config.WT_INDEX_CONFIG,
    }

    if config.STORAGE_ENGINE == "rocksdb":
        shortcut_opts["rocksdbCacheSizeGB"] = config.STORAGE_ENGINE_CACHE_SIZE
    elif config.STORAGE_ENGINE == "wiredTiger" or config.STORAGE_ENGINE is None:
        shortcut_opts["wiredTigerCacheSizeGB"] = config.STORAGE_ENGINE_CACHE_SIZE

    # These options are just flags, so they should not take a value.
    opts_without_vals = ("nojournal", "nopreallocj")

    # Have the --nojournal command line argument to resmoke.py unset the journal option.
    if shortcut_opts["nojournal"] and "journal" in kwargs:
        del kwargs["journal"]

    # Ensure that config servers run with journaling enabled.
    if "configsvr" in kwargs:
        shortcut_opts["nojournal"] = False
        kwargs["journal"] = ""

    # Command line options override the YAML configuration.
    for opt_name in shortcut_opts:
        opt_value = shortcut_opts[opt_name]
        if opt_name in opts_without_vals:
            # Options that are specified as --flag on the command line are represented by a boolean
            # value where True indicates that the flag should be included in 'kwargs'.
            if opt_value:
                kwargs[opt_name] = ""
        else:
            # Options that are specified as --key=value on the command line are represented by a
            # value where None indicates that the key-value pair shouldn't be included in 'kwargs'.
            if opt_value is not None:
                kwargs[opt_name] = opt_value

    # Override the storage engine specified on the command line with "wiredTiger" if running a
    # config server replica set.
    if "replSet" in kwargs and "configsvr" in kwargs:
        kwargs["storageEngine"] = "wiredTiger"

    # Apply the rest of the command line arguments.
    _apply_kwargs(args, kwargs)

    _set_keyfile_permissions(kwargs)

    process_kwargs = utils.default_if_none(process_kwargs, {})
    return _process.Process(logger, args, **process_kwargs)
[ "def", "mongod_program", "(", "logger", ",", "executable", "=", "None", ",", "process_kwargs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "executable", "=", "utils", ".", "default_if_none", "(", "executable", ",", "config", ".", "DEFAULT_MONGOD_EXECUTABLE", ")", "args", "=", "[", "executable", "]", "# Apply the --setParameter command line argument. Command line options to resmoke.py override", "# the YAML configuration.", "suite_set_parameters", "=", "kwargs", ".", "pop", "(", "\"set_parameters\"", ",", "{", "}", ")", "if", "config", ".", "MONGOD_SET_PARAMETERS", "is", "not", "None", ":", "suite_set_parameters", ".", "update", "(", "utils", ".", "load_yaml", "(", "config", ".", "MONGOD_SET_PARAMETERS", ")", ")", "_apply_set_parameters", "(", "args", ",", "suite_set_parameters", ")", "shortcut_opts", "=", "{", "\"nojournal\"", ":", "config", ".", "NO_JOURNAL", ",", "\"nopreallocj\"", ":", "config", ".", "NO_PREALLOC_JOURNAL", ",", "\"storageEngine\"", ":", "config", ".", "STORAGE_ENGINE", ",", "\"wiredTigerCollectionConfigString\"", ":", "config", ".", "WT_COLL_CONFIG", ",", "\"wiredTigerEngineConfigString\"", ":", "config", ".", "WT_ENGINE_CONFIG", ",", "\"wiredTigerIndexConfigString\"", ":", "config", ".", "WT_INDEX_CONFIG", ",", "}", "if", "config", ".", "STORAGE_ENGINE", "==", "\"rocksdb\"", ":", "shortcut_opts", "[", "\"rocksdbCacheSizeGB\"", "]", "=", "config", ".", "STORAGE_ENGINE_CACHE_SIZE", "elif", "config", ".", "STORAGE_ENGINE", "==", "\"wiredTiger\"", "or", "config", ".", "STORAGE_ENGINE", "is", "None", ":", "shortcut_opts", "[", "\"wiredTigerCacheSizeGB\"", "]", "=", "config", ".", "STORAGE_ENGINE_CACHE_SIZE", "# These options are just flags, so they should not take a value.", "opts_without_vals", "=", "(", "\"nojournal\"", ",", "\"nopreallocj\"", ")", "# Have the --nojournal command line argument to resmoke.py unset the journal option.", "if", "shortcut_opts", "[", "\"nojournal\"", "]", "and", "\"journal\"", "in", "kwargs", ":", "del", "kwargs", "[", "\"journal\"", "]", "# Ensure that config servers run with journaling enabled.", "if", "\"configsvr\"", "in", "kwargs", ":", "shortcut_opts", "[", "\"nojournal\"", "]", "=", "False", "kwargs", "[", "\"journal\"", "]", "=", "\"\"", "# Command line options override the YAML configuration.", "for", "opt_name", "in", "shortcut_opts", ":", "opt_value", "=", "shortcut_opts", "[", "opt_name", "]", "if", "opt_name", "in", "opts_without_vals", ":", "# Options that are specified as --flag on the command line are represented by a boolean", "# value where True indicates that the flag should be included in 'kwargs'.", "if", "opt_value", ":", "kwargs", "[", "opt_name", "]", "=", "\"\"", "else", ":", "# Options that are specified as --key=value on the command line are represented by a", "# value where None indicates that the key-value pair shouldn't be included in 'kwargs'.", "if", "opt_value", "is", "not", "None", ":", "kwargs", "[", "opt_name", "]", "=", "opt_value", "# Override the storage engine specified on the command line with \"wiredTiger\" if running a", "# config server replica set.", "if", "\"replSet\"", "in", "kwargs", "and", "\"configsvr\"", "in", "kwargs", ":", "kwargs", "[", "\"storageEngine\"", "]", "=", "\"wiredTiger\"", "# Apply the rest of the command line arguments.", "_apply_kwargs", "(", "args", ",", "kwargs", ")", "_set_keyfile_permissions", "(", "kwargs", ")", "process_kwargs", "=", "utils", ".", "default_if_none", "(", "process_kwargs", ",", "{", "}", ")", "return", "_process", ".", "Process", "(", "logger", ",", 
"args", ",", "*", "*", "process_kwargs", ")" ]
https://github.com/Tencent/CMONGO/blob/c40380caa14e05509f46993aa8b8da966b09b0b5/buildscripts/resmokelib/core/programs.py#L19-L88
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
python
UTF8Strndup
(utf, len)
return ret
a strndup for array of UTF8's
def UTF8Strndup(utf, len):
    """a strndup for array of UTF8's """
    ret = libxml2mod.xmlUTF8Strndup(utf, len)
    return ret
[ "def", "UTF8Strndup", "(", "utf", ",", "len", ")", ":", "ret", "=", "libxml2mod", ".", "xmlUTF8Strndup", "(", "utf", ",", "len", ")", "return", "ret" ]
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L1269-L1272
metashell/metashell
f4177e4854ea00c8dbc722cadab26ef413d798ea
3rd/templight/clang/bindings/python/clang/cindex.py
python
Cursor.location
(self)
return self._loc
Return the source location (the starting character) of the entity pointed at by the cursor.
def location(self):
    """
    Return the source location (the starting character) of the entity
    pointed at by the cursor.
    """
    if not hasattr(self, '_loc'):
        self._loc = conf.lib.clang_getCursorLocation(self)
    return self._loc
[ "def", "location", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_loc'", ")", ":", "self", ".", "_loc", "=", "conf", ".", "lib", ".", "clang_getCursorLocation", "(", "self", ")", "return", "self", ".", "_loc" ]
https://github.com/metashell/metashell/blob/f4177e4854ea00c8dbc722cadab26ef413d798ea/3rd/templight/clang/bindings/python/clang/cindex.py#L1574-L1582
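The hasattr-based lazy caching used here is a general Python idiom; a self-contained sketch of the same pattern, not part of the cindex API:

```python
class LazyExample(object):
    @property
    def value(self):
        # Compute once on first access, then serve the cached result.
        if not hasattr(self, "_value"):
            self._value = sum(range(10 ** 6))  # stand-in for an expensive call
        return self._value

obj = LazyExample()
print(obj.value)  # computed on first access
print(obj.value)  # served from the cached attribute
```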
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/grid.py
python
PyGridCellAttrProvider.GetAttr
(*args, **kwargs)
return _grid.PyGridCellAttrProvider_GetAttr(*args, **kwargs)
GetAttr(self, int row, int col, int kind) -> GridCellAttr
def GetAttr(*args, **kwargs):
    """GetAttr(self, int row, int col, int kind) -> GridCellAttr"""
    return _grid.PyGridCellAttrProvider_GetAttr(*args, **kwargs)
[ "def", "GetAttr", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_grid", ".", "PyGridCellAttrProvider_GetAttr", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/grid.py#L725-L727
apple/swift-lldb
d74be846ef3e62de946df343e8c234bde93a8912
scripts/Python/static-binding/lldb.py
python
SBFrame.GetPC
(self)
return _lldb.SBFrame_GetPC(self)
GetPC(SBFrame self) -> lldb::addr_t
def GetPC(self):
    """GetPC(SBFrame self) -> lldb::addr_t"""
    return _lldb.SBFrame_GetPC(self)
[ "def", "GetPC", "(", "self", ")", ":", "return", "_lldb", ".", "SBFrame_GetPC", "(", "self", ")" ]
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L5477-L5479
OSGeo/gdal
3748fc4ba4fba727492774b2b908a2130c864a83
swig/python/osgeo/osr.py
python
OSRAreaOfUse_east_lon_degree_get
(*args)
return _osr.OSRAreaOfUse_east_lon_degree_get(*args)
r"""OSRAreaOfUse_east_lon_degree_get(AreaOfUse area) -> double
r"""OSRAreaOfUse_east_lon_degree_get(AreaOfUse area) -> double
[ "r", "OSRAreaOfUse_east_lon_degree_get", "(", "AreaOfUse", "area", ")", "-", ">", "double" ]
def OSRAreaOfUse_east_lon_degree_get(*args):
    r"""OSRAreaOfUse_east_lon_degree_get(AreaOfUse area) -> double"""
    return _osr.OSRAreaOfUse_east_lon_degree_get(*args)
[ "def", "OSRAreaOfUse_east_lon_degree_get", "(", "*", "args", ")", ":", "return", "_osr", ".", "OSRAreaOfUse_east_lon_degree_get", "(", "*", "args", ")" ]
https://github.com/OSGeo/gdal/blob/3748fc4ba4fba727492774b2b908a2130c864a83/swig/python/osgeo/osr.py#L304-L306
blackberry/Boost
fc90c3fde129c62565c023f091eddc4a7ed9902b
tools/build/v2/build/property_set.py
python
PropertySet.add_raw
(self, properties)
return self.add (create (properties))
Creates a new property set containing the properties in this one, plus the ones passed as argument.
def add_raw (self, properties):
    """ Creates a new property set containing the properties in this one,
        plus the ones passed as argument.
    """
    return self.add (create (properties))
[ "def", "add_raw", "(", "self", ",", "properties", ")", ":", "return", "self", ".", "add", "(", "create", "(", "properties", ")", ")" ]
https://github.com/blackberry/Boost/blob/fc90c3fde129c62565c023f091eddc4a7ed9902b/tools/build/v2/build/property_set.py#L409-L413
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/python/training/summary_io.py
python
_EventLoggerThread.__init__
(self, queue, ev_writer, flush_secs)
Creates an _EventLoggerThread.

Args:
  queue: A Queue from which to dequeue events.
  ev_writer: An event writer. Used to log brain events for the visualizer.
  flush_secs: How often, in seconds, to flush the pending file to disk.
Creates an _EventLoggerThread.
[ "Creates", "an", "_EventLoggerThread", "." ]
def __init__(self, queue, ev_writer, flush_secs):
  """Creates an _EventLoggerThread.

  Args:
    queue: A Queue from which to dequeue events.
    ev_writer: An event writer. Used to log brain events for
      the visualizer.
    flush_secs: How often, in seconds, to flush the
      pending file to disk.
  """
  threading.Thread.__init__(self)
  self.daemon = True
  self._queue = queue
  self._ev_writer = ev_writer
  self._flush_secs = flush_secs
  # The first event will be flushed immediately.
  self._next_event_flush_time = 0
[ "def", "__init__", "(", "self", ",", "queue", ",", "ev_writer", ",", "flush_secs", ")", ":", "threading", ".", "Thread", ".", "__init__", "(", "self", ")", "self", ".", "daemon", "=", "True", "self", ".", "_queue", "=", "queue", "self", ".", "_ev_writer", "=", "ev_writer", "self", ".", "_flush_secs", "=", "flush_secs", "# The first event will be flushed immediately.", "self", ".", "_next_event_flush_time", "=", "0" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/training/summary_io.py#L287-L303
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/WebOb/webob/datetime_utils.py
python
timedelta_to_seconds
(td)
return td.seconds + (td.days*24*60*60)
Converts a timedelta instance to seconds.
def timedelta_to_seconds(td):
    """
    Converts a timedelta instance to seconds.
    """
    return td.seconds + (td.days*24*60*60)
[ "def", "timedelta_to_seconds", "(", "td", ")", ":", "return", "td", ".", "seconds", "+", "(", "td", ".", "days", "*", "24", "*", "60", "*", "60", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/WebOb/webob/datetime_utils.py#L48-L52
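Worth noting, as a quick worked comparison (not part of the record): this conversion drops microseconds, unlike the standard timedelta.total_seconds().

```python
from datetime import timedelta

td = timedelta(days=1, seconds=30, microseconds=500000)
print(td.seconds + td.days * 24 * 60 * 60)  # 86430 (microseconds ignored)
print(td.total_seconds())                   # 86430.5
```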
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/data/python/ops/enumerate_ops.py
python
enumerate_dataset
(start=0)
return enumerate_ops.enumerate_dataset(start)
A transformation that enumerate the elements of a dataset.

It is Similar to python's `enumerate`.
For example:

```python
# NOTE: The following examples use `{ ... }` to represent the
# contents of a dataset.
a = { 1, 2, 3 }
b = { (7, 8), (9, 10) }

# The nested structure of the `datasets` argument determines the
# structure of elements in the resulting dataset.
a.apply(tf.contrib.data.enumerate(start=5)) == { (5, 1), (6, 2), (7, 3) }
b.apply(tf.contrib.data.enumerate()) == { (0, (7, 8)), (1, (9, 10)) }
```

Args:
  start: A `tf.int64` scalar `tf.Tensor`, representing the start
    value for enumeration.

Returns:
  A `Dataset` transformation function, which can be passed to
  `tf.data.Dataset.apply`.
A transformation that enumerate the elements of a dataset.
[ "A", "transformation", "that", "enumerate", "the", "elements", "of", "a", "dataset", "." ]
def enumerate_dataset(start=0):
  """A transformation that enumerate the elements of a dataset.

  It is Similar to python's `enumerate`.
  For example:

  ```python
  # NOTE: The following examples use `{ ... }` to represent the
  # contents of a dataset.
  a = { 1, 2, 3 }
  b = { (7, 8), (9, 10) }

  # The nested structure of the `datasets` argument determines the
  # structure of elements in the resulting dataset.
  a.apply(tf.contrib.data.enumerate(start=5)) == { (5, 1), (6, 2), (7, 3) }
  b.apply(tf.contrib.data.enumerate()) == { (0, (7, 8)), (1, (9, 10)) }
  ```

  Args:
    start: A `tf.int64` scalar `tf.Tensor`, representing the start
      value for enumeration.

  Returns:
    A `Dataset` transformation function, which can be passed to
    `tf.data.Dataset.apply`.
  """
  return enumerate_ops.enumerate_dataset(start)
[ "def", "enumerate_dataset", "(", "start", "=", "0", ")", ":", "return", "enumerate_ops", ".", "enumerate_dataset", "(", "start", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/data/python/ops/enumerate_ops.py#L27-L53
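A hedged sketch of the equivalent behavior with the core tf.data API (tf.data.Dataset.enumerate, which superseded this contrib transformation); assumes a TensorFlow 2.x install.

```python
import tensorflow as tf

ds = tf.data.Dataset.from_tensor_slices([1, 2, 3]).enumerate(start=5)
for index, element in ds:
    print(int(index), int(element))  # (5, 1), (6, 2), (7, 3)
```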
domino-team/openwrt-cc
8b181297c34d14d3ca521cc9f31430d561dbc688
package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/deps/v8_inspector/third_party/jinja2/jinja2/runtime.py
python
Context.super
(self, name, current)
return BlockReference(name, self, blocks, index)
Render a parent block.
def super(self, name, current):
    """Render a parent block."""
    try:
        blocks = self.blocks[name]
        index = blocks.index(current) + 1
        blocks[index]
    except LookupError:
        return self.environment.undefined('there is no parent block '
                                          'called %r.' % name,
                                          name='super')
    return BlockReference(name, self, blocks, index)
[ "def", "super", "(", "self", ",", "name", ",", "current", ")", ":", "try", ":", "blocks", "=", "self", ".", "blocks", "[", "name", "]", "index", "=", "blocks", ".", "index", "(", "current", ")", "+", "1", "blocks", "[", "index", "]", "except", "LookupError", ":", "return", "self", ".", "environment", ".", "undefined", "(", "'there is no parent block '", "'called %r.'", "%", "name", ",", "name", "=", "'super'", ")", "return", "BlockReference", "(", "name", ",", "self", ",", "blocks", ",", "index", ")" ]
https://github.com/domino-team/openwrt-cc/blob/8b181297c34d14d3ca521cc9f31430d561dbc688/package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/deps/v8_inspector/third_party/jinja2/jinja2/runtime.py#L128-L138
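At the template level this method backs {{ super() }}; a minimal sketch with an in-memory loader (the template names and contents here are made up), assuming Jinja2 is installed:

```python
from jinja2 import Environment, DictLoader

env = Environment(loader=DictLoader({
    "base.html": "{% block body %}base{% endblock %}",
    "child.html": '{% extends "base.html" %}'
                  '{% block body %}child + {{ super() }}{% endblock %}',
}))
print(env.get_template("child.html").render())  # child + base
```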
tum-vision/fusenet
a1451be2971b348a01b0f525c2a3a7a0e215a591
examples/pycaffe/tools.py
python
SimpleTransformer.deprocess
(self, im)
return np.uint8(im)
inverse of preprocess()
def deprocess(self, im):
    """
    inverse of preprocess()
    """
    im = im.transpose(1, 2, 0)
    im /= self.scale
    im += self.mean
    im = im[:, :, ::-1]  # change to RGB
    return np.uint8(im)
[ "def", "deprocess", "(", "self", ",", "im", ")", ":", "im", "=", "im", ".", "transpose", "(", "1", ",", "2", ",", "0", ")", "im", "/=", "self", ".", "scale", "im", "+=", "self", ".", "mean", "im", "=", "im", "[", ":", ",", ":", ",", ":", ":", "-", "1", "]", "# change to RGB", "return", "np", ".", "uint8", "(", "im", ")" ]
https://github.com/tum-vision/fusenet/blob/a1451be2971b348a01b0f525c2a3a7a0e215a591/examples/pycaffe/tools.py#L41-L50
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/framework/ops.py
python
Graph.get_collection
(self, name, scope=None)
Returns a list of values in the collection with the given `name`.

This is different from `get_collection_ref()` which always returns the
actual collection list if it exists in that it returns a new list each
time it is called.

Args:
  name: The key for the collection. For example, the `GraphKeys` class
    contains many standard names for collections.
  scope: (Optional.) A string. If supplied, the resulting list is filtered
    to include only items whose `name` attribute matches `scope` using
    `re.match`. Items without a `name` attribute are never returned if a
    scope is supplied. The choice of `re.match` means that a `scope` without
    special tokens filters by prefix.

Returns:
  The list of values in the collection with the given `name`, or
  an empty list if no value has been added to that collection. The
  list contains the values in the order under which they were collected.
Returns a list of values in the collection with the given `name`.
[ "Returns", "a", "list", "of", "values", "in", "the", "collection", "with", "the", "given", "name", "." ]
def get_collection(self, name, scope=None):
  """Returns a list of values in the collection with the given `name`.

  This is different from `get_collection_ref()` which always returns the
  actual collection list if it exists in that it returns a new list each
  time it is called.

  Args:
    name: The key for the collection. For example, the `GraphKeys` class
      contains many standard names for collections.
    scope: (Optional.) A string. If supplied, the resulting list is filtered
      to include only items whose `name` attribute matches `scope` using
      `re.match`. Items without a `name` attribute are never returned if a
      scope is supplied. The choice of `re.match` means that a `scope` without
      special tokens filters by prefix.

  Returns:
    The list of values in the collection with the given `name`, or
    an empty list if no value has been added to that collection. The
    list contains the values in the order under which they were
    collected.
  """
  # pylint: disable=g-doc-exception
  _assert_collection_is_ok(name)
  with self._lock:
    collection = self._collections.get(name, None)
    if collection is None:
      return []
    if scope is None:
      return list(collection)
    else:
      c = []
      regex = re.compile(scope)
      for item in collection:
        if hasattr(item, "name") and regex.match(item.name):
          c.append(item)
      return c
[ "def", "get_collection", "(", "self", ",", "name", ",", "scope", "=", "None", ")", ":", "# pylint: disable=g-doc-exception", "_assert_collection_is_ok", "(", "name", ")", "with", "self", ".", "_lock", ":", "collection", "=", "self", ".", "_collections", ".", "get", "(", "name", ",", "None", ")", "if", "collection", "is", "None", ":", "return", "[", "]", "if", "scope", "is", "None", ":", "return", "list", "(", "collection", ")", "else", ":", "c", "=", "[", "]", "regex", "=", "re", ".", "compile", "(", "scope", ")", "for", "item", "in", "collection", ":", "if", "hasattr", "(", "item", ",", "\"name\"", ")", "and", "regex", ".", "match", "(", "item", ".", "name", ")", ":", "c", ".", "append", "(", "item", ")", "return", "c" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/framework/ops.py#L3392-L3427
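A hedged sketch of the scope filter using the tf.compat.v1 graph API (assumes a TensorFlow install; the collection name and tensor names are made up). Because filtering uses re.match, a plain scope string acts as a prefix filter:

```python
import tensorflow.compat.v1 as tf

g = tf.Graph()
with g.as_default():
    c1 = tf.constant(1.0, name="layer1/bias")
    c2 = tf.constant(2.0, name="layer2/bias")
    g.add_to_collection("my_things", c1)
    g.add_to_collection("my_things", c2)

print(g.get_collection("my_things", scope="layer1"))  # only layer1/bias
```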
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
scripts/Interface/ui/sans_isis/sans_data_processor_gui.py
python
SANSDataProcessorGui._check_output_mode
(self, value)
Set the output mode radio button from a SANS enum.
This method is called when:
1. The gui is launched
2. Via the presenter, from state
:param value: An OutputMode (SANS enum) object
def _check_output_mode(self, value):
    """
    Set the output mode radio button from a SANS enum.
    This method is called when:
    1. The gui is launched
    2. Via the presenter, from state
    :param value: An OutputMode (SANS enum) object
    """
    if value is OutputMode.PUBLISH_TO_ADS:
        self.output_mode_memory_radio_button.setChecked(True)
    elif value is OutputMode.SAVE_TO_FILE:
        self.output_mode_file_radio_button.setChecked(True)
    elif value is OutputMode.BOTH:
        self.output_mode_both_radio_button.setChecked(True)

    # Notify the presenter
    self._call_settings_listeners(lambda listener: listener.on_output_mode_changed())
[ "def", "_check_output_mode", "(", "self", ",", "value", ")", ":", "if", "value", "is", "OutputMode", ".", "PUBLISH_TO_ADS", ":", "self", ".", "output_mode_memory_radio_button", ".", "setChecked", "(", "True", ")", "elif", "value", "is", "OutputMode", ".", "SAVE_TO_FILE", ":", "self", ".", "output_mode_file_radio_button", ".", "setChecked", "(", "True", ")", "elif", "value", "is", "OutputMode", ".", "BOTH", ":", "self", ".", "output_mode_both_radio_button", ".", "setChecked", "(", "True", ")", "# Notify the presenter", "self", ".", "_call_settings_listeners", "(", "lambda", "listener", ":", "listener", ".", "on_output_mode_changed", "(", ")", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/Interface/ui/sans_isis/sans_data_processor_gui.py#L609-L625
Illumina/hap.py
84011695b2ff2406c16a335106db6831fb67fdfe
src/python/Tools/__init__.py
python
BGZipFile.__init__
(self, filename, force=False)
Make a subprocess for bgzip
:param filename: name of the output file
:param force: true to overwrite if file exists
def __init__(self, filename, force=False):
    """
    Make a subprocess for bgzip
    :param filename: name of the output file
    :param force: true to overwrite if file exists
    """
    if os.path.exists(filename) and not force:
        raise Exception("File %s exists, use force=True to overwrite" % filename)
    self.write_file = open(filename, "wb")
    zip_pipe = subprocess.Popen(["bgzip", "-f"],
                                stdin=subprocess.PIPE,
                                stdout=self.write_file,
                                stderr=subprocess.PIPE,
                                shell=True)
    self.zip_pipe = zip_pipe
    self.name = filename
[ "def", "__init__", "(", "self", ",", "filename", ",", "force", "=", "False", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "filename", ")", "and", "not", "force", ":", "raise", "Exception", "(", "\"File %s exists, use force=True to overwrite\"", "%", "filename", ")", "self", ".", "write_file", "=", "open", "(", "filename", ",", "\"wb\"", ")", "zip_pipe", "=", "subprocess", ".", "Popen", "(", "[", "\"bgzip\"", ",", "\"-f\"", "]", ",", "stdin", "=", "subprocess", ".", "PIPE", ",", "stdout", "=", "self", ".", "write_file", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "shell", "=", "True", ")", "self", ".", "zip_pipe", "=", "zip_pipe", "self", ".", "name", "=", "filename" ]
https://github.com/Illumina/hap.py/blob/84011695b2ff2406c16a335106db6831fb67fdfe/src/python/Tools/__init__.py#L179-L194
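One caution worth a sketch (not a change to the record above): on POSIX, passing a list argv together with shell=True hands only "bgzip" to the shell, so "-f" is not delivered to bgzip as a flag. A plain argv list without shell=True is the conventional form; this assumes bgzip is on PATH and the output name is illustrative.

```python
import subprocess

write_file = open("example.vcf.gz", "wb")      # hypothetical output path
zip_pipe = subprocess.Popen(["bgzip", "-f"],   # argv list, no shell
                            stdin=subprocess.PIPE,
                            stdout=write_file,
                            stderr=subprocess.PIPE)
```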
krishauser/Klampt
972cc83ea5befac3f653c1ba20f80155768ad519
Python/python2_version/klampt/src/robotsim.py
python
SimBody.enableDynamics
(self, enabled=True)
return _robotsim.SimBody_enableDynamics(self, enabled)
enableDynamics(SimBody self, bool enabled=True)
enableDynamics(SimBody self)

Sets the dynamic simulation of the body on/off. If false, velocities will
simply be integrated forward, and forces will not affect velocity i.e., it
will be pure kinematic simulation.
enableDynamics(SimBody self, bool enabled=True) enableDynamics(SimBody self)
[ "enableDynamics", "(", "SimBody", "self", "bool", "enabled", "=", "True", ")", "enableDynamics", "(", "SimBody", "self", ")" ]
def enableDynamics(self, enabled=True):
    """
    enableDynamics(SimBody self, bool enabled=True)
    enableDynamics(SimBody self)

    Sets the dynamic simulation of the body on/off. If false, velocities
    will simply be integrated forward, and forces will not affect velocity
    i.e., it will be pure kinematic simulation.
    """
    return _robotsim.SimBody_enableDynamics(self, enabled)
[ "def", "enableDynamics", "(", "self", ",", "enabled", "=", "True", ")", ":", "return", "_robotsim", ".", "SimBody_enableDynamics", "(", "self", ",", "enabled", ")" ]
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/src/robotsim.py#L7879-L7891
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_windows.py
python
Frame.GetStatusBar
(*args, **kwargs)
return _windows_.Frame_GetStatusBar(*args, **kwargs)
GetStatusBar(self) -> StatusBar
def GetStatusBar(*args, **kwargs):
    """GetStatusBar(self) -> StatusBar"""
    return _windows_.Frame_GetStatusBar(*args, **kwargs)
[ "def", "GetStatusBar", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "Frame_GetStatusBar", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L617-L619
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/multiprocessing/__init__.py
python
Semaphore
(value=1)
return Semaphore(value)
Returns a semaphore object
def Semaphore(value=1):
    '''
    Returns a semaphore object
    '''
    from multiprocessing.synchronize import Semaphore
    return Semaphore(value)
[ "def", "Semaphore", "(", "value", "=", "1", ")", ":", "from", "multiprocessing", ".", "synchronize", "import", "Semaphore", "return", "Semaphore", "(", "value", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/multiprocessing/__init__.py#L192-L197
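A small usage sketch of the factory above (not part of the record), bounding how many worker processes run concurrently:

```python
import time
from multiprocessing import Process, Semaphore

def worker(sem, i):
    with sem:              # at most 2 workers inside this block at once
        time.sleep(0.1)
        print("worker", i, "done")

if __name__ == "__main__":
    sem = Semaphore(2)
    procs = [Process(target=worker, args=(sem, i)) for i in range(4)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
```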
kushview/Element
1cc16380caa2ab79461246ba758b9de1f46db2a5
waflib/Utils.py
python
lru_cache.__init__
(self, maxlen=100)
Maximum amount of elements in the cache
Maximum amount of elements in the cache
[ "Maximum", "amount", "of", "elements", "in", "the", "cache" ]
def __init__(self, maxlen=100): self.maxlen = maxlen """ Maximum amount of elements in the cache """ self.table = {} """ Mapping key-value """ self.head = lru_node() self.head.next = self.head self.head.prev = self.head
[ "def", "__init__", "(", "self", ",", "maxlen", "=", "100", ")", ":", "self", ".", "maxlen", "=", "maxlen", "self", ".", "table", "=", "{", "}", "\"\"\"\n\t\tMapping key-value\n\t\t\"\"\"", "self", ".", "head", "=", "lru_node", "(", ")", "self", ".", "head", ".", "next", "=", "self", ".", "head", "self", ".", "head", ".", "prev", "=", "self", ".", "head" ]
https://github.com/kushview/Element/blob/1cc16380caa2ab79461246ba758b9de1f46db2a5/waflib/Utils.py#L139-L150
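Usage note (not part of the record): the constructor above seeds a circular doubly-linked list (head.next and head.prev point back at the head) plus a key lookup table. A minimal sketch of the same cache policy using collections.OrderedDict instead of hand-rolled nodes (an assumption; waflib's real class keeps its own linked list):

from collections import OrderedDict

class SimpleLRU(object):
    def __init__(self, maxlen=100):
        self.maxlen = maxlen          # maximum number of cached entries
        self.table = OrderedDict()    # key -> value, oldest first

    def __setitem__(self, key, value):
        self.table.pop(key, None)
        self.table[key] = value
        while len(self.table) > self.maxlen:
            self.table.popitem(last=False)  # evict least recently used

    def __getitem__(self, key):
        value = self.table.pop(key)
        self.table[key] = value             # re-insert as most recent
        return value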
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
scripts/SANS/SANSUtility.py
python
is_valid_ws_for_removing_zero_errors
(input_workspace_name)
return message, isValid
Check if a workspace has been created via Q1D or Qxy. @param input_workspace_name :: The input workspace
Check if a workspace has been created via Q1D or Qxy.
[ "Check", "if", "a", "workspace", "has", "been", "created", "via", "Q1D", "or", "Qxy", "." ]
def is_valid_ws_for_removing_zero_errors(input_workspace_name): ''' Check if a workspace has been created via Q1D or Qxy. @param input_workspace_name :: The input workspace ''' isValid = False message = "" ws = mtd[input_workspace_name] workspaceHistory = ws.getHistory() histories = workspaceHistory.getAlgorithmHistories() for history in histories: name = history.name() if name == 'Q1D' or name == 'Qxy': isValid = True break if not isValid: message = ("Workspace does not seem valid for zero error removal. " "It must have been reduced with Q1D or Qxy.") return message, isValid
[ "def", "is_valid_ws_for_removing_zero_errors", "(", "input_workspace_name", ")", ":", "isValid", "=", "False", "message", "=", "\"\"", "ws", "=", "mtd", "[", "input_workspace_name", "]", "workspaceHistory", "=", "ws", ".", "getHistory", "(", ")", "histories", "=", "workspaceHistory", ".", "getAlgorithmHistories", "(", ")", "for", "history", "in", "histories", ":", "name", "=", "history", ".", "name", "(", ")", "if", "name", "==", "'Q1D'", "or", "name", "==", "'Qxy'", ":", "isValid", "=", "True", "break", "if", "not", "isValid", ":", "message", "=", "(", "\"Workspace does not seem valid for zero error removal.\"", "\"It must have been reduced with Q1D or Qxy.\"", ")", "return", "message", ",", "isValid" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/SANS/SANSUtility.py#L941-L962
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/distro.py
python
LinuxDistribution.minor_version
(self, best=False)
return self.version_parts(best)[1]
Return the minor version number of the current distribution. For details, see :func:`distro.minor_version`.
Return the minor version number of the current distribution.
[ "Return", "the", "minor", "version", "number", "of", "the", "current", "distribution", "." ]
def minor_version(self, best=False): """ Return the minor version number of the current distribution. For details, see :func:`distro.minor_version`. """ return self.version_parts(best)[1]
[ "def", "minor_version", "(", "self", ",", "best", "=", "False", ")", ":", "return", "self", ".", "version_parts", "(", "best", ")", "[", "1", "]" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/distro.py#L791-L797
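Usage note (not part of the record): minor_version() is just index 1 of version_parts(). A quick check on a Linux host, assuming the distro package is installed:

import distro

print(distro.version_parts())   # e.g. ('20', '04', '') on Ubuntu 20.04
print(distro.minor_version())   # '04' -- the second element above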
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/rlcompleter.py
python
Completer.complete
(self, text, state)
Return the next possible completion for 'text'. This is called successively with state == 0, 1, 2, ... until it returns None. The completion should begin with 'text'.
Return the next possible completion for 'text'.
[ "Return", "the", "next", "possible", "completion", "for", "text", "." ]
def complete(self, text, state): """Return the next possible completion for 'text'. This is called successively with state == 0, 1, 2, ... until it returns None. The completion should begin with 'text'. """ if self.use_main_ns: self.namespace = __main__.__dict__ if state == 0: if "." in text: self.matches = self.attr_matches(text) else: self.matches = self.global_matches(text) try: return self.matches[state] except IndexError: return None
[ "def", "complete", "(", "self", ",", "text", ",", "state", ")", ":", "if", "self", ".", "use_main_ns", ":", "self", ".", "namespace", "=", "__main__", ".", "__dict__", "if", "state", "==", "0", ":", "if", "\".\"", "in", "text", ":", "self", ".", "matches", "=", "self", ".", "attr_matches", "(", "text", ")", "else", ":", "self", ".", "matches", "=", "self", ".", "global_matches", "(", "text", ")", "try", ":", "return", "self", ".", "matches", "[", "state", "]", "except", "IndexError", ":", "return", "None" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/rlcompleter.py#L71-L89
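Usage note (not part of the record): the state protocol described in the docstring (call with state == 0, 1, 2, ... until None is returned) can be driven by hand; the demo namespace below is an illustrative assumption:

import rlcompleter

namespace = {"alpha": 1, "alphabet": 2}
completer = rlcompleter.Completer(namespace)

state = 0
while True:
    match = completer.complete("alp", state)
    if match is None:
        break
    print(match)   # prints 'alpha' then 'alphabet'
    state += 1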
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/opt/python/training/elastic_average_optimizer.py
python
ElasticAverageOptimizer.apply_gradients
(self, grads_and_vars, global_step=None, name=None)
return conditional_update
Apply gradients to global variables. This is the second part of `minimize()`. It returns an `Operation` that applies gradients. Args: grads_and_vars: List of (gradient, variable) pairs as returned by `compute_gradients()`. global_step: Optional `Variable` to increment by one after the variables have been updated. name: Optional name for the returned operation. Default to the name passed to the `Optimizer` constructor. Returns: An `Operation` that applies the specified gradients. If `global_step` was not None, that operation also increments `global_step`. Raises: TypeError: If `grads_and_vars` is malformed. ValueError: If none of the variables have gradients.
Apply gradients to global variables.
[ "Apply", "gradients", "to", "global", "variables", "." ]
def apply_gradients(self, grads_and_vars, global_step=None, name=None): """Apply gradients to global variables. This is the second part of `minimize()`. It returns an `Operation` that applies gradients. Args: grads_and_vars: List of (gradient, variable) pairs as returned by `compute_gradients()`. global_step: Optional `Variable` to increment by one after the variables have been updated. name: Optional name for the returned operation. Default to the name passed to the `Optimizer` constructor. Returns: An `Operation` that applies the specified gradients. If `global_step` was not None, that operation also increments `global_step`. Raises: TypeError: If `grads_and_vars` is malformed. ValueError: If none of the variables have gradients. """ global_old = set(n.op.name for n in variables.global_variables()) apply_updates = self._opt.apply_gradients(grads_and_vars) global_new = set(n.op.name for n in variables.global_variables()) with ops.control_dependencies([apply_updates]): local_update = state_ops.assign_add( self._local_step, 1, name='local_step_update').op # this is for place the variables created by optimizer to local collection # e.g., AdamOptimizer will create beta as global variables def _adjust_optimizer_variable_collection(opt_vars): g = ops.get_default_graph() idx = 0 for _ in range(len(g._collections[ops.GraphKeys.GLOBAL_VARIABLES])): var = g.get_collection_ref(ops.GraphKeys.GLOBAL_VARIABLES)[idx] name = var.op.name if name in opt_vars: ops.add_to_collection(ops.GraphKeys.LOCAL_VARIABLES, var) del g.get_collection_ref(ops.GraphKeys.GLOBAL_VARIABLES)[idx] else: idx += 1 _adjust_optimizer_variable_collection(global_new - global_old) # update global variables. def _Update_global_variables(): local_vars = [v for g, v in grads_and_vars if g is not None] global_center_vars = [self._global_map[var] for var in local_vars] local_center_vars = [self._local_map[var] for var in local_vars] local_center_vars_update = [] for lvar, var in zip(local_center_vars, global_center_vars): local_center_vars_update.append(lvar.assign(var)) update_ops = [] differences = [] with ops.control_dependencies(local_center_vars_update): for v, lv in zip(local_vars, local_center_vars): with ops.device(v.device): differences.append(math_ops.subtract(v, lv)) for lvar, diff in zip(local_vars, differences): with ops.device(lvar.device): update_ops.append( state_ops.assign_sub(lvar, math_ops.multiply(self._moving_rate, diff))) for var, diff in zip(global_center_vars, differences): with ops.device(var.device): update_ops.append( state_ops.assign_add(var, math_ops.multiply(self._moving_rate, diff))) if global_step: with ops.colocate_with(global_step): update_ops.append(state_ops.assign_add(global_step, 1)) variable_update = control_flow_ops.group(*(update_ops)) return variable_update with ops.control_dependencies([local_update]): condition = math_ops.equal( math_ops.mod(self._local_step, self._period), 0) conditional_update = control_flow_ops.cond(condition, _Update_global_variables, control_flow_ops.no_op) return conditional_update
[ "def", "apply_gradients", "(", "self", ",", "grads_and_vars", ",", "global_step", "=", "None", ",", "name", "=", "None", ")", ":", "global_old", "=", "set", "(", "n", ".", "op", ".", "name", "for", "n", "in", "variables", ".", "global_variables", "(", ")", ")", "apply_updates", "=", "self", ".", "_opt", ".", "apply_gradients", "(", "grads_and_vars", ")", "global_new", "=", "set", "(", "n", ".", "op", ".", "name", "for", "n", "in", "variables", ".", "global_variables", "(", ")", ")", "with", "ops", ".", "control_dependencies", "(", "[", "apply_updates", "]", ")", ":", "local_update", "=", "state_ops", ".", "assign_add", "(", "self", ".", "_local_step", ",", "1", ",", "name", "=", "'local_step_update'", ")", ".", "op", "# this is for place the variables created by optimizer to local collection", "# e.g., AdamOptimizer will create beta as global variables", "def", "_adjust_optimizer_variable_collection", "(", "opt_vars", ")", ":", "g", "=", "ops", ".", "get_default_graph", "(", ")", "idx", "=", "0", "for", "_", "in", "range", "(", "len", "(", "g", ".", "_collections", "[", "ops", ".", "GraphKeys", ".", "GLOBAL_VARIABLES", "]", ")", ")", ":", "var", "=", "g", ".", "get_collection_ref", "(", "ops", ".", "GraphKeys", ".", "GLOBAL_VARIABLES", ")", "[", "idx", "]", "name", "=", "var", ".", "op", ".", "name", "if", "name", "in", "opt_vars", ":", "ops", ".", "add_to_collection", "(", "ops", ".", "GraphKeys", ".", "LOCAL_VARIABLES", ",", "var", ")", "del", "g", ".", "get_collection_ref", "(", "ops", ".", "GraphKeys", ".", "GLOBAL_VARIABLES", ")", "[", "idx", "]", "else", ":", "idx", "+=", "1", "_adjust_optimizer_variable_collection", "(", "global_new", "-", "global_old", ")", "# update global variables.", "def", "_Update_global_variables", "(", ")", ":", "local_vars", "=", "[", "v", "for", "g", ",", "v", "in", "grads_and_vars", "if", "g", "is", "not", "None", "]", "global_center_vars", "=", "[", "self", ".", "_global_map", "[", "var", "]", "for", "var", "in", "local_vars", "]", "local_center_vars", "=", "[", "self", ".", "_local_map", "[", "var", "]", "for", "var", "in", "local_vars", "]", "local_center_vars_update", "=", "[", "]", "for", "lvar", ",", "var", "in", "zip", "(", "local_center_vars", ",", "global_center_vars", ")", ":", "local_center_vars_update", ".", "append", "(", "lvar", ".", "assign", "(", "var", ")", ")", "update_ops", "=", "[", "]", "differences", "=", "[", "]", "with", "ops", ".", "control_dependencies", "(", "local_center_vars_update", ")", ":", "for", "v", ",", "lv", "in", "zip", "(", "local_vars", ",", "local_center_vars", ")", ":", "with", "ops", ".", "device", "(", "v", ".", "device", ")", ":", "differences", ".", "append", "(", "math_ops", ".", "subtract", "(", "v", ",", "lv", ")", ")", "for", "lvar", ",", "diff", "in", "zip", "(", "local_vars", ",", "differences", ")", ":", "with", "ops", ".", "device", "(", "lvar", ".", "device", ")", ":", "update_ops", ".", "append", "(", "state_ops", ".", "assign_sub", "(", "lvar", ",", "math_ops", ".", "multiply", "(", "self", ".", "_moving_rate", ",", "diff", ")", ")", ")", "for", "var", ",", "diff", "in", "zip", "(", "global_center_vars", ",", "differences", ")", ":", "with", "ops", ".", "device", "(", "var", ".", "device", ")", ":", "update_ops", ".", "append", "(", "state_ops", ".", "assign_add", "(", "var", ",", "math_ops", ".", "multiply", "(", "self", ".", "_moving_rate", ",", "diff", ")", ")", ")", "if", "global_step", ":", "with", "ops", ".", "colocate_with", "(", "global_step", ")", ":", "update_ops", ".", "append", "(", 
"state_ops", ".", "assign_add", "(", "global_step", ",", "1", ")", ")", "variable_update", "=", "control_flow_ops", ".", "group", "(", "*", "(", "update_ops", ")", ")", "return", "variable_update", "with", "ops", ".", "control_dependencies", "(", "[", "local_update", "]", ")", ":", "condition", "=", "math_ops", ".", "equal", "(", "math_ops", ".", "mod", "(", "self", ".", "_local_step", ",", "self", ".", "_period", ")", ",", "0", ")", "conditional_update", "=", "control_flow_ops", ".", "cond", "(", "condition", ",", "_Update_global_variables", ",", "control_flow_ops", ".", "no_op", ")", "return", "conditional_update" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/opt/python/training/elastic_average_optimizer.py#L268-L351
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_misc.py
python
DateTime.GetNextWeekDay
(*args, **kwargs)
return _misc_.DateTime_GetNextWeekDay(*args, **kwargs)
GetNextWeekDay(self, int weekday) -> DateTime
GetNextWeekDay(self, int weekday) -> DateTime
[ "GetNextWeekDay", "(", "self", "int", "weekday", ")", "-", ">", "DateTime" ]
def GetNextWeekDay(*args, **kwargs): """GetNextWeekDay(self, int weekday) -> DateTime""" return _misc_.DateTime_GetNextWeekDay(*args, **kwargs)
[ "def", "GetNextWeekDay", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_misc_", ".", "DateTime_GetNextWeekDay", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L3857-L3859
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/html.py
python
HtmlWinParser.SetFontFixed
(*args, **kwargs)
return _html.HtmlWinParser_SetFontFixed(*args, **kwargs)
SetFontFixed(self, int x)
SetFontFixed(self, int x)
[ "SetFontFixed", "(", "self", "int", "x", ")" ]
def SetFontFixed(*args, **kwargs): """SetFontFixed(self, int x)""" return _html.HtmlWinParser_SetFontFixed(*args, **kwargs)
[ "def", "SetFontFixed", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_html", ".", "HtmlWinParser_SetFontFixed", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/html.py#L328-L330
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py2/pandas/core/resample.py
python
Resampler.quantile
(self, q=0.5, **kwargs)
return self._downsample('quantile', q=q, **kwargs)
Return value at the given quantile. .. versionadded:: 0.24.0 Parameters ---------- q : float or array-like, default 0.5 (50% quantile) See Also -------- Series.quantile DataFrame.quantile DataFrameGroupBy.quantile
Return value at the given quantile.
[ "Return", "value", "at", "the", "given", "quantile", "." ]
def quantile(self, q=0.5, **kwargs): """ Return value at the given quantile. .. versionadded:: 0.24.0 Parameters ---------- q : float or array-like, default 0.5 (50% quantile) See Also -------- Series.quantile DataFrame.quantile DataFrameGroupBy.quantile """ return self._downsample('quantile', q=q, **kwargs)
[ "def", "quantile", "(", "self", ",", "q", "=", "0.5", ",", "*", "*", "kwargs", ")", ":", "return", "self", ".", "_downsample", "(", "'quantile'", ",", "q", "=", "q", ",", "*", "*", "kwargs", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/resample.py#L830-L846
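Usage note (not part of the record): a small demonstration, assuming pandas >= 0.24 as the docstring's versionadded tag requires:

import numpy as np
import pandas as pd

s = pd.Series(np.arange(10.0),
              index=pd.date_range("2019-01-01", periods=10, freq="H"))
# Median of each 5-hour bucket, dispatched through _downsample('quantile', q=0.5).
print(s.resample("5H").quantile(0.5))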
etternagame/etterna
8775f74ac9c353320128609d4b4150672e9a6d04
extern/crashpad/buildtools/checkdeps/results.py
python
DependeeStatus.HasViolations
(self)
return not not self.violations
Returns True if this dependee is violating one or more rules.
Returns True if this dependee is violating one or more rules.
[ "Returns", "True", "if", "this", "dependee", "is", "violating", "one", "or", "more", "rules", "." ]
def HasViolations(self): """Returns True if this dependee is violating one or more rules.""" return not not self.violations
[ "def", "HasViolations", "(", "self", ")", ":", "return", "not", "not", "self", ".", "violations" ]
https://github.com/etternagame/etterna/blob/8775f74ac9c353320128609d4b4150672e9a6d04/extern/crashpad/buildtools/checkdeps/results.py#L42-L44
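Usage note (not part of the record): `not not x` is an older spelling of bool(x); the double negation collapses a non-empty list into True and an empty one into False:

assert (not not ["rule broken"]) is True
assert (not not []) is False
assert bool(["rule broken"]) is True   # the modern equivalent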
SpenceKonde/megaTinyCore
1c4a70b18a149fe6bcb551dfa6db11ca50b8997b
megaavr/tools/libs/pyedbglib/protocols/avr8protocol.py
python
Avr8Protocol.program_counter_write
(self, program_counter)
Writes the program counter :param program_counter: program counter value to write
Writes the program counter
[ "Writes", "the", "program", "counter" ]
def program_counter_write(self, program_counter): """ Writes the program counter :param program_counter: program counter value to write """ self.check_response(self.jtagice3_command_response( bytearray([self.CMD_AVR8_PC_WRITE, self.CMD_VERSION0]) + binary.pack_le32(program_counter)))
[ "def", "program_counter_write", "(", "self", ",", "program_counter", ")", ":", "self", ".", "check_response", "(", "self", ".", "jtagice3_command_response", "(", "bytearray", "(", "[", "self", ".", "CMD_AVR8_PC_WRITE", ",", "self", ".", "CMD_VERSION0", "]", ")", "+", "binary", ".", "pack_le32", "(", "program_counter", ")", ")", ")" ]
https://github.com/SpenceKonde/megaTinyCore/blob/1c4a70b18a149fe6bcb551dfa6db11ca50b8997b/megaavr/tools/libs/pyedbglib/protocols/avr8protocol.py#L424-L431
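Usage note (not part of the record): the method frames a two-byte command header followed by the program counter as an unsigned little-endian 32-bit integer (binary.pack_le32). The framing can be reproduced with the stdlib struct module; the constant values below are hypothetical stand-ins, the real ones live on Avr8Protocol:

import struct

CMD_AVR8_PC_WRITE = 0x35   # hypothetical placeholder value
CMD_VERSION0 = 0x00        # hypothetical placeholder value

def build_pc_write_command(program_counter):
    # Header bytes + PC packed little-endian, mirroring pack_le32.
    return bytes([CMD_AVR8_PC_WRITE, CMD_VERSION0]) + struct.pack("<I", program_counter)

print(build_pc_write_command(0x1234).hex())   # '350034120000'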
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
clang/bindings/python/clang/cindex.py
python
Type.kind
(self)
return TypeKind.from_id(self._kind_id)
Return the kind of this type.
Return the kind of this type.
[ "Return", "the", "kind", "of", "this", "type", "." ]
def kind(self): """Return the kind of this type.""" return TypeKind.from_id(self._kind_id)
[ "def", "kind", "(", "self", ")", ":", "return", "TypeKind", ".", "from_id", "(", "self", ".", "_kind_id", ")" ]
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/clang/bindings/python/clang/cindex.py#L2187-L2189
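Usage note (not part of the record): a short round trip through the bindings, assuming libclang is installed and discoverable by clang.cindex:

import clang.cindex

index = clang.cindex.Index.create()
tu = index.parse("demo.c", unsaved_files=[("demo.c", "int x;")])
cursor = next(c for c in tu.cursor.get_children() if c.spelling == "x")
print(cursor.type.kind)   # TypeKind.INT, via the property documented above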
swift/swift
12d031cf8177fdec0137f9aa7e2912fa23c4416b
3rdParty/SCons/scons-3.0.1/engine/SCons/SConf.py
python
SConfBase.AddTests
(self, tests)
Adds all the tests given in the tests dictionary to this SConf instance
Adds all the tests given in the tests dictionary to this SConf instance
[ "Adds", "all", "the", "tests", "given", "in", "the", "tests", "dictionary", "to", "this", "SConf", "instance" ]
def AddTests(self, tests): """Adds all the tests given in the tests dictionary to this SConf instance """ for name in list(tests.keys()): self.AddTest(name, tests[name])
[ "def", "AddTests", "(", "self", ",", "tests", ")", ":", "for", "name", "in", "list", "(", "tests", ".", "keys", "(", ")", ")", ":", "self", ".", "AddTest", "(", "name", ",", "tests", "[", "name", "]", ")" ]
https://github.com/swift/swift/blob/12d031cf8177fdec0137f9aa7e2912fa23c4416b/3rdParty/SCons/scons-3.0.1/engine/SCons/SConf.py#L670-L675
Illumina/hap.py
84011695b2ff2406c16a335106db6831fb67fdfe
src/python/Tools/vcfcallerinfo.py
python
CallerInfo.addVCF
(self, vcfname)
Add caller versions from a VCF :param vcfname: VCF file name
Add caller versions from a VCF :param vcfname: VCF file name
[ "Add", "caller", "versions", "from", "a", "VCF", ":", "param", "vcfname", ":", "VCF", "file", "name" ]
def addVCF(self, vcfname): """ Add caller versions from a VCF :param vcfname: VCF file name """ tf = tempfile.NamedTemporaryFile(delete=False) tf.close() vfh = {} try: sp = subprocess.Popen("vcfhdr2json '%s' '%s'" % (vcfname, tf.name), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) o, e = sp.communicate() if sp.returncode != 0: raise Exception("vcfhdr2json call failed: %s / %s" % (o, e)) vfh = json.load(open(tf.name)) finally: try: os.unlink(tf.name) except: pass cp = ['unknown', 'unknown', ''] gatk_callers = ["haplotypecaller", "unifiedgenotyper", "mutect"] sent_callers = ["haplotyper"] source_found = False for hf in vfh["fields"]: try: k = hf["key"] if k == "source": try: cp[0] = str(hf["values"]) except: cp[0] = hf["value"] if cp[0].startswith("Platypus_Version_"): cp[1] = cp[0][len("Platypus_Version_"):] cp[0] = "Platypus" source_found = True elif k == "source_version": try: cp[1] = str(hf["values"]) except: cp[1] = hf["value"] source_found = True elif k == "cmdline": try: cp[2] = str(hf["values"]) except: cp[2] = hf["value"] source_found = True elif k == "platypusOptions": try: cp[2] = str(hf["values"]) except: cp[2] = hf["value"] source_found = True elif k == "octopus": # octopus doesn't add a version self.callers.append(["octopus", "unknown", str(hf["values"])]) elif k.startswith("GATKCommandLine"): caller = "GATK" try: caller += "-" + hf["values"]["ID"] except: pass version = "unknown" try: version = hf["values"]["Version"] except: pass options = "" try: options = hf["values"]["CommandLineOptions"] except: pass if any(g in caller.lower() for g in gatk_callers): self.callers.append([caller, version, options]) elif k.startswith("SentieonCommandLine"): caller = "Sentieon" try: caller += "-" + hf["values"]["ID"] except: pass version = "unknown" try: version = hf["values"]["Version"] except: pass options = "" if any(s in caller.lower() for s in sent_callers): self.callers.append([caller, version]) except: pass if source_found: self.callers.append(cp)
[ "def", "addVCF", "(", "self", ",", "vcfname", ")", ":", "tf", "=", "tempfile", ".", "NamedTemporaryFile", "(", "delete", "=", "False", ")", "tf", ".", "close", "(", ")", "vfh", "=", "{", "}", "try", ":", "sp", "=", "subprocess", ".", "Popen", "(", "\"vcfhdr2json '%s' '%s'\"", "%", "(", "vcfname", ",", "tf", ".", "name", ")", ",", "shell", "=", "True", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", "o", ",", "e", "=", "sp", ".", "communicate", "(", ")", "if", "sp", ".", "returncode", "!=", "0", ":", "raise", "Exception", "(", "\"vcfhdr2json call failed: %s / %s\"", "%", "(", "o", ",", "e", ")", ")", "vfh", "=", "json", ".", "load", "(", "open", "(", "tf", ".", "name", ")", ")", "finally", ":", "try", ":", "os", ".", "unlink", "(", "tf", ".", "name", ")", "except", ":", "pass", "cp", "=", "[", "'unknown'", ",", "'unknown'", ",", "''", "]", "gatk_callers", "=", "[", "\"haplotypecaller\"", ",", "\"unifiedgenotyper\"", ",", "\"mutect\"", "]", "sent_callers", "=", "[", "\"haplotyper\"", "]", "source_found", "=", "False", "for", "hf", "in", "vfh", "[", "\"fields\"", "]", ":", "try", ":", "k", "=", "hf", "[", "\"key\"", "]", "if", "k", "==", "\"source\"", ":", "try", ":", "cp", "[", "0", "]", "=", "str", "(", "hf", "[", "\"values\"", "]", ")", "except", ":", "cp", "[", "0", "]", "=", "hf", "[", "\"value\"", "]", "if", "cp", "[", "0", "]", ".", "startswith", "(", "\"Platypus_Version_\"", ")", ":", "cp", "[", "1", "]", "=", "cp", "[", "0", "]", "[", "len", "(", "\"Platypus_Version_\"", ")", ":", "]", "cp", "[", "0", "]", "=", "\"Platypus\"", "source_found", "=", "True", "elif", "k", "==", "\"source_version\"", ":", "try", ":", "cp", "[", "1", "]", "=", "str", "(", "hf", "[", "\"values\"", "]", ")", "except", ":", "cp", "[", "1", "]", "=", "hf", "[", "\"value\"", "]", "source_found", "=", "True", "elif", "k", "==", "\"cmdline\"", ":", "try", ":", "cp", "[", "2", "]", "=", "str", "(", "hf", "[", "\"values\"", "]", ")", "except", ":", "cp", "[", "2", "]", "=", "hf", "[", "\"value\"", "]", "source_found", "=", "True", "elif", "k", "==", "\"platypusOptions\"", ":", "try", ":", "cp", "[", "2", "]", "=", "str", "(", "hf", "[", "\"values\"", "]", ")", "except", ":", "cp", "[", "2", "]", "=", "hf", "[", "\"value\"", "]", "source_found", "=", "True", "elif", "k", "==", "\"octopus\"", ":", "# octopus doesn't add a version", "self", ".", "callers", ".", "append", "(", "[", "\"octopus\"", ",", "\"unknown\"", ",", "str", "(", "hf", "[", "\"values\"", "]", ")", "]", ")", "elif", "k", ".", "startswith", "(", "\"GATKCommandLine\"", ")", ":", "caller", "=", "\"GATK\"", "try", ":", "caller", "+=", "\"-\"", "+", "hf", "[", "\"values\"", "]", "[", "\"ID\"", "]", "except", ":", "pass", "version", "=", "\"unknown\"", "try", ":", "version", "=", "hf", "[", "\"values\"", "]", "[", "\"Version\"", "]", "except", ":", "pass", "options", "=", "\"\"", "try", ":", "options", "=", "hf", "[", "\"values\"", "]", "[", "\"CommandLineOptions\"", "]", "except", ":", "pass", "if", "any", "(", "g", "in", "caller", ".", "lower", "(", ")", "for", "g", "in", "gatk_callers", ")", ":", "self", ".", "callers", ".", "append", "(", "[", "caller", ",", "version", ",", "options", "]", ")", "elif", "k", ".", "startswith", "(", "\"SentieonCommandLine\"", ")", ":", "caller", "=", "\"Sentieon\"", "try", ":", "caller", "+=", "\"-\"", "+", "hf", "[", "\"values\"", "]", "[", "\"ID\"", "]", "except", ":", "pass", "version", "=", "\"unknown\"", "try", ":", "version", "=", "hf", "[", "\"values\"", "]", 
"[", "\"Version\"", "]", "except", ":", "pass", "options", "=", "\"\"", "if", "any", "(", "s", "in", "caller", ".", "lower", "(", ")", "for", "s", "in", "sent_callers", ")", ":", "self", ".", "callers", ".", "append", "(", "[", "caller", ",", "version", "]", ")", "except", ":", "pass", "if", "source_found", ":", "self", ".", "callers", ".", "append", "(", "cp", ")" ]
https://github.com/Illumina/hap.py/blob/84011695b2ff2406c16a335106db6831fb67fdfe/src/python/Tools/vcfcallerinfo.py#L39-L135
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/autograph/operators/symbols.py
python
AttributeAccessSymbol.maybe_compute_value
(self)
Compute the value corresponding to the attribute access or `Undefined`. This will be `Undefined` if no such value exists either because there is no such attribute or if the base is itself undefined. Returns: value corresponding to the attribute access or `Undefined`
Compute the value corresponding to the attribute access or `Undefined`.
[ "Compute", "the", "value", "corresponding", "to", "the", "attribute", "access", "or", "Undefined", "." ]
def maybe_compute_value(self): """Compute the value corresponding to the attribute access or `Undefined`. This will be `Undefined` if no such value exists either because there is no such attribute or if the base is itself undefined. Returns: value corresponding to the attribute access or `Undefined` """ parent_value = self.parent_symbol.maybe_compute_value() if (is_undefined(parent_value) or getattr(parent_value, self.attr_name, None) is None): return Undefined(self.name) else: return parent_value.__getattribute__(self.attr_name)
[ "def", "maybe_compute_value", "(", "self", ")", ":", "parent_value", "=", "self", ".", "parent_symbol", ".", "maybe_compute_value", "(", ")", "if", "(", "is_undefined", "(", "parent_value", ")", "or", "getattr", "(", "parent_value", ",", "self", ".", "attr_name", ",", "None", ")", "is", "None", ")", ":", "return", "Undefined", "(", "self", ".", "name", ")", "else", ":", "return", "parent_value", ".", "__getattribute__", "(", "self", ".", "attr_name", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/autograph/operators/symbols.py#L69-L83
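Usage note (not part of the record): the resolution rule is "undefined base or missing attribute yields Undefined(name)". A self-contained sketch of that rule with a simplified Undefined sentinel (an assumption; autograph's real class carries more machinery):

class Undefined(object):
    def __init__(self, name):
        self.name = name

def maybe_getattr(base, attr_name):
    # Mirrors maybe_compute_value: missing attribute or undefined base -> Undefined.
    if isinstance(base, Undefined) or getattr(base, attr_name, None) is None:
        return Undefined(attr_name)
    return getattr(base, attr_name)

class Box(object):
    value = 3

print(maybe_getattr(Box(), "value"))          # 3
print(type(maybe_getattr(Box(), "missing")))  # <class '__main__.Undefined'>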
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2class.py
python
uCSIsSpecials
(code)
return ret
Check whether the character is part of Specials UCS Block
Check whether the character is part of Specials UCS Block
[ "Check", "whether", "the", "character", "is", "part", "of", "Specials", "UCS", "Block" ]
def uCSIsSpecials(code): """Check whether the character is part of Specials UCS Block """ ret = libxml2mod.xmlUCSIsSpecials(code) return ret
[ "def", "uCSIsSpecials", "(", "code", ")", ":", "ret", "=", "libxml2mod", ".", "xmlUCSIsSpecials", "(", "code", ")", "return", "ret" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L2073-L2076
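Usage note (not part of the record): the Unicode Specials block spans U+FFF0..U+FFFF, so a dependency-free equivalent of the libxml2 call is a plain range check:

def ucs_is_specials(code):
    # Specials block per the Unicode standard; it contains
    # U+FFFD REPLACEMENT CHARACTER among others.
    return 0xFFF0 <= code <= 0xFFFF

assert ucs_is_specials(ord(u"\ufffd"))
assert not ucs_is_specials(ord("A"))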
NVIDIA/thrust
627dccb359a635afdd69e95a6cc59698f23f70e2
internal/benchmark/compare_benchmark_results.py
python
int_or_float
(x)
Convert `x` to either `int` or `float`, preferring `int`. Raises: ValueError : If `x` is not convertible to either `int` or `float`
Convert `x` to either `int` or `float`, preferring `int`.
[ "Convert", "x", "to", "either", "int", "or", "float", "preferring", "int", "." ]
def int_or_float(x): """Convert `x` to either `int` or `float`, preferring `int`. Raises: ValueError : If `x` is not convertible to either `int` or `float` """ try: return int(x) except ValueError: return float(x)
[ "def", "int_or_float", "(", "x", ")", ":", "try", ":", "return", "int", "(", "x", ")", "except", "ValueError", ":", "return", "float", "(", "x", ")" ]
https://github.com/NVIDIA/thrust/blob/627dccb359a635afdd69e95a6cc59698f23f70e2/internal/benchmark/compare_benchmark_results.py#L107-L116
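Usage note (not part of the record): behaviour at a glance, with the record's definition repeated so the snippet is self-contained:

def int_or_float(x):
    try:
        return int(x)
    except ValueError:
        return float(x)

assert isinstance(int_or_float("42"), int)   # int is preferred
assert int_or_float("2.5") == 2.5            # float is the fallback
# int_or_float("n/a") would propagate float()'s ValueError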
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/decimal.py
python
Decimal.next_toward
(self, other, context=None)
return ans
Returns the number closest to self, in the direction towards other. The result is the closest representable number to self (excluding self) that is in the direction towards other, unless both have the same value. If the two operands are numerically equal, then the result is a copy of self with the sign set to be the same as the sign of other.
Returns the number closest to self, in the direction towards other.
[ "Returns", "the", "number", "closest", "to", "self", "in", "the", "direction", "towards", "other", "." ]
def next_toward(self, other, context=None): """Returns the number closest to self, in the direction towards other. The result is the closest representable number to self (excluding self) that is in the direction towards other, unless both have the same value. If the two operands are numerically equal, then the result is a copy of self with the sign set to be the same as the sign of other. """ other = _convert_other(other, raiseit=True) if context is None: context = getcontext() ans = self._check_nans(other, context) if ans: return ans comparison = self._cmp(other) if comparison == 0: return self.copy_sign(other) if comparison == -1: ans = self.next_plus(context) else: # comparison == 1 ans = self.next_minus(context) # decide which flags to raise using value of ans if ans._isinfinity(): context._raise_error(Overflow, 'Infinite result from next_toward', ans._sign) context._raise_error(Inexact) context._raise_error(Rounded) elif ans.adjusted() < context.Emin: context._raise_error(Underflow) context._raise_error(Subnormal) context._raise_error(Inexact) context._raise_error(Rounded) # if precision == 1 then we don't raise Clamped for a # result 0E-Etiny. if not ans: context._raise_error(Clamped) return ans
[ "def", "next_toward", "(", "self", ",", "other", ",", "context", "=", "None", ")", ":", "other", "=", "_convert_other", "(", "other", ",", "raiseit", "=", "True", ")", "if", "context", "is", "None", ":", "context", "=", "getcontext", "(", ")", "ans", "=", "self", ".", "_check_nans", "(", "other", ",", "context", ")", "if", "ans", ":", "return", "ans", "comparison", "=", "self", ".", "_cmp", "(", "other", ")", "if", "comparison", "==", "0", ":", "return", "self", ".", "copy_sign", "(", "other", ")", "if", "comparison", "==", "-", "1", ":", "ans", "=", "self", ".", "next_plus", "(", "context", ")", "else", ":", "# comparison == 1", "ans", "=", "self", ".", "next_minus", "(", "context", ")", "# decide which flags to raise using value of ans", "if", "ans", ".", "_isinfinity", "(", ")", ":", "context", ".", "_raise_error", "(", "Overflow", ",", "'Infinite result from next_toward'", ",", "ans", ".", "_sign", ")", "context", ".", "_raise_error", "(", "Inexact", ")", "context", ".", "_raise_error", "(", "Rounded", ")", "elif", "ans", ".", "adjusted", "(", ")", "<", "context", ".", "Emin", ":", "context", ".", "_raise_error", "(", "Underflow", ")", "context", ".", "_raise_error", "(", "Subnormal", ")", "context", ".", "_raise_error", "(", "Inexact", ")", "context", ".", "_raise_error", "(", "Rounded", ")", "# if precision == 1 then we don't raise Clamped for a", "# result 0E-Etiny.", "if", "not", "ans", ":", "context", ".", "_raise_error", "(", "Clamped", ")", "return", "ans" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/decimal.py#L3440-L3484
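Usage note (not part of the record): the method is public stdlib API, so the direction rule and the equal-operands sign-copy rule are easy to observe:

from decimal import Decimal, getcontext

getcontext().prec = 4
x = Decimal("1.000")
print(x.next_toward(Decimal("2")))   # 1.001  (next representable value upward)
print(x.next_toward(Decimal("0")))   # 0.9999 (next representable value downward)
# Numerically equal operands: a copy of self with the other operand's sign.
print(Decimal("0").next_toward(Decimal("-0")))   # -0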
wy1iu/LargeMargin_Softmax_Loss
c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec
python/caffe/classifier.py
python
Classifier.predict
(self, inputs, oversample=True)
return predictions
Predict classification probabilities of inputs. Parameters ---------- inputs : iterable of (H x W x K) input ndarrays. oversample : boolean average predictions across center, corners, and mirrors when True (default). Center-only prediction when False. Returns ------- predictions: (N x C) ndarray of class probabilities for N images and C classes.
Predict classification probabilities of inputs.
[ "Predict", "classification", "probabilities", "of", "inputs", "." ]
def predict(self, inputs, oversample=True): """ Predict classification probabilities of inputs. Parameters ---------- inputs : iterable of (H x W x K) input ndarrays. oversample : boolean average predictions across center, corners, and mirrors when True (default). Center-only prediction when False. Returns ------- predictions: (N x C) ndarray of class probabilities for N images and C classes. """ # Scale to standardize input dimensions. input_ = np.zeros((len(inputs), self.image_dims[0], self.image_dims[1], inputs[0].shape[2]), dtype=np.float32) for ix, in_ in enumerate(inputs): input_[ix] = caffe.io.resize_image(in_, self.image_dims) if oversample: # Generate center, corner, and mirrored crops. input_ = caffe.io.oversample(input_, self.crop_dims) else: # Take center crop. center = np.array(self.image_dims) / 2.0 crop = np.tile(center, (1, 2))[0] + np.concatenate([ -self.crop_dims / 2.0, self.crop_dims / 2.0 ]) crop = crop.astype(int) input_ = input_[:, crop[0]:crop[2], crop[1]:crop[3], :] # Classify caffe_in = np.zeros(np.array(input_.shape)[[0, 3, 1, 2]], dtype=np.float32) for ix, in_ in enumerate(input_): caffe_in[ix] = self.transformer.preprocess(self.inputs[0], in_) out = self.forward_all(**{self.inputs[0]: caffe_in}) predictions = out[self.outputs[0]] # For oversampling, average predictions across crops. if oversample: predictions = predictions.reshape((len(predictions) / 10, 10, -1)) predictions = predictions.mean(1) return predictions
[ "def", "predict", "(", "self", ",", "inputs", ",", "oversample", "=", "True", ")", ":", "# Scale to standardize input dimensions.", "input_", "=", "np", ".", "zeros", "(", "(", "len", "(", "inputs", ")", ",", "self", ".", "image_dims", "[", "0", "]", ",", "self", ".", "image_dims", "[", "1", "]", ",", "inputs", "[", "0", "]", ".", "shape", "[", "2", "]", ")", ",", "dtype", "=", "np", ".", "float32", ")", "for", "ix", ",", "in_", "in", "enumerate", "(", "inputs", ")", ":", "input_", "[", "ix", "]", "=", "caffe", ".", "io", ".", "resize_image", "(", "in_", ",", "self", ".", "image_dims", ")", "if", "oversample", ":", "# Generate center, corner, and mirrored crops.", "input_", "=", "caffe", ".", "io", ".", "oversample", "(", "input_", ",", "self", ".", "crop_dims", ")", "else", ":", "# Take center crop.", "center", "=", "np", ".", "array", "(", "self", ".", "image_dims", ")", "/", "2.0", "crop", "=", "np", ".", "tile", "(", "center", ",", "(", "1", ",", "2", ")", ")", "[", "0", "]", "+", "np", ".", "concatenate", "(", "[", "-", "self", ".", "crop_dims", "/", "2.0", ",", "self", ".", "crop_dims", "/", "2.0", "]", ")", "crop", "=", "crop", ".", "astype", "(", "int", ")", "input_", "=", "input_", "[", ":", ",", "crop", "[", "0", "]", ":", "crop", "[", "2", "]", ",", "crop", "[", "1", "]", ":", "crop", "[", "3", "]", ",", ":", "]", "# Classify", "caffe_in", "=", "np", ".", "zeros", "(", "np", ".", "array", "(", "input_", ".", "shape", ")", "[", "[", "0", ",", "3", ",", "1", ",", "2", "]", "]", ",", "dtype", "=", "np", ".", "float32", ")", "for", "ix", ",", "in_", "in", "enumerate", "(", "input_", ")", ":", "caffe_in", "[", "ix", "]", "=", "self", ".", "transformer", ".", "preprocess", "(", "self", ".", "inputs", "[", "0", "]", ",", "in_", ")", "out", "=", "self", ".", "forward_all", "(", "*", "*", "{", "self", ".", "inputs", "[", "0", "]", ":", "caffe_in", "}", ")", "predictions", "=", "out", "[", "self", ".", "outputs", "[", "0", "]", "]", "# For oversampling, average predictions across crops.", "if", "oversample", ":", "predictions", "=", "predictions", ".", "reshape", "(", "(", "len", "(", "predictions", ")", "/", "10", ",", "10", ",", "-", "1", ")", ")", "predictions", "=", "predictions", ".", "mean", "(", "1", ")", "return", "predictions" ]
https://github.com/wy1iu/LargeMargin_Softmax_Loss/blob/c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec/python/caffe/classifier.py#L47-L98
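Usage note (not part of the record): the center-crop arithmetic in the non-oversampling branch is easy to verify in isolation; the dimensions below are illustrative assumptions (a 256x256 input and a 227x227 crop):

import numpy as np

image_dims = np.array([256, 256])
crop_dims = np.array([227, 227])

center = image_dims / 2.0
crop = np.tile(center, (1, 2))[0] + np.concatenate([-crop_dims / 2.0,
                                                    crop_dims / 2.0])
crop = crop.astype(int)
print(crop)   # [ 14  14 241 241] -> rows 14:241, cols 14:241, a 227x227 window

Note also that `len(predictions) / 10` in the reshape relies on Python 2 integer division; under Python 3 it would need `//`.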
yuxng/PoseCNN
9f3dd7b7bce21dcafc05e8f18ccc90da3caabd04
lib/setup.py
python
locate_cuda
()
return cudaconfig
Locate the CUDA environment on the system Returns a dict with keys 'home', 'nvcc', 'include', and 'lib64' and values giving the absolute path to each directory. Starts by looking for the CUDAHOME env variable. If not found, everything is based on finding 'nvcc' in the PATH.
Locate the CUDA environment on the system
[ "Locate", "the", "CUDA", "environment", "on", "the", "system" ]
def locate_cuda(): """Locate the CUDA environment on the system Returns a dict with keys 'home', 'nvcc', 'include', and 'lib64' and values giving the absolute path to each directory. Starts by looking for the CUDAHOME env variable. If not found, everything is based on finding 'nvcc' in the PATH. """ # first check if the CUDAHOME env variable is in use if 'CUDAHOME' in os.environ: home = os.environ['CUDAHOME'] nvcc = pjoin(home, 'bin', 'nvcc') else: # otherwise, search the PATH for NVCC default_path = pjoin(os.sep, 'usr', 'local', 'cuda', 'bin') nvcc = find_in_path('nvcc', os.environ['PATH'] + os.pathsep + default_path) if nvcc is None: raise EnvironmentError('The nvcc binary could not be ' 'located in your $PATH. Either add it to your path, or set $CUDAHOME') home = os.path.dirname(os.path.dirname(nvcc)) cudaconfig = {'home':home, 'nvcc':nvcc, 'include': pjoin(home, 'include'), 'lib64': pjoin(home, 'lib64')} for k, v in cudaconfig.iteritems(): if not os.path.exists(v): raise EnvironmentError('The CUDA %s path could not be located in %s' % (k, v)) return cudaconfig
[ "def", "locate_cuda", "(", ")", ":", "# first check if the CUDAHOME env variable is in use", "if", "'CUDAHOME'", "in", "os", ".", "environ", ":", "home", "=", "os", ".", "environ", "[", "'CUDAHOME'", "]", "nvcc", "=", "pjoin", "(", "home", ",", "'bin'", ",", "'nvcc'", ")", "else", ":", "# otherwise, search the PATH for NVCC", "default_path", "=", "pjoin", "(", "os", ".", "sep", ",", "'usr'", ",", "'local'", ",", "'cuda'", ",", "'bin'", ")", "nvcc", "=", "find_in_path", "(", "'nvcc'", ",", "os", ".", "environ", "[", "'PATH'", "]", "+", "os", ".", "pathsep", "+", "default_path", ")", "if", "nvcc", "is", "None", ":", "raise", "EnvironmentError", "(", "'The nvcc binary could not be '", "'located in your $PATH. Either add it to your path, or set $CUDAHOME'", ")", "home", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "dirname", "(", "nvcc", ")", ")", "cudaconfig", "=", "{", "'home'", ":", "home", ",", "'nvcc'", ":", "nvcc", ",", "'include'", ":", "pjoin", "(", "home", ",", "'include'", ")", ",", "'lib64'", ":", "pjoin", "(", "home", ",", "'lib64'", ")", "}", "for", "k", ",", "v", "in", "cudaconfig", ".", "iteritems", "(", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "v", ")", ":", "raise", "EnvironmentError", "(", "'The CUDA %s path could not be located in %s'", "%", "(", "k", ",", "v", ")", ")", "return", "cudaconfig" ]
https://github.com/yuxng/PoseCNN/blob/9f3dd7b7bce21dcafc05e8f18ccc90da3caabd04/lib/setup.py#L24-L54
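Usage note (not part of the record): locate_cuda() leans on a find_in_path() helper defined elsewhere in the same setup.py; a minimal stand-in (an assumption, not the original helper) shows the PATH-walking idea:

import os
from os.path import join as pjoin

def find_in_path(name, path):
    # Walk the directories of a PATH-style string and return the first hit.
    for directory in path.split(os.pathsep):
        candidate = pjoin(directory, name)
        if os.path.exists(candidate):
            return os.path.abspath(candidate)
    return None

# The same lookup locate_cuda() performs when CUDAHOME is unset:
print(find_in_path("nvcc", os.environ.get("PATH", "")))

Note that the record's `cudaconfig.iteritems()` ties it to Python 2; `items()` would be the Python 3 spelling.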
daijifeng001/caffe-rfcn
543f8f6a4b7c88256ea1445ae951a12d1ad9cffd
scripts/cpp_lint.py
python
CheckForNonConstReference
(filename, clean_lines, linenum, nesting_state, error)
Check for non-const references. Separate from CheckLanguage since it scans backwards from current line, instead of scanning forward. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. nesting_state: A _NestingState instance which maintains information about the current stack of nested blocks being parsed. error: The function to call with any errors found.
Check for non-const references.
[ "Check", "for", "non", "-", "const", "references", "." ]
def CheckForNonConstReference(filename, clean_lines, linenum, nesting_state, error): """Check for non-const references. Separate from CheckLanguage since it scans backwards from current line, instead of scanning forward. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. nesting_state: A _NestingState instance which maintains information about the current stack of nested blocks being parsed. error: The function to call with any errors found. """ # Do nothing if there is no '&' on current line. line = clean_lines.elided[linenum] if '&' not in line: return # Long type names may be broken across multiple lines, usually in one # of these forms: # LongType # ::LongTypeContinued &identifier # LongType:: # LongTypeContinued &identifier # LongType< # ...>::LongTypeContinued &identifier # # If we detected a type split across two lines, join the previous # line to current line so that we can match const references # accordingly. # # Note that this only scans back one line, since scanning back # arbitrary number of lines would be expensive. If you have a type # that spans more than 2 lines, please use a typedef. if linenum > 1: previous = None if Match(r'\s*::(?:[\w<>]|::)+\s*&\s*\S', line): # previous_line\n + ::current_line previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+[\w<>])\s*$', clean_lines.elided[linenum - 1]) elif Match(r'\s*[a-zA-Z_]([\w<>]|::)+\s*&\s*\S', line): # previous_line::\n + current_line previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+::)\s*$', clean_lines.elided[linenum - 1]) if previous: line = previous.group(1) + line.lstrip() else: # Check for templated parameter that is split across multiple lines endpos = line.rfind('>') if endpos > -1: (_, startline, startpos) = ReverseCloseExpression( clean_lines, linenum, endpos) if startpos > -1 and startline < linenum: # Found the matching < on an earlier line, collect all # pieces up to current line. line = '' for i in xrange(startline, linenum + 1): line += clean_lines.elided[i].strip() # Check for non-const references in function parameters. A single '&' may # found in the following places: # inside expression: binary & for bitwise AND # inside expression: unary & for taking the address of something # inside declarators: reference parameter # We will exclude the first two cases by checking that we are not inside a # function body, including one that was just introduced by a trailing '{'. # TODO(unknwon): Doesn't account for preprocessor directives. # TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare]. check_params = False if not nesting_state.stack: check_params = True # top level elif (isinstance(nesting_state.stack[-1], _ClassInfo) or isinstance(nesting_state.stack[-1], _NamespaceInfo)): check_params = True # within class or namespace elif Match(r'.*{\s*$', line): if (len(nesting_state.stack) == 1 or isinstance(nesting_state.stack[-2], _ClassInfo) or isinstance(nesting_state.stack[-2], _NamespaceInfo)): check_params = True # just opened global/class/namespace block # We allow non-const references in a few standard places, like functions # called "swap()" or iostream operators like "<<" or ">>". Do not check # those function parameters. # # We also accept & in static_assert, which looks like a function but # it's actually a declaration expression. 
whitelisted_functions = (r'(?:[sS]wap(?:<\w:+>)?|' r'operator\s*[<>][<>]|' r'static_assert|COMPILE_ASSERT' r')\s*\(') if Search(whitelisted_functions, line): check_params = False elif not Search(r'\S+\([^)]*$', line): # Don't see a whitelisted function on this line. Actually we # didn't see any function name on this line, so this is likely a # multi-line parameter list. Try a bit harder to catch this case. for i in xrange(2): if (linenum > i and Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])): check_params = False break if check_params: decls = ReplaceAll(r'{[^}]*}', ' ', line) # exclude function body for parameter in re.findall(_RE_PATTERN_REF_PARAM, decls): if not Match(_RE_PATTERN_CONST_REF_PARAM, parameter): error(filename, linenum, 'runtime/references', 2, 'Is this a non-const reference? ' 'If so, make const or use a pointer: ' + ReplaceAll(' *<', '<', parameter))
[ "def", "CheckForNonConstReference", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "nesting_state", ",", "error", ")", ":", "# Do nothing if there is no '&' on current line.", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "'&'", "not", "in", "line", ":", "return", "# Long type names may be broken across multiple lines, usually in one", "# of these forms:", "# LongType", "# ::LongTypeContinued &identifier", "# LongType::", "# LongTypeContinued &identifier", "# LongType<", "# ...>::LongTypeContinued &identifier", "#", "# If we detected a type split across two lines, join the previous", "# line to current line so that we can match const references", "# accordingly.", "#", "# Note that this only scans back one line, since scanning back", "# arbitrary number of lines would be expensive. If you have a type", "# that spans more than 2 lines, please use a typedef.", "if", "linenum", ">", "1", ":", "previous", "=", "None", "if", "Match", "(", "r'\\s*::(?:[\\w<>]|::)+\\s*&\\s*\\S'", ",", "line", ")", ":", "# previous_line\\n + ::current_line", "previous", "=", "Search", "(", "r'\\b((?:const\\s*)?(?:[\\w<>]|::)+[\\w<>])\\s*$'", ",", "clean_lines", ".", "elided", "[", "linenum", "-", "1", "]", ")", "elif", "Match", "(", "r'\\s*[a-zA-Z_]([\\w<>]|::)+\\s*&\\s*\\S'", ",", "line", ")", ":", "# previous_line::\\n + current_line", "previous", "=", "Search", "(", "r'\\b((?:const\\s*)?(?:[\\w<>]|::)+::)\\s*$'", ",", "clean_lines", ".", "elided", "[", "linenum", "-", "1", "]", ")", "if", "previous", ":", "line", "=", "previous", ".", "group", "(", "1", ")", "+", "line", ".", "lstrip", "(", ")", "else", ":", "# Check for templated parameter that is split across multiple lines", "endpos", "=", "line", ".", "rfind", "(", "'>'", ")", "if", "endpos", ">", "-", "1", ":", "(", "_", ",", "startline", ",", "startpos", ")", "=", "ReverseCloseExpression", "(", "clean_lines", ",", "linenum", ",", "endpos", ")", "if", "startpos", ">", "-", "1", "and", "startline", "<", "linenum", ":", "# Found the matching < on an earlier line, collect all", "# pieces up to current line.", "line", "=", "''", "for", "i", "in", "xrange", "(", "startline", ",", "linenum", "+", "1", ")", ":", "line", "+=", "clean_lines", ".", "elided", "[", "i", "]", ".", "strip", "(", ")", "# Check for non-const references in function parameters. 
A single '&' may", "# found in the following places:", "# inside expression: binary & for bitwise AND", "# inside expression: unary & for taking the address of something", "# inside declarators: reference parameter", "# We will exclude the first two cases by checking that we are not inside a", "# function body, including one that was just introduced by a trailing '{'.", "# TODO(unknwon): Doesn't account for preprocessor directives.", "# TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare].", "check_params", "=", "False", "if", "not", "nesting_state", ".", "stack", ":", "check_params", "=", "True", "# top level", "elif", "(", "isinstance", "(", "nesting_state", ".", "stack", "[", "-", "1", "]", ",", "_ClassInfo", ")", "or", "isinstance", "(", "nesting_state", ".", "stack", "[", "-", "1", "]", ",", "_NamespaceInfo", ")", ")", ":", "check_params", "=", "True", "# within class or namespace", "elif", "Match", "(", "r'.*{\\s*$'", ",", "line", ")", ":", "if", "(", "len", "(", "nesting_state", ".", "stack", ")", "==", "1", "or", "isinstance", "(", "nesting_state", ".", "stack", "[", "-", "2", "]", ",", "_ClassInfo", ")", "or", "isinstance", "(", "nesting_state", ".", "stack", "[", "-", "2", "]", ",", "_NamespaceInfo", ")", ")", ":", "check_params", "=", "True", "# just opened global/class/namespace block", "# We allow non-const references in a few standard places, like functions", "# called \"swap()\" or iostream operators like \"<<\" or \">>\". Do not check", "# those function parameters.", "#", "# We also accept & in static_assert, which looks like a function but", "# it's actually a declaration expression.", "whitelisted_functions", "=", "(", "r'(?:[sS]wap(?:<\\w:+>)?|'", "r'operator\\s*[<>][<>]|'", "r'static_assert|COMPILE_ASSERT'", "r')\\s*\\('", ")", "if", "Search", "(", "whitelisted_functions", ",", "line", ")", ":", "check_params", "=", "False", "elif", "not", "Search", "(", "r'\\S+\\([^)]*$'", ",", "line", ")", ":", "# Don't see a whitelisted function on this line. Actually we", "# didn't see any function name on this line, so this is likely a", "# multi-line parameter list. Try a bit harder to catch this case.", "for", "i", "in", "xrange", "(", "2", ")", ":", "if", "(", "linenum", ">", "i", "and", "Search", "(", "whitelisted_functions", ",", "clean_lines", ".", "elided", "[", "linenum", "-", "i", "-", "1", "]", ")", ")", ":", "check_params", "=", "False", "break", "if", "check_params", ":", "decls", "=", "ReplaceAll", "(", "r'{[^}]*}'", ",", "' '", ",", "line", ")", "# exclude function body", "for", "parameter", "in", "re", ".", "findall", "(", "_RE_PATTERN_REF_PARAM", ",", "decls", ")", ":", "if", "not", "Match", "(", "_RE_PATTERN_CONST_REF_PARAM", ",", "parameter", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'runtime/references'", ",", "2", ",", "'Is this a non-const reference? '", "'If so, make const or use a pointer: '", "+", "ReplaceAll", "(", "' *<'", ",", "'<'", ",", "parameter", ")", ")" ]
https://github.com/daijifeng001/caffe-rfcn/blob/543f8f6a4b7c88256ea1445ae951a12d1ad9cffd/scripts/cpp_lint.py#L4134-L4244
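Usage note (not part of the record): the heart of the check is "a reference parameter that is not const-qualified". A toy regex demonstration of that idea (an assumption; cpplint's real _RE_PATTERN_REF_PARAM / _RE_PATTERN_CONST_REF_PARAM are considerably richer):

import re

REF_PARAM = re.compile(r'(?:const\s+)?\w+(?:<[^<>]*>)?\s*&\s*\w+')

for decl in ["void f(string& s)", "void g(const string& s)"]:
    for param in REF_PARAM.findall(decl):
        if not param.startswith("const"):
            print("non-const reference:", param)   # flags only string& s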
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Editor/Python/windows/Lib/site-packages/pkg_resources/_vendor/pyparsing.py
python
ParserElement.__invert__
( self )
return NotAny( self )
Implementation of ~ operator - returns C{L{NotAny}}
Implementation of ~ operator - returns C{L{NotAny}}
[ "Implementation", "of", "~", "operator", "-", "returns", "C", "{", "L", "{", "NotAny", "}}" ]
def __invert__( self ): """ Implementation of ~ operator - returns C{L{NotAny}} """ return NotAny( self )
[ "def", "__invert__", "(", "self", ")", ":", "return", "NotAny", "(", "self", ")" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pkg_resources/_vendor/pyparsing.py#L2020-L2024
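Usage note (not part of the record): the ~ operator builds a NotAny, i.e. a negative lookahead that consumes no input. A short demonstration, assuming pyparsing is installed:

from pyparsing import Keyword, Word, alphas

# Accept an identifier only when it is not the keyword 'end'.
identifier = ~Keyword("end") + Word(alphas)

print(identifier.parseString("start"))   # ['start']
try:
    identifier.parseString("end")
except Exception as exc:
    print("rejected:", exc)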
plaidml/plaidml
f3c6681db21460e5fdc11ae651d6d7b6c27f8262
plaidml/edsl/__init__.py
python
TensorDim.__mul__
(self, other)
return TensorDim(_dim_op(lib.PLAIDML_INT_OP_MUL, self, other))
Performs a multiplication between a TensorDim and another operand in a polynomial expression. Example: >>> N, M = TensorDims(2) >>> A = Placeholder(DType.FLOAT32, [3, 3]) >>> A.bind_dims(N, M) >>> R = Contraction().outShape(N * 5)
Performs a multiplication between a TensorDim and another operand in a polynomial expression.
[ "Performs", "a", "multiplication", "between", "a", "TensorDim", "and", "another", "operand", "in", "a", "polynomial", "expression", "." ]
def __mul__(self, other): """Performs a multiplication between a TensorDim and another operand in a polynomial expression. Example: >>> N, M = TensorDims(2) >>> A = Placeholder(DType.FLOAT32, [3, 3]) >>> A.bind_dims(N, M) >>> R = Contraction().outShape(N * 5) """ return TensorDim(_dim_op(lib.PLAIDML_INT_OP_MUL, self, other))
[ "def", "__mul__", "(", "self", ",", "other", ")", ":", "return", "TensorDim", "(", "_dim_op", "(", "lib", ".", "PLAIDML_INT_OP_MUL", ",", "self", ",", "other", ")", ")" ]
https://github.com/plaidml/plaidml/blob/f3c6681db21460e5fdc11ae651d6d7b6c27f8262/plaidml/edsl/__init__.py#L103-L113
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/google_appengine_cloudstorage/cloudstorage/cloudstorage_api.py
python
_Bucket.__iter__
(self)
Iter over the bucket. Yields: GCSFileStat: a GCSFileStat for an object in the bucket. They are ordered by GCSFileStat.filename.
Iter over the bucket.
[ "Iter", "over", "the", "bucket", "." ]
def __iter__(self): """Iter over the bucket. Yields: GCSFileStat: a GCSFileStat for an object in the bucket. They are ordered by GCSFileStat.filename. """ total = 0 max_keys = self._options.get('max-keys') while self._get_bucket_fut: status, resp_headers, content = self._get_bucket_fut.get_result() errors.check_status(status, [200], self._path, resp_headers=resp_headers, extras=self._options) if self._should_get_another_batch(content): self._get_bucket_fut = self._api.get_bucket_async( self._path + '?' + urllib.urlencode(self._options)) else: self._get_bucket_fut = None root = ET.fromstring(content) dirs = self._next_dir_gen(root) files = self._next_file_gen(root) next_file = files.next() next_dir = dirs.next() while ((max_keys is None or total < max_keys) and not (next_file is None and next_dir is None)): total += 1 if next_file is None: self._last_yield = next_dir next_dir = dirs.next() elif next_dir is None: self._last_yield = next_file next_file = files.next() elif next_dir < next_file: self._last_yield = next_dir next_dir = dirs.next() elif next_file < next_dir: self._last_yield = next_file next_file = files.next() else: logging.error( 'Should never reach. next file is %r. next dir is %r.', next_file, next_dir) if self._new_max_keys: self._new_max_keys -= 1 yield self._last_yield
[ "def", "__iter__", "(", "self", ")", ":", "total", "=", "0", "max_keys", "=", "self", ".", "_options", ".", "get", "(", "'max-keys'", ")", "while", "self", ".", "_get_bucket_fut", ":", "status", ",", "resp_headers", ",", "content", "=", "self", ".", "_get_bucket_fut", ".", "get_result", "(", ")", "errors", ".", "check_status", "(", "status", ",", "[", "200", "]", ",", "self", ".", "_path", ",", "resp_headers", "=", "resp_headers", ",", "extras", "=", "self", ".", "_options", ")", "if", "self", ".", "_should_get_another_batch", "(", "content", ")", ":", "self", ".", "_get_bucket_fut", "=", "self", ".", "_api", ".", "get_bucket_async", "(", "self", ".", "_path", "+", "'?'", "+", "urllib", ".", "urlencode", "(", "self", ".", "_options", ")", ")", "else", ":", "self", ".", "_get_bucket_fut", "=", "None", "root", "=", "ET", ".", "fromstring", "(", "content", ")", "dirs", "=", "self", ".", "_next_dir_gen", "(", "root", ")", "files", "=", "self", ".", "_next_file_gen", "(", "root", ")", "next_file", "=", "files", ".", "next", "(", ")", "next_dir", "=", "dirs", ".", "next", "(", ")", "while", "(", "(", "max_keys", "is", "None", "or", "total", "<", "max_keys", ")", "and", "not", "(", "next_file", "is", "None", "and", "next_dir", "is", "None", ")", ")", ":", "total", "+=", "1", "if", "next_file", "is", "None", ":", "self", ".", "_last_yield", "=", "next_dir", "next_dir", "=", "dirs", ".", "next", "(", ")", "elif", "next_dir", "is", "None", ":", "self", ".", "_last_yield", "=", "next_file", "next_file", "=", "files", ".", "next", "(", ")", "elif", "next_dir", "<", "next_file", ":", "self", ".", "_last_yield", "=", "next_dir", "next_dir", "=", "dirs", ".", "next", "(", ")", "elif", "next_file", "<", "next_dir", ":", "self", ".", "_last_yield", "=", "next_file", "next_file", "=", "files", ".", "next", "(", ")", "else", ":", "logging", ".", "error", "(", "'Should never reach. next file is %r. next dir is %r.'", ",", "next_file", ",", "next_dir", ")", "if", "self", ".", "_new_max_keys", ":", "self", ".", "_new_max_keys", "-=", "1", "yield", "self", ".", "_last_yield" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/google_appengine_cloudstorage/cloudstorage/cloudstorage_api.py#L308-L356
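The iterator above walks paginated bucket listings while merging two sorted generators (directories and files), each of which yields None once exhausted, and stops at max-keys. A minimal sketch of that merge pattern, detached from the GCS API (all names here are illustrative):

    def padded(items):
        # Mirror the generators above: yield the real items, then None forever.
        for item in items:
            yield item
        while True:
            yield None

    def merge_sorted(gen_a, gen_b, max_items=None):
        next_a, next_b = next(gen_a), next(gen_b)
        total = 0
        while ((max_items is None or total < max_items)
               and not (next_a is None and next_b is None)):
            total += 1
            if next_b is None or (next_a is not None and next_a < next_b):
                yield next_a
                next_a = next(gen_a)
            else:
                yield next_b
                next_b = next(gen_b)

    print(list(merge_sorted(padded([1, 4]), padded([2, 3, 5]), max_items=4)))
    # [1, 2, 3, 4]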
GeometryCollective/boundary-first-flattening
8250e5a0e85980ec50b5e8aa8f49dd6519f915cd
deps/nanogui/docs/exhale.py
python
ExhaleRoot.generateDirectoryView
(self, treeView)
Generates the file view hierarchy, writing it to ``self.directory_view_file``. :Parameters: ``treeView`` (bool) Whether or not to use the collapsibleList version. See the ``createTreeView`` description in :func:`exhale.generate`.
Generates the file view hierarchy, writing it to ``self.directory_view_file``.
[ "Generates", "the", "file", "view", "hierarchy", "writing", "it", "to", "self", ".", "directory_view_file", "." ]
def generateDirectoryView(self, treeView): ''' Generates the file view hierarchy, writing it to ``self.directory_view_file``. :Parameters: ``treeView`` (bool) Whether or not to use the collapsibleList version. See the ``createTreeView`` description in :func:`exhale.generate`. ''' directory_view_stream = StringIO() for d in self.dirs: d.toDirectoryView(0, directory_view_stream, treeView) # add potential missing files (not sure if this is possible though) missing = [] for f in sorted(self.files): if not f.in_directory_view: missing.append(f) found_missing = len(missing) > 0 if found_missing: idx = 0 last_missing_child = len(missing) - 1 for m in missing: m.toDirectoryView(0, directory_view_stream, treeView, idx == last_missing_child) idx += 1 elif treeView: # need to restart since there were no missing children found, otherwise the # last directory will not correctly have a lastChild directory_view_stream.close() directory_view_stream = StringIO() last_dir_index = len(self.dirs) - 1 for idx in range(last_dir_index + 1): curr_d = self.dirs[idx] curr_d.toDirectoryView(0, directory_view_stream, treeView, idx == last_dir_index) # extract the value from the stream and close it down directory_view_string = directory_view_stream.getvalue() directory_view_stream.close() # inject the raw html for the treeView unordered lists if treeView: # we need to indent everything to be under the .. raw:: html directive, add # indentation so the html is readable while we are at it indented = re.sub(r'(.+)', r' \1', directory_view_string) directory_view_string = \ '.. raw:: html\n\n' \ ' <ul class="treeView">\n' \ ' <li>\n' \ ' <ul class="collapsibleList">\n' \ '{}' \ ' </ul><!-- collapsibleList -->\n' \ ' </li><!-- only tree view element -->\n' \ ' </ul><!-- treeView -->\n'.format(indented) # write everything to file to be included in the root api later try: with open(self.directory_view_file, "w") as dvf: dvf.write("File Hierarchy\n{}\n\n{}\n\n".format(EXHALE_SECTION_HEADING, directory_view_string)) except Exception as e: exclaimError("Error writing the directory hierarchy: {}".format(e))
[ "def", "generateDirectoryView", "(", "self", ",", "treeView", ")", ":", "directory_view_stream", "=", "StringIO", "(", ")", "for", "d", "in", "self", ".", "dirs", ":", "d", ".", "toDirectoryView", "(", "0", ",", "directory_view_stream", ",", "treeView", ")", "# add potential missing files (not sure if this is possible though)", "missing", "=", "[", "]", "for", "f", "in", "sorted", "(", "self", ".", "files", ")", ":", "if", "not", "f", ".", "in_directory_view", ":", "missing", ".", "append", "(", "f", ")", "found_missing", "=", "len", "(", "missing", ")", ">", "0", "if", "found_missing", ":", "idx", "=", "0", "last_missing_child", "=", "len", "(", "missing", ")", "-", "1", "for", "m", "in", "missing", ":", "m", ".", "toDirectoryView", "(", "0", ",", "directory_view_stream", ",", "treeView", ",", "idx", "==", "last_missing_child", ")", "idx", "+=", "1", "elif", "treeView", ":", "# need to restart since there were no missing children found, otherwise the", "# last directory will not correctly have a lastChild", "directory_view_stream", ".", "close", "(", ")", "directory_view_stream", "=", "StringIO", "(", ")", "last_dir_index", "=", "len", "(", "self", ".", "dirs", ")", "-", "1", "for", "idx", "in", "range", "(", "last_dir_index", "+", "1", ")", ":", "curr_d", "=", "self", ".", "dirs", "[", "idx", "]", "curr_d", ".", "toDirectoryView", "(", "0", ",", "directory_view_stream", ",", "treeView", ",", "idx", "==", "last_dir_index", ")", "# extract the value from the stream and close it down", "directory_view_string", "=", "directory_view_stream", ".", "getvalue", "(", ")", "directory_view_stream", ".", "close", "(", ")", "# inject the raw html for the treeView unordered lists", "if", "treeView", ":", "# we need to indent everything to be under the .. raw:: html directive, add", "# indentation so the html is readable while we are at it", "indented", "=", "re", ".", "sub", "(", "r'(.+)'", ",", "r' \\1'", ",", "directory_view_string", ")", "directory_view_string", "=", "'.. raw:: html\\n\\n'", "' <ul class=\"treeView\">\\n'", "' <li>\\n'", "' <ul class=\"collapsibleList\">\\n'", "'{}'", "' </ul><!-- collapsibleList -->\\n'", "' </li><!-- only tree view element -->\\n'", "' </ul><!-- treeView -->\\n'", ".", "format", "(", "indented", ")", "# write everything to file to be included in the root api later", "try", ":", "with", "open", "(", "self", ".", "directory_view_file", ",", "\"w\"", ")", "as", "dvf", ":", "dvf", ".", "write", "(", "\"File Hierarchy\\n{}\\n\\n{}\\n\\n\"", ".", "format", "(", "EXHALE_SECTION_HEADING", ",", "directory_view_string", ")", ")", "except", "Exception", "as", "e", ":", "exclaimError", "(", "\"Error writing the directory hierarchy: {}\"", ".", "format", "(", "e", ")", ")" ]
https://github.com/GeometryCollective/boundary-first-flattening/blob/8250e5a0e85980ec50b5e8aa8f49dd6519f915cd/deps/nanogui/docs/exhale.py#L2749-L2812
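One detail worth isolating from the function above: re.sub(r'(.+)', r'    \1', text) indents every non-empty line in one pass, because '.' never matches a newline, so each '.+' match is exactly one line's content. A standalone illustration:

    import re

    html = '<ul>\n<li>a</li>\n\n<li>b</li>\n</ul>'
    # Every non-empty line gains leading spaces; blank lines are untouched,
    # which keeps the block well-formed under an rST ".. raw:: html" directive.
    indented = re.sub(r'(.+)', r'    \1', html)
    print(indented)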
EQEmu/Server
4a4158380551c134302a7517f1a59a14d8735d69
utils/scripts/opcode_handlers.py
python
loadclientopcodes
()
return True
Load CLIENT OPCODES into memory
Load CLIENT OPCODES into memory
[ "Load", "CLIENT", "OPCODES", "into", "memory" ]
def loadclientopcodes(): """ Load CLIENT OPCODES into memory """ if VERBOSE: dprint('entering \'loadclientopcodes()\'\n') bad_clients = [] for client in client_list: try: short_name = '/patch_{0}.conf'.format(client) file_name = '{0}/utils/patches{1}'.format( base_path, short_name ) vprint(file_name) with open(file_name, 'r') as data_file: if VERBOSE: dprint('->open: \'{0}\' in \'r\' mode\n'.format(file_name)) client_opcodes[client] = {} line_no = 0 for data_line in data_file: line_no += 1 key_begin = data_line.find('OP_') key_end = data_line.find('=', key_begin) if not key_begin == 0 or key_end < 0: continue val_begin = data_line.find('0x', key_end) val_end = val_begin + 6 if val_begin < 0: continue value = int(data_line[(val_begin + 2):val_end].lower(), 16) if value == 0: if VERBOSE: uprint('\nUNDEFINED OPCODE FOUND: ../utils/patches{0}({1}:{2}) [{3}][{4}] = {5}\n'.format( short_name, line_no, key_begin, client, data_line[key_begin:key_end], '0x{0}'.format(hex(value)[2:].zfill(4)) )) continue client_opcodes[client][data_line[key_begin:key_end]] = '0x{0}'.format(hex(value)[2:].zfill(4)) if VERBOSE: dprint('../utils/patches{0}({1}:{2}) [{3}][{4}] = {5}\n'.format( short_name, line_no, key_begin, client, data_line[key_begin:key_end], client_opcodes[client][data_line[key_begin:key_end]] )) data_file.close() if VERBOSE: dprint('->close: \'{0}\'\n'.format(file_name)) if not len(client_opcodes[client]) > 0: bad_clients.append(client) except: print('(Exception Error: {0}) loadclientopcodes() [{1}]'.format( sys.exc_info()[0], client )) dprint('<-except: \'{0} [{1}]\'\n'.format( sys.exc_info()[0], client )) bad_clients.append(client) for bad_client in bad_clients: vprint('Deleting \'{0}\' client from search criteria...'.format(bad_client)) client_list.remove(bad_client) dprint('->delete: \'{0}\' client\n'.format(bad_client)) if bad_client in client_opcodes: vprint('Deleting stale entries for \'{0}\' client...'.format(bad_client)) del client_opcodes[bad_client] dprint('->delete: \'{0}\' client opcode entries\n'.format(bad_client)) if not len(client_list) > 0: print('Could not locate valid clients...') dprint('leaving \'loadclientopcodes(): NO VALID CLIENTS EXIST\'\n\n') return False if not len(client_opcodes) > 0: print('Could not locate client opcode lists...') dprint('leaving \'loadclientopcodes(): CLIENT OPCODES NOT FOUND\'\n\n') return False if VERBOSE: dprint('leaving \'loadclientopcodes()\'\n\n') return True
[ "def", "loadclientopcodes", "(", ")", ":", "if", "VERBOSE", ":", "dprint", "(", "'entering \\'loadclientopcodes()\\'\\n'", ")", "bad_clients", "=", "[", "]", "for", "client", "in", "client_list", ":", "try", ":", "short_name", "=", "'/patch_{0}.conf'", ".", "format", "(", "client", ")", "file_name", "=", "'{0}/utils/patches{1}'", ".", "format", "(", "base_path", ",", "short_name", ")", "vprint", "(", "file_name", ")", "with", "open", "(", "file_name", ",", "'r'", ")", "as", "data_file", ":", "if", "VERBOSE", ":", "dprint", "(", "'->open: \\'{0}\\' in \\'r\\' mode\\n'", ".", "format", "(", "file_name", ")", ")", "client_opcodes", "[", "client", "]", "=", "{", "}", "line_no", "=", "0", "for", "data_line", "in", "data_file", ":", "line_no", "+=", "1", "key_begin", "=", "data_line", ".", "find", "(", "'OP_'", ")", "key_end", "=", "data_line", ".", "find", "(", "'='", ",", "key_begin", ")", "if", "not", "key_begin", "==", "0", "or", "key_end", "<", "0", ":", "continue", "val_begin", "=", "data_line", ".", "find", "(", "'0x'", ",", "key_end", ")", "val_end", "=", "val_begin", "+", "6", "if", "val_begin", "<", "0", ":", "continue", "value", "=", "int", "(", "data_line", "[", "(", "val_begin", "+", "2", ")", ":", "val_end", "]", ".", "lower", "(", ")", ",", "16", ")", "if", "value", "==", "0", ":", "if", "VERBOSE", ":", "uprint", "(", "'\\nUNDEFINED OPCODE FOUND: ../utils/patches{0}({1}:{2}) [{3}][{4}] = {5}\\n'", ".", "format", "(", "short_name", ",", "line_no", ",", "key_begin", ",", "client", ",", "data_line", "[", "key_begin", ":", "key_end", "]", ",", "'0x{0}'", ".", "format", "(", "hex", "(", "value", ")", "[", "2", ":", "]", ".", "zfill", "(", "4", ")", ")", ")", ")", "continue", "client_opcodes", "[", "client", "]", "[", "data_line", "[", "key_begin", ":", "key_end", "]", "]", "=", "'0x{0}'", ".", "format", "(", "hex", "(", "value", ")", "[", "2", ":", "]", ".", "zfill", "(", "4", ")", ")", "if", "VERBOSE", ":", "dprint", "(", "'../utils/patches{0}({1}:{2}) [{3}][{4}] = {5}\\n'", ".", "format", "(", "short_name", ",", "line_no", ",", "key_begin", ",", "client", ",", "data_line", "[", "key_begin", ":", "key_end", "]", ",", "client_opcodes", "[", "client", "]", "[", "data_line", "[", "key_begin", ":", "key_end", "]", "]", ")", ")", "data_file", ".", "close", "(", ")", "if", "VERBOSE", ":", "dprint", "(", "'->close: \\'{0}\\'\\n'", ".", "format", "(", "file_name", ")", ")", "if", "not", "len", "(", "client_opcodes", "[", "client", "]", ")", ">", "0", ":", "bad_clients", ".", "append", "(", "client", ")", "except", ":", "print", "(", "'(Exception Error: {0}) loadclientopcodes() [{1}]'", ".", "format", "(", "sys", ".", "exc_info", "(", ")", "[", "0", "]", ",", "client", ")", ")", "dprint", "(", "'<-except: \\'{0} [{1}]\\'\\n'", ".", "format", "(", "sys", ".", "exc_info", "(", ")", "[", "0", "]", ",", "client", ")", ")", "bad_clients", ".", "append", "(", "client", ")", "for", "bad_client", "in", "bad_clients", ":", "vprint", "(", "'Deleting \\'{0}\\' client from search criteria...'", ".", "format", "(", "bad_client", ")", ")", "client_list", ".", "remove", "(", "bad_client", ")", "dprint", "(", "'->delete: \\'{0}\\' client\\n'", ".", "format", "(", "bad_client", ")", ")", "if", "bad_client", "in", "client_opcodes", ":", "vprint", "(", "'Deleting stale entries for \\'{0}\\' client...'", ".", "format", "(", "bad_client", ")", ")", "del", "client_opcodes", "[", "bad_client", "]", "dprint", "(", "'->delete: \\'{0}\\' client opcode entries\\n'", ".", "format", "(", "bad_client", ")", ")", "if", 
"not", "len", "(", "client_list", ")", ">", "0", ":", "print", "(", "'Could not locate valid clients...'", ")", "dprint", "(", "'leaving \\'loadclientopcodes(): NO VALID CLIENTS EXIST\\'\\n\\n'", ")", "return", "False", "if", "not", "len", "(", "client_opcodes", ")", ">", "0", ":", "print", "(", "'Could not locate client opcode lists...'", ")", "dprint", "(", "'leaving \\'loadclientopcodes(): CLIENT OPCODES NOT FOUND\\'\\n\\n'", ")", "return", "False", "if", "VERBOSE", ":", "dprint", "(", "'leaving \\'loadclientopcodes()\\'\\n\\n'", ")", "return", "True" ]
https://github.com/EQEmu/Server/blob/4a4158380551c134302a7517f1a59a14d8735d69/utils/scripts/opcode_handlers.py#L262-L380
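The loader above extracts key/value pairs of the form OP_Name=0x1234 that start at column 0, skipping zero-valued (undefined) opcodes. A compact sketch of that parse step using a regex instead of str.find offsets (the file name is hypothetical):

    import re

    opcodes = {}
    with open('patch_example.conf') as data_file:  # hypothetical patch file
        for line in data_file:
            # re.match anchors at the start of the line, matching the
            # key_begin == 0 requirement in the loader above.
            m = re.match(r'(OP_\w+)\s*=\s*0x([0-9A-Fa-f]{1,4})', line)
            if m is None:
                continue
            value = int(m.group(2), 16)
            if value == 0:
                continue  # undefined opcode, skipped as in the loader above
            opcodes[m.group(1)] = '0x{0:04x}'.format(value)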
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/ogl/_drawn.py
python
DrawnShape.DrawArc
(self, centrePt, startPt, endPt)
Draw an arc.
Draw an arc.
[ "Draw", "an", "arc", "." ]
def DrawArc(self, centrePt, startPt, endPt): """Draw an arc.""" self._metafiles[self._currentAngle].DrawArc(centrePt, startPt, endPt)
[ "def", "DrawArc", "(", "self", ",", "centrePt", ",", "startPt", ",", "endPt", ")", ":", "self", ".", "_metafiles", "[", "self", ".", "_currentAngle", "]", ".", "DrawArc", "(", "centrePt", ",", "startPt", ",", "endPt", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/ogl/_drawn.py#L774-L776
krishauser/Klampt
972cc83ea5befac3f653c1ba20f80155768ad519
Python/klampt/robotsim.py
python
WorldModel.loadElement
(self, fn: str)
return _robotsim.WorldModel_loadElement(self, fn)
r""" Loads some element from a file, automatically detecting its type. Meshes are interpreted as terrains. Args: fn (str) Returns: The element's ID, or -1 if loading failed.
r""" Loads some element from a file, automatically detecting its type. Meshes are interpreted as terrains.
[ "r", "Loads", "some", "element", "from", "a", "file", "automatically", "detecting", "its", "type", ".", "Meshes", "are", "interpreted", "as", "terrains", "." ]
def loadElement(self, fn: str) ->int: r""" Loads some element from a file, automatically detecting its type. Meshes are interpreted as terrains. Args: fn (str) Returns: The element's ID, or -1 if loading failed. """ return _robotsim.WorldModel_loadElement(self, fn)
[ "def", "loadElement", "(", "self", ",", "fn", ":", "str", ")", "->", "int", ":", "return", "_robotsim", ".", "WorldModel_loadElement", "(", "self", ",", "fn", ")" ]
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/robotsim.py#L5865-L5878
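A hedged usage sketch for this binding; the mesh path is hypothetical, and per the docstring a mesh file is interpreted as a terrain while -1 signals failure:

    from klampt import WorldModel

    world = WorldModel()
    elem_id = world.loadElement('meshes/ground.off')  # hypothetical path
    if elem_id < 0:
        raise IOError('could not load element')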
ideawu/ssdb
f229ba277c7f7d0ca5a441c0c6fb3d1209af68e4
deps/cpy/antlr3/tree.py
python
TreeParser.getErrorHeader
(self, e)
return (self.getGrammarFileName() + ": node from %sline %s:%s" % (['', "after "][e.approximateLineInfo], e.line, e.charPositionInLine ) )
Prefix error message with the grammar name because message is always intended for the programmer because the parser built the input tree not the user.
Prefix error message with the grammar name because message is always intended for the programmer because the parser built the input tree not the user.
[ "Prefix", "error", "message", "with", "the", "grammar", "name", "because", "message", "is", "always", "intended", "for", "the", "programmer", "because", "the", "parser", "built", "the", "input", "tree", "not", "the", "user", "." ]
def getErrorHeader(self, e): """ Prefix error message with the grammar name because message is always intended for the programmer because the parser built the input tree not the user. """ return (self.getGrammarFileName() + ": node from %sline %s:%s" % (['', "after "][e.approximateLineInfo], e.line, e.charPositionInLine ) )
[ "def", "getErrorHeader", "(", "self", ",", "e", ")", ":", "return", "(", "self", ".", "getGrammarFileName", "(", ")", "+", "\": node from %sline %s:%s\"", "%", "(", "[", "''", ",", "\"after \"", "]", "[", "e", ".", "approximateLineInfo", "]", ",", "e", ".", "line", ",", "e", ".", "charPositionInLine", ")", ")" ]
https://github.com/ideawu/ssdb/blob/f229ba277c7f7d0ca5a441c0c6fb3d1209af68e4/deps/cpy/antlr3/tree.py#L2123-L2136
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/tlslite/tlslite/integration/AsyncStateMachine.py
python
AsyncStateMachine.inReadEvent
(self)
Tell the state machine it can read from the socket.
Tell the state machine it can read from the socket.
[ "Tell", "the", "state", "machine", "it", "can", "read", "from", "the", "socket", "." ]
def inReadEvent(self): """Tell the state machine it can read from the socket.""" try: self._checkAssert() if self.handshaker: self._doHandshakeOp() elif self.closer: self._doCloseOp() elif self.reader: self._doReadOp() elif self.writer: self._doWriteOp() else: self.reader = self.tlsConnection.readAsync(16384) self._doReadOp() except: self._clear() raise
[ "def", "inReadEvent", "(", "self", ")", ":", "try", ":", "self", ".", "_checkAssert", "(", ")", "if", "self", ".", "handshaker", ":", "self", ".", "_doHandshakeOp", "(", ")", "elif", "self", ".", "closer", ":", "self", ".", "_doCloseOp", "(", ")", "elif", "self", ".", "reader", ":", "self", ".", "_doReadOp", "(", ")", "elif", "self", ".", "writer", ":", "self", ".", "_doWriteOp", "(", ")", "else", ":", "self", ".", "reader", "=", "self", ".", "tlsConnection", ".", "readAsync", "(", "16384", ")", "self", ".", "_doReadOp", "(", ")", "except", ":", "self", ".", "_clear", "(", ")", "raise" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/tlslite/tlslite/integration/AsyncStateMachine.py#L118-L135
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/tkinter/__init__.py
python
Listbox.activate
(self, index)
Activate item identified by INDEX.
Activate item identified by INDEX.
[ "Activate", "item", "identified", "by", "INDEX", "." ]
def activate(self, index): """Activate item identified by INDEX.""" self.tk.call(self._w, 'activate', index)
[ "def", "activate", "(", "self", ",", "index", ")", ":", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'activate'", ",", "index", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/tkinter/__init__.py#L2779-L2781
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/vim/chromium.ycm_extra_conf.py
python
GetClangCommandLineForNinjaOutput
(out_dir, build_target)
return None
Returns the Clang command line for building |build_target| Asks ninja for the list of commands used to build |filename| and returns the final Clang invocation. Args: out_dir: (String) Absolute path to ninja build output directory. build_target: (String) A build target understood by ninja Returns: (String or None) Clang command line or None if a Clang command line couldn't be determined.
Returns the Clang command line for building |build_target|
[ "Returns", "the", "Clang", "command", "line", "for", "building", "|build_target|" ]
def GetClangCommandLineForNinjaOutput(out_dir, build_target): """Returns the Clang command line for building |build_target| Asks ninja for the list of commands used to build |filename| and returns the final Clang invocation. Args: out_dir: (String) Absolute path to ninja build output directory. build_target: (String) A build target understood by ninja Returns: (String or None) Clang command line or None if a Clang command line couldn't be determined. """ p = subprocess.Popen(['ninja', '-v', '-C', out_dir, '-t', 'commands', build_target], stdout=subprocess.PIPE, universal_newlines=True) stdout, stderr = p.communicate() if p.returncode != 0: return None # Ninja will return multiple build steps for all dependencies up to # |build_target|. The build step we want is the last Clang invocation, which # is expected to be the one that outputs |build_target|. for line in reversed(stdout.split('\n')): if 'clang' in line: return line return None
[ "def", "GetClangCommandLineForNinjaOutput", "(", "out_dir", ",", "build_target", ")", ":", "p", "=", "subprocess", ".", "Popen", "(", "[", "'ninja'", ",", "'-v'", ",", "'-C'", ",", "out_dir", ",", "'-t'", ",", "'commands'", ",", "build_target", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "universal_newlines", "=", "True", ")", "stdout", ",", "stderr", "=", "p", ".", "communicate", "(", ")", "if", "p", ".", "returncode", "!=", "0", ":", "return", "None", "# Ninja will return multiple build steps for all dependencies up to", "# |build_target|. The build step we want is the last Clang invocation, which", "# is expected to be the one that outputs |build_target|.", "for", "line", "in", "reversed", "(", "stdout", ".", "split", "(", "'\\n'", ")", ")", ":", "if", "'clang'", "in", "line", ":", "return", "line", "return", "None" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/vim/chromium.ycm_extra_conf.py#L184-L211
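A usage sketch for the function above, assuming a Chromium-style ninja output directory and an object-file target (both paths hypothetical):

    out_dir = '/src/chromium/out/Debug'   # hypothetical ninja output dir
    target = 'obj/base/base.logging.o'    # hypothetical build target
    cmd = GetClangCommandLineForNinjaOutput(out_dir, target)
    if cmd is None:
        print('ninja reported no clang build step for %s' % target)
    else:
        print(cmd.split()[0])  # the clang binary that would be invoked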
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/distribute/sharded_variable.py
python
ShardedVariable._overload_operator
(cls, operator)
Delegate an operator overload to `ops.Tensor`.
Delegate an operator overload to `ops.Tensor`.
[ "Delegate", "an", "operator", "overload", "to", "ops", ".", "Tensor", "." ]
def _overload_operator(cls, operator): """Delegate an operator overload to `ops.Tensor`.""" tensor_operator = getattr(ops.Tensor, operator) def _operator(v, *args, **kwargs): return tensor_operator(_var_to_tensor(v), *args, **kwargs) setattr(cls, operator, _operator)
[ "def", "_overload_operator", "(", "cls", ",", "operator", ")", ":", "tensor_operator", "=", "getattr", "(", "ops", ".", "Tensor", ",", "operator", ")", "def", "_operator", "(", "v", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "tensor_operator", "(", "_var_to_tensor", "(", "v", ")", ",", "*", "args", ",", "*", "*", "kwargs", ")", "setattr", "(", "cls", ",", "operator", ",", "_operator", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/sharded_variable.py#L811-L818
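The classmethod above installs one forwarding method per dunder name, converting the sharded variable to a tensor before delegating. The same trick works for any wrapper type; a toy version delegating to int rather than ops.Tensor (all names illustrative):

    class Boxed(object):
        def __init__(self, value):
            self.value = value

        @classmethod
        def _overload_operator(cls, name):
            int_op = getattr(int, name)  # the delegate, like ops.Tensor above

            def _operator(self, *args, **kwargs):
                return int_op(self.value, *args, **kwargs)
            setattr(cls, name, _operator)

    for _name in ('__add__', '__mul__', '__lt__'):
        Boxed._overload_operator(_name)

    assert Boxed(2) + 3 == 5 and Boxed(4) * 5 == 20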
OAID/Caffe-HRT
aae71e498ab842c6f92bcc23fc668423615a4d65
examples/pycaffe/layers/pascal_multilabel_datalayers.py
python
PascalMultilabelDataLayerSync.forward
(self, bottom, top)
Load data.
Load data.
[ "Load", "data", "." ]
def forward(self, bottom, top): """ Load data. """ for itt in range(self.batch_size): # Use the batch loader to load the next image. im, multilabel = self.batch_loader.load_next_image() # Add directly to the caffe data layer top[0].data[itt, ...] = im top[1].data[itt, ...] = multilabel
[ "def", "forward", "(", "self", ",", "bottom", ",", "top", ")", ":", "for", "itt", "in", "range", "(", "self", ".", "batch_size", ")", ":", "# Use the batch loader to load the next image.", "im", ",", "multilabel", "=", "self", ".", "batch_loader", ".", "load_next_image", "(", ")", "# Add directly to the caffe data layer", "top", "[", "0", "]", ".", "data", "[", "itt", ",", "...", "]", "=", "im", "top", "[", "1", "]", ".", "data", "[", "itt", ",", "...", "]", "=", "multilabel" ]
https://github.com/OAID/Caffe-HRT/blob/aae71e498ab842c6f92bcc23fc668423615a4d65/examples/pycaffe/layers/pascal_multilabel_datalayers.py#L55-L65
eranif/codelite
076eb332d6d2b7ea9a7654afa0461a01d91543aa
Runtime/gdb_printers/libstdcxx/v6/printers.py
python
TemplateTypePrinter.instantiate
(self)
return self._recognizer(self.name, self.defargs)
Return a recognizer object for this type printer.
Return a recognizer object for this type printer.
[ "Return", "a", "recognizer", "object", "for", "this", "type", "printer", "." ]
def instantiate(self): "Return a recognizer object for this type printer." return self._recognizer(self.name, self.defargs)
[ "def", "instantiate", "(", "self", ")", ":", "return", "self", ".", "_recognizer", "(", "self", ".", "name", ",", "self", ".", "defargs", ")" ]
https://github.com/eranif/codelite/blob/076eb332d6d2b7ea9a7654afa0461a01d91543aa/Runtime/gdb_printers/libstdcxx/v6/printers.py#L1459-L1461
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/data/experimental/ops/batching.py
python
map_and_batch
(map_func, batch_size, num_parallel_batches=None, drop_remainder=False, num_parallel_calls=None)
return _apply_fn
Fused implementation of `map` and `batch`. Maps `map_func` across `batch_size` consecutive elements of this dataset and then combines them into a batch. Functionally, it is equivalent to `map` followed by `batch`. However, by fusing the two transformations together, the implementation can be more efficient. Surfacing this transformation in the API is temporary. Once automatic input pipeline optimization is implemented, the fusing of `map` and `batch` will happen automatically and this API will be deprecated. Args: map_func: A function mapping a nested structure of tensors to another nested structure of tensors. batch_size: A `tf.int64` scalar `tf.Tensor`, representing the number of consecutive elements of this dataset to combine in a single batch. num_parallel_batches: (Optional.) A `tf.int64` scalar `tf.Tensor`, representing the number of batches to create in parallel. On one hand, higher values can help mitigate the effect of stragglers. On the other hand, higher values can increase contention if CPU is scarce. drop_remainder: (Optional.) A `tf.bool` scalar `tf.Tensor`, representing whether the last batch should be dropped in case its size is smaller than desired; the default behavior is not to drop the smaller batch. num_parallel_calls: (Optional.) A `tf.int32` scalar `tf.Tensor`, representing the number of elements to process in parallel. If not specified, `batch_size * num_parallel_batches` elements will be processed in parallel. If the value `tf.data.experimental.AUTOTUNE` is used, then the number of parallel calls is set dynamically based on available CPU. Returns: A `Dataset` transformation function, which can be passed to `tf.data.Dataset.apply`. Raises: ValueError: If both `num_parallel_batches` and `num_parallel_calls` are specified.
Fused implementation of `map` and `batch`.
[ "Fused", "implementation", "of", "map", "and", "batch", "." ]
def map_and_batch(map_func, batch_size, num_parallel_batches=None, drop_remainder=False, num_parallel_calls=None): """Fused implementation of `map` and `batch`. Maps `map_func` across `batch_size` consecutive elements of this dataset and then combines them into a batch. Functionally, it is equivalent to `map` followed by `batch`. However, by fusing the two transformations together, the implementation can be more efficient. Surfacing this transformation in the API is temporary. Once automatic input pipeline optimization is implemented, the fusing of `map` and `batch` will happen automatically and this API will be deprecated. Args: map_func: A function mapping a nested structure of tensors to another nested structure of tensors. batch_size: A `tf.int64` scalar `tf.Tensor`, representing the number of consecutive elements of this dataset to combine in a single batch. num_parallel_batches: (Optional.) A `tf.int64` scalar `tf.Tensor`, representing the number of batches to create in parallel. On one hand, higher values can help mitigate the effect of stragglers. On the other hand, higher values can increase contention if CPU is scarce. drop_remainder: (Optional.) A `tf.bool` scalar `tf.Tensor`, representing whether the last batch should be dropped in case its size is smaller than desired; the default behavior is not to drop the smaller batch. num_parallel_calls: (Optional.) A `tf.int32` scalar `tf.Tensor`, representing the number of elements to process in parallel. If not specified, `batch_size * num_parallel_batches` elements will be processed in parallel. If the value `tf.data.experimental.AUTOTUNE` is used, then the number of parallel calls is set dynamically based on available CPU. Returns: A `Dataset` transformation function, which can be passed to `tf.data.Dataset.apply`. Raises: ValueError: If both `num_parallel_batches` and `num_parallel_calls` are specified. """ if num_parallel_batches is None and num_parallel_calls is None: num_parallel_calls = batch_size elif num_parallel_batches is not None and num_parallel_calls is None: num_parallel_calls = batch_size * num_parallel_batches elif num_parallel_batches is not None and num_parallel_calls is not None: raise ValueError("The `num_parallel_batches` and `num_parallel_calls` " "arguments are mutually exclusive.") def _apply_fn(dataset): return _MapAndBatchDataset(dataset, map_func, batch_size, num_parallel_calls, drop_remainder) return _apply_fn
[ "def", "map_and_batch", "(", "map_func", ",", "batch_size", ",", "num_parallel_batches", "=", "None", ",", "drop_remainder", "=", "False", ",", "num_parallel_calls", "=", "None", ")", ":", "if", "num_parallel_batches", "is", "None", "and", "num_parallel_calls", "is", "None", ":", "num_parallel_calls", "=", "batch_size", "elif", "num_parallel_batches", "is", "not", "None", "and", "num_parallel_calls", "is", "None", ":", "num_parallel_calls", "=", "batch_size", "*", "num_parallel_batches", "elif", "num_parallel_batches", "is", "not", "None", "and", "num_parallel_calls", "is", "not", "None", ":", "raise", "ValueError", "(", "\"The `num_parallel_batches` and `num_parallel_calls` \"", "\"arguments are mutually exclusive.\"", ")", "def", "_apply_fn", "(", "dataset", ")", ":", "return", "_MapAndBatchDataset", "(", "dataset", ",", "map_func", ",", "batch_size", ",", "num_parallel_calls", ",", "drop_remainder", ")", "return", "_apply_fn" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/data/experimental/ops/batching.py#L145-L199
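A usage sketch against the TF 1.x-era API documented above (per the docstring, the fusion later became an automatic optimization of map followed by batch):

    import tensorflow as tf

    dataset = tf.data.Dataset.range(100)
    dataset = dataset.apply(
        tf.data.experimental.map_and_batch(
            map_func=lambda x: x * 2,
            batch_size=8,
            drop_remainder=True))  # every emitted batch has exactly 8 elements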
turi-code/SFrame
796b9bdfb2fa1b881d82080754643c7e68629cd2
oss_src/unity/python/sframe/meta/__init__.py
python
decompile
(code, mode='exec')
Decompile a code object into python ast. :param mode: must be 'exec' to compile a module or 'eval' to compile an expression.
Decompile a code object into python ast. :param mode: must be 'exec' to compile a module or 'eval' to compile an expression.
[ "Decompile", "a", "code", "object", "into", "python", "ast", ".", ":", "param", "mode", ":", "must", "be", "exec", "to", "compile", "a", "module", "or", "eval", "to", "compile", "an", "expression", "." ]
def decompile(code, mode='exec'): ''' Decompile a code object into python ast. :param mode: must be 'exec' to compile a module or 'eval' to compile an expression. ''' if mode == 'exec': return make_module(code) else: raise Exception("can not handle mode %r yet" % mode)
[ "def", "decompile", "(", "code", ",", "mode", "=", "'exec'", ")", ":", "if", "mode", "==", "'exec'", ":", "return", "make_module", "(", "code", ")", "else", ":", "raise", "Exception", "(", "\"can not handle mode %r yet\"", "%", "mode", ")" ]
https://github.com/turi-code/SFrame/blob/796b9bdfb2fa1b881d82080754643c7e68629cd2/oss_src/unity/python/sframe/meta/__init__.py#L5-L15
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/html.py
python
HtmlTag.GetEndPos1
(*args, **kwargs)
return _html.HtmlTag_GetEndPos1(*args, **kwargs)
GetEndPos1(self) -> int
GetEndPos1(self) -> int
[ "GetEndPos1", "(", "self", ")", "-", ">", "int" ]
def GetEndPos1(*args, **kwargs): """GetEndPos1(self) -> int""" return _html.HtmlTag_GetEndPos1(*args, **kwargs)
[ "def", "GetEndPos1", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_html", ".", "HtmlTag_GetEndPos1", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/html.py#L165-L167
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/eclib/finddlg.py
python
FindPanel.SetFlag
(self, flag)
Set a search flag @param flag: AFR_* flag value
Set a search flag @param flag: AFR_* flag value
[ "Set", "a", "search", "flag", "@param", "flag", ":", "AFR_", "*", "flag", "value" ]
def SetFlag(self, flag): """Set a search flag @param flag: AFR_* flag value """ flags = self._fdata.GetFlags() flags |= flag self.SetFlags(flags)
[ "def", "SetFlag", "(", "self", ",", "flag", ")", ":", "flags", "=", "self", ".", "_fdata", ".", "GetFlags", "(", ")", "flags", "|=", "flag", "self", ".", "SetFlags", "(", "flags", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/eclib/finddlg.py#L1246-L1253
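SetFlag ORs one AFR_* bit into the accumulated search-flag word. The bit arithmetic in isolation, with illustrative values rather than Editra's real constants:

    AFR_MATCHCASE = 0x1  # illustrative flag values, not Editra's constants
    AFR_WHOLEWORD = 0x2

    flags = 0
    flags |= AFR_WHOLEWORD         # set a flag, as SetFlag does
    assert flags & AFR_WHOLEWORD   # membership test
    flags &= ~AFR_WHOLEWORD        # clear it again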
cmu-db/noisepage
79276e68fe83322f1249e8a8be96bd63c583ae56
script/self_driving/forecasting/data_loader.py
python
DataLoader._to_timeseries
(self, data: np.ndarray)
Convert the 2D array with query id and timestamps into a map of time-series for each query id :param data: Loaded 2D numpy array of [query_id, timestamp] :return: None
Convert the 2D array with query id and timestamps into a map of time-series for each query id :param data: Loaded 2D numpy array of [query_id, timestamp] :return: None
[ "Convert", "the", "2D", "array", "with", "query", "id", "and", "timestamps", "into", "a", "map", "of", "time", "-", "series", "for", "each", "query", "id", ":", "param", "data", ":", "Loaded", "2D", "numpy", "array", "of", "[", "query_id", "timestamp", "]", ":", "return", ":", "None" ]
def _to_timeseries(self, data: np.ndarray) -> None: """ Convert the 2D array with query id and timestamps into a map of time-series for each query id :param data: Loaded 2D numpy array of [query_id, timestamp] :return: None """ # Query trace file is sorted by timestamps start_timestamp = data[0][self.TS_IDX] end_timestamp = data[-1][self.TS_IDX] if end_timestamp - start_timestamp <= 1: raise ValueError( "Empty data set with start timestamp >= end timestamp.") # Number of data points in the new time-series num_buckets = (end_timestamp - start_timestamp - 1) // self._interval_us + 1 # Iterate through the timestamps self._ts_data = {} for i in range(len(data)): t = data[i][self.TS_IDX] qid = data[i][self.QID_IDX] # Initialize a new query's time-series if self._ts_data.get(qid) is None: self._ts_data[qid] = np.zeros(num_buckets) # Bucket index bi = (t - start_timestamp) // self._interval_us self._ts_data[qid][bi] += 1
[ "def", "_to_timeseries", "(", "self", ",", "data", ":", "np", ".", "ndarray", ")", "->", "None", ":", "# Query trace file is sorted by timestamps", "start_timestamp", "=", "data", "[", "0", "]", "[", "self", ".", "TS_IDX", "]", "end_timestamp", "=", "data", "[", "-", "1", "]", "[", "self", ".", "TS_IDX", "]", "if", "end_timestamp", "-", "start_timestamp", "<=", "1", ":", "raise", "ValueError", "(", "\"Empty data set with start timestamp >= end timestamp.\"", ")", "# Number of data points in the new time-series", "num_buckets", "=", "(", "end_timestamp", "-", "start_timestamp", "-", "1", ")", "//", "self", ".", "_interval_us", "+", "1", "# Iterate through the timestamps", "self", ".", "_ts_data", "=", "{", "}", "for", "i", "in", "range", "(", "len", "(", "data", ")", ")", ":", "t", "=", "data", "[", "i", "]", "[", "self", ".", "TS_IDX", "]", "qid", "=", "data", "[", "i", "]", "[", "self", ".", "QID_IDX", "]", "# Initialize a new query's time-series", "if", "self", ".", "_ts_data", ".", "get", "(", "qid", ")", "is", "None", ":", "self", ".", "_ts_data", "[", "qid", "]", "=", "np", ".", "zeros", "(", "num_buckets", ")", "# Bucket index", "bi", "=", "(", "t", "-", "start_timestamp", ")", "//", "self", ".", "_interval_us", "self", ".", "_ts_data", "[", "qid", "]", "[", "bi", "]", "+=", "1" ]
https://github.com/cmu-db/noisepage/blob/79276e68fe83322f1249e8a8be96bd63c583ae56/script/self_driving/forecasting/data_loader.py#L60-L90
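The conversion above is a fixed-width histogram over timestamps, one series per query id. A self-contained numpy sketch of the same bucketing (the column order and the bucket-count formula, sized here to cover the final timestamp, are assumptions):

    import numpy as np

    def to_timeseries(data, interval_us, qid_idx=0, ts_idx=1):
        """data: rows of [query_id, timestamp_us], sorted by timestamp."""
        start, end = data[0][ts_idx], data[-1][ts_idx]
        num_buckets = (end - start) // interval_us + 1
        series = {}
        for qid, t in data[:, [qid_idx, ts_idx]]:
            buckets = series.setdefault(qid, np.zeros(num_buckets))
            buckets[(t - start) // interval_us] += 1  # count per bucket
        return series

    demo = np.array([[7, 0], [7, 1_500_000], [9, 2_100_000]])
    print(to_timeseries(demo, interval_us=1_000_000))
    # roughly: {7: array([1., 1., 0.]), 9: array([0., 0., 1.])}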
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/base.py
python
TreeWalker.doctype
(self, name, publicId=None, systemId=None)
return {"type": "Doctype", "name": name, "publicId": publicId, "systemId": systemId}
Generates a Doctype token :arg name: :arg publicId: :arg systemId: :returns: the Doctype token
Generates a Doctype token
[ "Generates", "a", "Doctype", "token" ]
def doctype(self, name, publicId=None, systemId=None): """Generates a Doctype token :arg name: :arg publicId: :arg systemId: :returns: the Doctype token """ return {"type": "Doctype", "name": name, "publicId": publicId, "systemId": systemId}
[ "def", "doctype", "(", "self", ",", "name", ",", "publicId", "=", "None", ",", "systemId", "=", "None", ")", ":", "return", "{", "\"type\"", ":", "\"Doctype\"", ",", "\"name\"", ":", "name", ",", "\"publicId\"", ":", "publicId", ",", "\"systemId\"", ":", "systemId", "}" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/pip/_vendor/html5lib/treewalkers/base.py#L148-L163
shedskin/shedskin
ae88dbca7b1d9671cd8be448cb0b497122758936
examples/linalg.py
python
inner_prod
(v1, v2)
return sum
inner production of two vectors.
inner production of two vectors.
[ "inner", "production", "of", "two", "vectors", "." ]
def inner_prod(v1, v2): 'inner production of two vectors.' sum = 0 for i in xrange(len(v1)): sum += v1[i] * v2[i] return sum
[ "def", "inner_prod", "(", "v1", ",", "v2", ")", ":", "sum", "=", "0", "for", "i", "in", "xrange", "(", "len", "(", "v1", ")", ")", ":", "sum", "+=", "v1", "[", "i", "]", "*", "v2", "[", "i", "]", "return", "sum" ]
https://github.com/shedskin/shedskin/blob/ae88dbca7b1d9671cd8be448cb0b497122758936/examples/linalg.py#L7-L12
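The same dot product in one line of modern Python (the example file targets Shed Skin's restricted Python 2 subset, hence the explicit xrange loop above):

    def inner_prod(v1, v2):
        'inner production of two vectors.'
        return sum(x * y for x, y in zip(v1, v2))

    assert inner_prod([1, 2, 3], [4, 5, 6]) == 32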
kevinlin311tw/caffe-cvprw15
45c2a1bf0368569c54e0be4edf8d34285cf79e70
scripts/cpp_lint.py
python
Search
(pattern, s)
return _regexp_compile_cache[pattern].search(s)
Searches the string for the pattern, caching the compiled regexp.
Searches the string for the pattern, caching the compiled regexp.
[ "Searches", "the", "string", "for", "the", "pattern", "caching", "the", "compiled", "regexp", "." ]
def Search(pattern, s): """Searches the string for the pattern, caching the compiled regexp.""" if pattern not in _regexp_compile_cache: _regexp_compile_cache[pattern] = sre_compile.compile(pattern) return _regexp_compile_cache[pattern].search(s)
[ "def", "Search", "(", "pattern", ",", "s", ")", ":", "if", "pattern", "not", "in", "_regexp_compile_cache", ":", "_regexp_compile_cache", "[", "pattern", "]", "=", "sre_compile", ".", "compile", "(", "pattern", ")", "return", "_regexp_compile_cache", "[", "pattern", "]", ".", "search", "(", "s", ")" ]
https://github.com/kevinlin311tw/caffe-cvprw15/blob/45c2a1bf0368569c54e0be4edf8d34285cf79e70/scripts/cpp_lint.py#L543-L547
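The cache avoids recompiling hot patterns on every call. With the public re module (sre_compile is an internal module this lint script happens to use), functools gives the same memoization in a few lines:

    import re
    from functools import lru_cache

    @lru_cache(maxsize=None)
    def _compiled(pattern):
        return re.compile(pattern)

    def search(pattern, s):
        """Searches the string for the pattern, caching the compiled regexp."""
        return _compiled(pattern).search(s)

    assert search(r'0x[0-9a-f]+', 'addr = 0x1f') is not None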
intel/llvm
e6d0547e9d99b5a56430c4749f6c7e328bf221ab
clang/docs/tools/dump_ast_matchers.py
python
act_on_decl
(declaration, comment, allowed_types)
Parse the matcher out of the given declaration and comment. If 'allowed_types' is set, it contains a list of node types the matcher can match on, as extracted from the static type asserts in the matcher definition.
Parse the matcher out of the given declaration and comment.
[ "Parse", "the", "matcher", "out", "of", "the", "given", "declaration", "and", "comment", "." ]
def act_on_decl(declaration, comment, allowed_types): """Parse the matcher out of the given declaration and comment. If 'allowed_types' is set, it contains a list of node types the matcher can match on, as extracted from the static type asserts in the matcher definition. """ if declaration.strip(): if re.match(r'^\s?(#|namespace|using)', declaration): return # Node matchers are defined by writing: # VariadicDynCastAllOfMatcher<ResultType, ArgumentType> name; m = re.match(r""".*Variadic(?:DynCast)?AllOfMatcher\s*< \s*([^\s,]+)\s*(?:, \s*([^\s>]+)\s*)?> \s*([^\s;]+)\s*;\s*$""", declaration, flags=re.X) if m: result, inner, name = m.groups() if not inner: inner = result add_matcher(result, name, 'Matcher<%s>...' % inner, comment, is_dyncast=True) return # Special case of type matchers: # AstTypeMatcher<ArgumentType> name m = re.match(r""".*AstTypeMatcher\s*< \s*([^\s>]+)\s*> \s*([^\s;]+)\s*;\s*$""", declaration, flags=re.X) if m: inner, name = m.groups() add_matcher('Type', name, 'Matcher<%s>...' % inner, comment, is_dyncast=True) # FIXME: re-enable once we have implemented casting on the TypeLoc # hierarchy. # add_matcher('TypeLoc', '%sLoc' % name, 'Matcher<%sLoc>...' % inner, # comment, is_dyncast=True) return # Parse the various matcher definition macros. m = re.match(""".*AST_TYPE(LOC)?_TRAVERSE_MATCHER(?:_DECL)?\( \s*([^\s,]+\s*), \s*(?:[^\s,]+\s*), \s*AST_POLYMORPHIC_SUPPORTED_TYPES\(([^)]*)\) \)\s*;\s*$""", declaration, flags=re.X) if m: loc, name, results = m.groups()[0:3] result_types = [r.strip() for r in results.split(',')] comment_result_types = extract_result_types(comment) if (comment_result_types and sorted(result_types) != sorted(comment_result_types)): raise Exception('Inconsistent documentation for: %s' % name) for result_type in result_types: add_matcher(result_type, name, 'Matcher<Type>', comment) # if loc: # add_matcher('%sLoc' % result_type, '%sLoc' % name, 'Matcher<TypeLoc>', # comment) return m = re.match(r"""^\s*AST_POLYMORPHIC_MATCHER(_P)?(.?)(?:_OVERLOAD)?\( \s*([^\s,]+)\s*, \s*AST_POLYMORPHIC_SUPPORTED_TYPES\(([^)]*)\) (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*\d+\s*)? \)\s*{\s*$""", declaration, flags=re.X) if m: p, n, name, results = m.groups()[0:4] args = m.groups()[4:] result_types = [r.strip() for r in results.split(',')] if allowed_types and allowed_types != result_types: raise Exception('Inconsistent documentation for: %s' % name) if n not in ['', '2']: raise Exception('Cannot parse "%s"' % declaration) args = ', '.join('%s %s' % (args[i], args[i+1]) for i in range(0, len(args), 2) if args[i]) for result_type in result_types: add_matcher(result_type, name, args, comment) return m = re.match(r"""^\s*AST_POLYMORPHIC_MATCHER_REGEX(?:_OVERLOAD)?\( \s*([^\s,]+)\s*, \s*AST_POLYMORPHIC_SUPPORTED_TYPES\(([^)]*)\), \s*([^\s,]+)\s* (?:,\s*\d+\s*)? \)\s*{\s*$""", declaration, flags=re.X) if m: name, results, arg_name = m.groups()[0:3] result_types = [r.strip() for r in results.split(',')] if allowed_types and allowed_types != result_types: raise Exception('Inconsistent documentation for: %s' % name) arg = "StringRef %s, Regex::RegexFlags Flags = NoFlags" % arg_name comment += """ If the matcher is used in clang-query, RegexFlags parameter should be passed as a quoted string. e.g: "NoFlags". Flags can be combined with '|' example \"IgnoreCase | BasicRegex\" """ for result_type in result_types: add_matcher(result_type, name, arg, comment) return m = re.match(r"""^\s*AST_MATCHER_FUNCTION(_P)?(.?)(?:_OVERLOAD)?\( (?:\s*([^\s,]+)\s*,)? \s*([^\s,]+)\s* (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*\d+\s*)? \)\s*{\s*$""", declaration, flags=re.X) if m: p, n, result, name = m.groups()[0:4] args = m.groups()[4:] if n not in ['', '2']: raise Exception('Cannot parse "%s"' % declaration) args = ', '.join('%s %s' % (args[i], args[i+1]) for i in range(0, len(args), 2) if args[i]) add_matcher(result, name, args, comment) return m = re.match(r"""^\s*AST_MATCHER(_P)?(.?)(?:_OVERLOAD)?\( (?:\s*([^\s,]+)\s*,)? \s*([^\s,]+)\s* (?:,\s*([^,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*([^\s,]+)\s* ,\s*([^\s,]+)\s*)? (?:,\s*\d+\s*)? \)\s*{""", declaration, flags=re.X) if m: p, n, result, name = m.groups()[0:4] args = m.groups()[4:] if not result: if not allowed_types: raise Exception('Did not find allowed result types for: %s' % name) result_types = allowed_types else: result_types = [result] if n not in ['', '2']: raise Exception('Cannot parse "%s"' % declaration) args = ', '.join('%s %s' % (args[i], args[i+1]) for i in range(0, len(args), 2) if args[i]) for result_type in result_types: add_matcher(result_type, name, args, comment) return m = re.match(r"""^\s*AST_MATCHER_REGEX(?:_OVERLOAD)?\( \s*([^\s,]+)\s*, \s*([^\s,]+)\s*, \s*([^\s,]+)\s* (?:,\s*\d+\s*)? \)\s*{""", declaration, flags=re.X) if m: result, name, arg_name = m.groups()[0:3] if not result: if not allowed_types: raise Exception('Did not find allowed result types for: %s' % name) result_types = allowed_types else: result_types = [result] arg = "StringRef %s, Regex::RegexFlags Flags = NoFlags" % arg_name comment += """ If the matcher is used in clang-query, RegexFlags parameter should be passed as a quoted string. e.g: "NoFlags". Flags can be combined with '|' example \"IgnoreCase | BasicRegex\" """ for result_type in result_types: add_matcher(result_type, name, arg, comment) return # Parse ArgumentAdapting matchers. m = re.match( r"""^.*ArgumentAdaptingMatcherFunc<.*>\s* ([a-zA-Z]*);$""", declaration, flags=re.X) if m: name = m.groups()[0] add_matcher('*', name, 'Matcher<*>', comment) return # Parse Variadic functions. m = re.match( r"""^.*internal::VariadicFunction\s*<\s*([^,]+),\s*([^,]+),\s*[^>]+>\s* ([a-zA-Z]*);$""", declaration, flags=re.X) if m: result, arg, name = m.groups()[:3] add_matcher(result, name, '%s, ..., %s' % (arg, arg), comment) return m = re.match( r"""^.*internal::VariadicFunction\s*<\s* internal::PolymorphicMatcher<[\S\s]+ AST_POLYMORPHIC_SUPPORTED_TYPES\(([^)]*)\),\s*(.*);$""", declaration, flags=re.X) if m: results, trailing = m.groups() trailing, name = trailing.rsplit(">", 1) name = name.strip() trailing, _ = trailing.rsplit(",", 1) _, arg = trailing.rsplit(",", 1) arg = arg.strip() result_types = [r.strip() for r in results.split(',')] for result_type in result_types: add_matcher(result_type, name, '%s, ..., %s' % (arg, arg), comment) return # Parse Variadic operator matchers. m = re.match( r"""^.*VariadicOperatorMatcherFunc\s*<\s*([^,]+),\s*([^\s]+)\s*>\s* ([a-zA-Z]*);$""", declaration, flags=re.X) if m: min_args, max_args, name = m.groups()[:3] if max_args == '1': add_matcher('*', name, 'Matcher<*>', comment) return elif max_args == 'std::numeric_limits<unsigned>::max()': add_matcher('*', name, 'Matcher<*>, ..., Matcher<*>', comment) return m = re.match( r"""^.*MapAnyOfMatcher<.*>\s* ([a-zA-Z]*);$""", declaration, flags=re.X) if m: name = m.groups()[0] add_matcher('*', name, 'Matcher<*>...Matcher<*>', comment) return # Parse free standing matcher functions, like: # Matcher<ResultType> Name(Matcher<ArgumentType> InnerMatcher) { m = re.match(r"""^\s*(?:template\s+<\s*(?:class|typename)\s+(.+)\s*>\s+)? (.*)\s+ ([^\s\(]+)\s*\( (.*) \)\s*{""", declaration, re.X) if m: template_name, result, name, args = m.groups() if template_name: matcherTemplateArgs = re.findall(r'Matcher<\s*(%s)\s*>' % template_name, args) templateArgs = re.findall(r'(?:^|[\s,<])(%s)(?:$|[\s,>])' % template_name, args) if len(matcherTemplateArgs) < len(templateArgs): # The template name is used naked, so don't replace with `*`` later on template_name = None else : args = re.sub(r'(^|[\s,<])%s($|[\s,>])' % template_name, r'\1*\2', args) args = ', '.join(p.strip() for p in args.split(',')) m = re.match(r'(?:^|.*\s+)internal::(?:Bindable)?Matcher<([^>]+)>$', result) if m: result_types = [m.group(1)] if template_name and len(result_types) is 1 and result_types[0] == template_name: result_types = ['*'] else: result_types = extract_result_types(comment) if not result_types: if not comment: # Only overloads don't have their own doxygen comments; ignore those. print('Ignoring "%s"' % name) else: print('Cannot determine result type for "%s"' % name) else: for result_type in result_types: add_matcher(result_type, name, args, comment) else: print('*** Unparsable: "' + declaration + '" ***')
[ "def", "act_on_decl", "(", "declaration", ",", "comment", ",", "allowed_types", ")", ":", "if", "declaration", ".", "strip", "(", ")", ":", "if", "re", ".", "match", "(", "r'^\\s?(#|namespace|using)'", ",", "declaration", ")", ":", "return", "# Node matchers are defined by writing:", "# VariadicDynCastAllOfMatcher<ResultType, ArgumentType> name;", "m", "=", "re", ".", "match", "(", "r\"\"\".*Variadic(?:DynCast)?AllOfMatcher\\s*<\n \\s*([^\\s,]+)\\s*(?:,\n \\s*([^\\s>]+)\\s*)?>\n \\s*([^\\s;]+)\\s*;\\s*$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "result", ",", "inner", ",", "name", "=", "m", ".", "groups", "(", ")", "if", "not", "inner", ":", "inner", "=", "result", "add_matcher", "(", "result", ",", "name", ",", "'Matcher<%s>...'", "%", "inner", ",", "comment", ",", "is_dyncast", "=", "True", ")", "return", "# Special case of type matchers:", "# AstTypeMatcher<ArgumentType> name", "m", "=", "re", ".", "match", "(", "r\"\"\".*AstTypeMatcher\\s*<\n \\s*([^\\s>]+)\\s*>\n \\s*([^\\s;]+)\\s*;\\s*$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "inner", ",", "name", "=", "m", ".", "groups", "(", ")", "add_matcher", "(", "'Type'", ",", "name", ",", "'Matcher<%s>...'", "%", "inner", ",", "comment", ",", "is_dyncast", "=", "True", ")", "# FIXME: re-enable once we have implemented casting on the TypeLoc", "# hierarchy.", "# add_matcher('TypeLoc', '%sLoc' % name, 'Matcher<%sLoc>...' % inner,", "# comment, is_dyncast=True)", "return", "# Parse the various matcher definition macros.", "m", "=", "re", ".", "match", "(", "\"\"\".*AST_TYPE(LOC)?_TRAVERSE_MATCHER(?:_DECL)?\\(\n \\s*([^\\s,]+\\s*),\n \\s*(?:[^\\s,]+\\s*),\n \\s*AST_POLYMORPHIC_SUPPORTED_TYPES\\(([^)]*)\\)\n \\)\\s*;\\s*$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "loc", ",", "name", ",", "results", "=", "m", ".", "groups", "(", ")", "[", "0", ":", "3", "]", "result_types", "=", "[", "r", ".", "strip", "(", ")", "for", "r", "in", "results", ".", "split", "(", "','", ")", "]", "comment_result_types", "=", "extract_result_types", "(", "comment", ")", "if", "(", "comment_result_types", "and", "sorted", "(", "result_types", ")", "!=", "sorted", "(", "comment_result_types", ")", ")", ":", "raise", "Exception", "(", "'Inconsistent documentation for: %s'", "%", "name", ")", "for", "result_type", "in", "result_types", ":", "add_matcher", "(", "result_type", ",", "name", ",", "'Matcher<Type>'", ",", "comment", ")", "# if loc:", "# add_matcher('%sLoc' % result_type, '%sLoc' % name, 'Matcher<TypeLoc>',", "# comment)", "return", "m", "=", "re", ".", "match", "(", "r\"\"\"^\\s*AST_POLYMORPHIC_MATCHER(_P)?(.?)(?:_OVERLOAD)?\\(\n \\s*([^\\s,]+)\\s*,\n \\s*AST_POLYMORPHIC_SUPPORTED_TYPES\\(([^)]*)\\)\n (?:,\\s*([^\\s,]+)\\s*\n ,\\s*([^\\s,]+)\\s*)?\n (?:,\\s*([^\\s,]+)\\s*\n ,\\s*([^\\s,]+)\\s*)?\n (?:,\\s*\\d+\\s*)?\n \\)\\s*{\\s*$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "p", ",", "n", ",", "name", ",", "results", "=", "m", ".", "groups", "(", ")", "[", "0", ":", "4", "]", "args", "=", "m", ".", "groups", "(", ")", "[", "4", ":", "]", "result_types", "=", "[", "r", ".", "strip", "(", ")", "for", "r", "in", "results", ".", "split", "(", "','", ")", "]", "if", "allowed_types", "and", "allowed_types", "!=", "result_types", ":", "raise", "Exception", "(", "'Inconsistent documentation for: %s'", "%", "name", ")", "if", "n", "not", "in", "[", "''", ",", "'2'", "]", ":", "raise", "Exception", "(", 
"'Cannot parse \"%s\"'", "%", "declaration", ")", "args", "=", "', '", ".", "join", "(", "'%s %s'", "%", "(", "args", "[", "i", "]", ",", "args", "[", "i", "+", "1", "]", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "args", ")", ",", "2", ")", "if", "args", "[", "i", "]", ")", "for", "result_type", "in", "result_types", ":", "add_matcher", "(", "result_type", ",", "name", ",", "args", ",", "comment", ")", "return", "m", "=", "re", ".", "match", "(", "r\"\"\"^\\s*AST_POLYMORPHIC_MATCHER_REGEX(?:_OVERLOAD)?\\(\n \\s*([^\\s,]+)\\s*,\n \\s*AST_POLYMORPHIC_SUPPORTED_TYPES\\(([^)]*)\\),\n \\s*([^\\s,]+)\\s*\n (?:,\\s*\\d+\\s*)?\n \\)\\s*{\\s*$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "name", ",", "results", ",", "arg_name", "=", "m", ".", "groups", "(", ")", "[", "0", ":", "3", "]", "result_types", "=", "[", "r", ".", "strip", "(", ")", "for", "r", "in", "results", ".", "split", "(", "','", ")", "]", "if", "allowed_types", "and", "allowed_types", "!=", "result_types", ":", "raise", "Exception", "(", "'Inconsistent documentation for: %s'", "%", "name", ")", "arg", "=", "\"StringRef %s, Regex::RegexFlags Flags = NoFlags\"", "%", "arg_name", "comment", "+=", "\"\"\"\nIf the matcher is used in clang-query, RegexFlags parameter\nshould be passed as a quoted string. e.g: \"NoFlags\".\nFlags can be combined with '|' example \\\"IgnoreCase | BasicRegex\\\"\n\"\"\"", "for", "result_type", "in", "result_types", ":", "add_matcher", "(", "result_type", ",", "name", ",", "arg", ",", "comment", ")", "return", "m", "=", "re", ".", "match", "(", "r\"\"\"^\\s*AST_MATCHER_FUNCTION(_P)?(.?)(?:_OVERLOAD)?\\(\n (?:\\s*([^\\s,]+)\\s*,)?\n \\s*([^\\s,]+)\\s*\n (?:,\\s*([^\\s,]+)\\s*\n ,\\s*([^\\s,]+)\\s*)?\n (?:,\\s*([^\\s,]+)\\s*\n ,\\s*([^\\s,]+)\\s*)?\n (?:,\\s*\\d+\\s*)?\n \\)\\s*{\\s*$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "p", ",", "n", ",", "result", ",", "name", "=", "m", ".", "groups", "(", ")", "[", "0", ":", "4", "]", "args", "=", "m", ".", "groups", "(", ")", "[", "4", ":", "]", "if", "n", "not", "in", "[", "''", ",", "'2'", "]", ":", "raise", "Exception", "(", "'Cannot parse \"%s\"'", "%", "declaration", ")", "args", "=", "', '", ".", "join", "(", "'%s %s'", "%", "(", "args", "[", "i", "]", ",", "args", "[", "i", "+", "1", "]", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "args", ")", ",", "2", ")", "if", "args", "[", "i", "]", ")", "add_matcher", "(", "result", ",", "name", ",", "args", ",", "comment", ")", "return", "m", "=", "re", ".", "match", "(", "r\"\"\"^\\s*AST_MATCHER(_P)?(.?)(?:_OVERLOAD)?\\(\n (?:\\s*([^\\s,]+)\\s*,)?\n \\s*([^\\s,]+)\\s*\n (?:,\\s*([^,]+)\\s*\n ,\\s*([^\\s,]+)\\s*)?\n (?:,\\s*([^\\s,]+)\\s*\n ,\\s*([^\\s,]+)\\s*)?\n (?:,\\s*\\d+\\s*)?\n \\)\\s*{\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "p", ",", "n", ",", "result", ",", "name", "=", "m", ".", "groups", "(", ")", "[", "0", ":", "4", "]", "args", "=", "m", ".", "groups", "(", ")", "[", "4", ":", "]", "if", "not", "result", ":", "if", "not", "allowed_types", ":", "raise", "Exception", "(", "'Did not find allowed result types for: %s'", "%", "name", ")", "result_types", "=", "allowed_types", "else", ":", "result_types", "=", "[", "result", "]", "if", "n", "not", "in", "[", "''", ",", "'2'", "]", ":", "raise", "Exception", "(", "'Cannot parse \"%s\"'", "%", "declaration", ")", "args", "=", "', '", ".", "join", "(", "'%s %s'", "%", "(", "args", "[", "i", "]", ",", "args", 
"[", "i", "+", "1", "]", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "args", ")", ",", "2", ")", "if", "args", "[", "i", "]", ")", "for", "result_type", "in", "result_types", ":", "add_matcher", "(", "result_type", ",", "name", ",", "args", ",", "comment", ")", "return", "m", "=", "re", ".", "match", "(", "r\"\"\"^\\s*AST_MATCHER_REGEX(?:_OVERLOAD)?\\(\n \\s*([^\\s,]+)\\s*,\n \\s*([^\\s,]+)\\s*,\n \\s*([^\\s,]+)\\s*\n (?:,\\s*\\d+\\s*)?\n \\)\\s*{\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "result", ",", "name", ",", "arg_name", "=", "m", ".", "groups", "(", ")", "[", "0", ":", "3", "]", "if", "not", "result", ":", "if", "not", "allowed_types", ":", "raise", "Exception", "(", "'Did not find allowed result types for: %s'", "%", "name", ")", "result_types", "=", "allowed_types", "else", ":", "result_types", "=", "[", "result", "]", "arg", "=", "\"StringRef %s, Regex::RegexFlags Flags = NoFlags\"", "%", "arg_name", "comment", "+=", "\"\"\"\nIf the matcher is used in clang-query, RegexFlags parameter\nshould be passed as a quoted string. e.g: \"NoFlags\".\nFlags can be combined with '|' example \\\"IgnoreCase | BasicRegex\\\"\n\"\"\"", "for", "result_type", "in", "result_types", ":", "add_matcher", "(", "result_type", ",", "name", ",", "arg", ",", "comment", ")", "return", "# Parse ArgumentAdapting matchers.", "m", "=", "re", ".", "match", "(", "r\"\"\"^.*ArgumentAdaptingMatcherFunc<.*>\\s*\n ([a-zA-Z]*);$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "name", "=", "m", ".", "groups", "(", ")", "[", "0", "]", "add_matcher", "(", "'*'", ",", "name", ",", "'Matcher<*>'", ",", "comment", ")", "return", "# Parse Variadic functions.", "m", "=", "re", ".", "match", "(", "r\"\"\"^.*internal::VariadicFunction\\s*<\\s*([^,]+),\\s*([^,]+),\\s*[^>]+>\\s*\n ([a-zA-Z]*);$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "result", ",", "arg", ",", "name", "=", "m", ".", "groups", "(", ")", "[", ":", "3", "]", "add_matcher", "(", "result", ",", "name", ",", "'%s, ..., %s'", "%", "(", "arg", ",", "arg", ")", ",", "comment", ")", "return", "m", "=", "re", ".", "match", "(", "r\"\"\"^.*internal::VariadicFunction\\s*<\\s*\n internal::PolymorphicMatcher<[\\S\\s]+\n AST_POLYMORPHIC_SUPPORTED_TYPES\\(([^)]*)\\),\\s*(.*);$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "results", ",", "trailing", "=", "m", ".", "groups", "(", ")", "trailing", ",", "name", "=", "trailing", ".", "rsplit", "(", "\">\"", ",", "1", ")", "name", "=", "name", ".", "strip", "(", ")", "trailing", ",", "_", "=", "trailing", ".", "rsplit", "(", "\",\"", ",", "1", ")", "_", ",", "arg", "=", "trailing", ".", "rsplit", "(", "\",\"", ",", "1", ")", "arg", "=", "arg", ".", "strip", "(", ")", "result_types", "=", "[", "r", ".", "strip", "(", ")", "for", "r", "in", "results", ".", "split", "(", "','", ")", "]", "for", "result_type", "in", "result_types", ":", "add_matcher", "(", "result_type", ",", "name", ",", "'%s, ..., %s'", "%", "(", "arg", ",", "arg", ")", ",", "comment", ")", "return", "# Parse Variadic operator matchers.", "m", "=", "re", ".", "match", "(", "r\"\"\"^.*VariadicOperatorMatcherFunc\\s*<\\s*([^,]+),\\s*([^\\s]+)\\s*>\\s*\n ([a-zA-Z]*);$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "min_args", ",", "max_args", ",", "name", "=", "m", ".", "groups", "(", ")", "[", ":", "3", "]", "if", "max_args", "==", "'1'", ":", "add_matcher", "(", 
"'*'", ",", "name", ",", "'Matcher<*>'", ",", "comment", ")", "return", "elif", "max_args", "==", "'std::numeric_limits<unsigned>::max()'", ":", "add_matcher", "(", "'*'", ",", "name", ",", "'Matcher<*>, ..., Matcher<*>'", ",", "comment", ")", "return", "m", "=", "re", ".", "match", "(", "r\"\"\"^.*MapAnyOfMatcher<.*>\\s*\n ([a-zA-Z]*);$\"\"\"", ",", "declaration", ",", "flags", "=", "re", ".", "X", ")", "if", "m", ":", "name", "=", "m", ".", "groups", "(", ")", "[", "0", "]", "add_matcher", "(", "'*'", ",", "name", ",", "'Matcher<*>...Matcher<*>'", ",", "comment", ")", "return", "# Parse free standing matcher functions, like:", "# Matcher<ResultType> Name(Matcher<ArgumentType> InnerMatcher) {", "m", "=", "re", ".", "match", "(", "r\"\"\"^\\s*(?:template\\s+<\\s*(?:class|typename)\\s+(.+)\\s*>\\s+)? \n (.*)\\s+\n ([^\\s\\(]+)\\s*\\(\n (.*)\n \\)\\s*{\"\"\"", ",", "declaration", ",", "re", ".", "X", ")", "if", "m", ":", "template_name", ",", "result", ",", "name", ",", "args", "=", "m", ".", "groups", "(", ")", "if", "template_name", ":", "matcherTemplateArgs", "=", "re", ".", "findall", "(", "r'Matcher<\\s*(%s)\\s*>'", "%", "template_name", ",", "args", ")", "templateArgs", "=", "re", ".", "findall", "(", "r'(?:^|[\\s,<])(%s)(?:$|[\\s,>])'", "%", "template_name", ",", "args", ")", "if", "len", "(", "matcherTemplateArgs", ")", "<", "len", "(", "templateArgs", ")", ":", "# The template name is used naked, so don't replace with `*`` later on", "template_name", "=", "None", "else", ":", "args", "=", "re", ".", "sub", "(", "r'(^|[\\s,<])%s($|[\\s,>])'", "%", "template_name", ",", "r'\\1*\\2'", ",", "args", ")", "args", "=", "', '", ".", "join", "(", "p", ".", "strip", "(", ")", "for", "p", "in", "args", ".", "split", "(", "','", ")", ")", "m", "=", "re", ".", "match", "(", "r'(?:^|.*\\s+)internal::(?:Bindable)?Matcher<([^>]+)>$'", ",", "result", ")", "if", "m", ":", "result_types", "=", "[", "m", ".", "group", "(", "1", ")", "]", "if", "template_name", "and", "len", "(", "result_types", ")", "is", "1", "and", "result_types", "[", "0", "]", "==", "template_name", ":", "result_types", "=", "[", "'*'", "]", "else", ":", "result_types", "=", "extract_result_types", "(", "comment", ")", "if", "not", "result_types", ":", "if", "not", "comment", ":", "# Only overloads don't have their own doxygen comments; ignore those.", "print", "(", "'Ignoring \"%s\"'", "%", "name", ")", "else", ":", "print", "(", "'Cannot determine result type for \"%s\"'", "%", "name", ")", "else", ":", "for", "result_type", "in", "result_types", ":", "add_matcher", "(", "result_type", ",", "name", ",", "args", ",", "comment", ")", "else", ":", "print", "(", "'*** Unparsable: \"'", "+", "declaration", "+", "'\" ***'", ")" ]
https://github.com/intel/llvm/blob/e6d0547e9d99b5a56430c4749f6c7e328bf221ab/clang/docs/tools/dump_ast_matchers.py#L164-L439
rootm0s/Protectors
5b3f4d11687a5955caf9c3af30666c4bfc2c19ab
OWASP-ZSC/module/readline_windows/pyreadline/modes/notemacs.py
python
NotEmacsMode.next_history
(self, e)
Move forward through the history list, fetching the next command.
Move forward through the history list, fetching the next command.
[ "Move", "forward", "through", "the", "history", "list", "fetching", "the", "next", "command", "." ]
def next_history(self, e): # (C-n) '''Move forward through the history list, fetching the next command. ''' self._history.next_history(self.l_buffer)
[ "def", "next_history", "(", "self", ",", "e", ")", ":", "# (C-n)", "self", ".", "_history", ".", "next_history", "(", "self", ".", "l_buffer", ")" ]
https://github.com/rootm0s/Protectors/blob/5b3f4d11687a5955caf9c3af30666c4bfc2c19ab/OWASP-ZSC/module/readline_windows/pyreadline/modes/notemacs.py#L146-L148
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/py_vulcanize/third_party/rcssmin/_setup/py3/ext.py
python
ConfTest.destroy
(self)
Destroy the conftest leftovers on disk
Destroy the conftest leftovers on disk
[ "Destroy", "the", "conftest", "leftovers", "on", "disk" ]
def destroy(self): """ Destroy the conftest leftovers on disk """ tempdir, self._tempdir = self._tempdir, None if tempdir is not None: _shutil.rmtree(tempdir)
[ "def", "destroy", "(", "self", ")", ":", "tempdir", ",", "self", ".", "_tempdir", "=", "self", ".", "_tempdir", ",", "None", "if", "tempdir", "is", "not", "None", ":", "_shutil", ".", "rmtree", "(", "tempdir", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/py_vulcanize/third_party/rcssmin/_setup/py3/ext.py#L196-L200
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
python
SBHostOS.__init__
(self)
__init__(self) -> SBHostOS
__init__(self) -> SBHostOS
[ "__init__", "(", "self", ")", "-", ">", "SBHostOS" ]
def __init__(self): """__init__(self) -> SBHostOS""" this = _lldb.new_SBHostOS() try: self.this.append(this) except: self.this = this
[ "def", "__init__", "(", "self", ")", ":", "this", "=", "_lldb", ".", "new_SBHostOS", "(", ")", "try", ":", "self", ".", "this", ".", "append", "(", "this", ")", "except", ":", "self", ".", "this", "=", "this" ]
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L5115-L5119
pytorch/pytorch
7176c92687d3cc847cc046bf002269c6949a21c2
benchmarks/operator_benchmark/benchmark_caffe2.py
python
Caffe2BenchmarkBase._device_option
(self, device)
return self.dev
This method is used to set device option.
This method is used to set device option.
[ "This", "method", "is", "used", "to", "set", "device", "option", "." ]
def _device_option(self, device): """ This method is used to set device option. """ if device not in ['cuda', 'cpu']: raise ValueError("Missing attrs in configs") if 'cuda' in device: self.dev = core.DeviceOption(caffe2_pb2.CUDA, 0) else: self.dev = core.DeviceOption(caffe2_pb2.CPU) return self.dev
[ "def", "_device_option", "(", "self", ",", "device", ")", ":", "if", "device", "not", "in", "[", "'cuda'", ",", "'cpu'", "]", ":", "raise", "ValueError", "(", "\"Missing attrs in configs\"", ")", "if", "'cuda'", "in", "device", ":", "self", ".", "dev", "=", "core", ".", "DeviceOption", "(", "caffe2_pb2", ".", "CUDA", ",", "0", ")", "else", ":", "self", ".", "dev", "=", "core", ".", "DeviceOption", "(", "caffe2_pb2", ".", "CPU", ")", "return", "self", ".", "dev" ]
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/benchmarks/operator_benchmark/benchmark_caffe2.py#L30-L40
twhui/LiteFlowNet
00925aebf2db9ac50f4b1666f718688b10dd10d1
scripts/cpp_lint.py
python
CheckEmptyBlockBody
(filename, clean_lines, linenum, error)
Look for empty loop/conditional body with only a single semicolon. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Look for empty loop/conditional body with only a single semicolon.
[ "Look", "for", "empty", "loop", "/", "conditional", "body", "with", "only", "a", "single", "semicolon", "." ]
def CheckEmptyBlockBody(filename, clean_lines, linenum, error): """Look for empty loop/conditional body with only a single semicolon. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ # Search for loop keywords at the beginning of the line. Because only # whitespaces are allowed before the keywords, this will also ignore most # do-while-loops, since those lines should start with closing brace. # # We also check "if" blocks here, since an empty conditional block # is likely an error. line = clean_lines.elided[linenum] matched = Match(r'\s*(for|while|if)\s*\(', line) if matched: # Find the end of the conditional expression (end_line, end_linenum, end_pos) = CloseExpression( clean_lines, linenum, line.find('(')) # Output warning if what follows the condition expression is a semicolon. # No warning for all other cases, including whitespace or newline, since we # have a separate check for semicolons preceded by whitespace. if end_pos >= 0 and Match(r';', end_line[end_pos:]): if matched.group(1) == 'if': error(filename, end_linenum, 'whitespace/empty_conditional_body', 5, 'Empty conditional bodies should use {}') else: error(filename, end_linenum, 'whitespace/empty_loop_body', 5, 'Empty loop bodies should use {} or continue')
[ "def", "CheckEmptyBlockBody", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "# Search for loop keywords at the beginning of the line. Because only", "# whitespaces are allowed before the keywords, this will also ignore most", "# do-while-loops, since those lines should start with closing brace.", "#", "# We also check \"if\" blocks here, since an empty conditional block", "# is likely an error.", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "matched", "=", "Match", "(", "r'\\s*(for|while|if)\\s*\\('", ",", "line", ")", "if", "matched", ":", "# Find the end of the conditional expression", "(", "end_line", ",", "end_linenum", ",", "end_pos", ")", "=", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "line", ".", "find", "(", "'('", ")", ")", "# Output warning if what follows the condition expression is a semicolon.", "# No warning for all other cases, including whitespace or newline, since we", "# have a separate check for semicolons preceded by whitespace.", "if", "end_pos", ">=", "0", "and", "Match", "(", "r';'", ",", "end_line", "[", "end_pos", ":", "]", ")", ":", "if", "matched", ".", "group", "(", "1", ")", "==", "'if'", ":", "error", "(", "filename", ",", "end_linenum", ",", "'whitespace/empty_conditional_body'", ",", "5", ",", "'Empty conditional bodies should use {}'", ")", "else", ":", "error", "(", "filename", ",", "end_linenum", ",", "'whitespace/empty_loop_body'", ",", "5", ",", "'Empty loop bodies should use {} or continue'", ")" ]
https://github.com/twhui/LiteFlowNet/blob/00925aebf2db9ac50f4b1666f718688b10dd10d1/scripts/cpp_lint.py#L3243-L3275
Slicer/Slicer
ba9fadf332cb0303515b68d8d06a344c82e3e3e5
Utilities/Scripts/SlicerWizard/Utilities.py
python
inquire
(msg, choices=_yesno)
Get multiple-choice input from the user. :param msg: Text of the prompt which the user will be shown. :type msg: :class:`str` :param choices: Map of possible choices to their respective return values. :type choices: :class:`dict` :returns: Value of the selected choice. This function presents a question (``msg``) to the user and asks them to select an option from a list of choices, which are presented in the manner of 'git add --patch' (i.e. the possible choices are shown between the prompt text and the final '?'). The prompt is repeated indefinitely until a valid selection is made. The ``choices`` are a :class:`dict`, with each key being a possible choice (using a single letter is recommended). The value for the selected key is returned to the caller. The default ``choices`` provides a yes/no prompt with a :class:`bool` return value.
Get multiple-choice input from the user.
[ "Get", "multiple", "-", "choice", "input", "from", "the", "user", "." ]
def inquire(msg, choices=_yesno): """Get multiple-choice input from the user. :param msg: Text of the prompt which the user will be shown. :type msg: :class:`str` :param choices: Map of possible choices to their respective return values. :type choices: :class:`dict` :returns: Value of the selected choice. This function presents a question (``msg``) to the user and asks them to select an option from a list of choices, which are presented in the manner of 'git add --patch' (i.e. the possible choices are shown between the prompt text and the final '?'). The prompt is repeated indefinitely until a valid selection is made. The ``choices`` are a :class:`dict`, with each key being a possible choice (using a single letter is recommended). The value for the selected key is returned to the caller. The default ``choices`` provides a yes/no prompt with a :class:`bool` return value. """ choiceKeys = list(choices.keys()) msg = "{} {}? ".format(msg, ",".join(choiceKeys)) def throw(*args): raise ValueError() parser = argparse.ArgumentParser() parser.add_argument("choice", choices=choiceKeys) parser.error = throw while True: try: args = parser.parse_args(input(msg)) if args.choice in choices: return choices[args.choice] except: pass
[ "def", "inquire", "(", "msg", ",", "choices", "=", "_yesno", ")", ":", "choiceKeys", "=", "list", "(", "choices", ".", "keys", "(", ")", ")", "msg", "=", "\"{} {}? \"", ".", "format", "(", "msg", ",", "\",\"", ".", "join", "(", "choiceKeys", ")", ")", "def", "throw", "(", "*", "args", ")", ":", "raise", "ValueError", "(", ")", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "\"choice\"", ",", "choices", "=", "choiceKeys", ")", "parser", ".", "error", "=", "throw", "while", "True", ":", "try", ":", "args", "=", "parser", ".", "parse_args", "(", "input", "(", "msg", ")", ")", "if", "args", ".", "choice", "in", "choices", ":", "return", "choices", "[", "args", ".", "choice", "]", "except", ":", "pass" ]
https://github.com/Slicer/Slicer/blob/ba9fadf332cb0303515b68d8d06a344c82e3e3e5/Utilities/Scripts/SlicerWizard/Utilities.py#L132-L178
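A hedged usage sketch for inquire(); the import path below is assumed from the file location and not confirmed:

from SlicerWizard.Utilities import inquire  # assumed import path

# The default choices give a yes/no prompt returning a bool.
if inquire("Overwrite the existing module"):
    print("overwriting")

# A custom choice map returns the value of the selected key.
color = inquire("Pick a color", choices={"r": "red", "g": "green", "b": "blue"})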
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/python/framework/ops.py
python
IndexedSlices.dtype
(self)
return self.values.dtype
The `DType` of elements in this tensor.
The `DType` of elements in this tensor.
[ "The", "DType", "of", "elements", "in", "this", "tensor", "." ]
def dtype(self): """The `DType` of elements in this tensor.""" return self.values.dtype
[ "def", "dtype", "(", "self", ")", ":", "return", "self", ".", "values", ".", "dtype" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/framework/ops.py#L879-L881
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/android/loading/cloud/common/clovis_task.py
python
ClovisTask.__init__
(self, action, action_params, backend_params)
See tools/android/loading/cloud/frontend/README.md for a specification of the parameters. Args: action(str): Action accomplished by this task. action_params(dict): Parameters of task. backend_params(dict): Parameters of the instances running the task. If this is None, no instances are created. If this dictionary has no 'tag' key, a unique tag will be generated.
See tools/android/loading/cloud/frontend/README.md for a specification of the parameters.
[ "See", "tools", "/", "android", "/", "loading", "/", "cloud", "/", "frontend", "/", "README", ".", "md", "for", "a", "specification", "of", "the", "parameters", "." ]
def __init__(self, action, action_params, backend_params): """ See tools/android/loading/cloud/frontend/README.md for a specification of the parameters. Args: action(str): Action accomplished by this task. action_params(dict): Parameters of task. backend_params(dict): Parameters of the instances running the task. If this is None, no instances are created. If this dictionary has no 'tag' key, a unique tag will be generated. """ self._action = action self._action_params = action_params or {} self._backend_params = backend_params or {} # If no tag is specified, generate a unique tag. if not self._backend_params.get('tag'): self._backend_params.update({'tag': str(uuid.uuid1())})
[ "def", "__init__", "(", "self", ",", "action", ",", "action_params", ",", "backend_params", ")", ":", "self", ".", "_action", "=", "action", "self", ".", "_action_params", "=", "action_params", "or", "{", "}", "self", ".", "_backend_params", "=", "backend_params", "or", "{", "}", "# If no tag is specified, generate a unique tag.", "if", "not", "self", ".", "_backend_params", ".", "get", "(", "'tag'", ")", ":", "self", ".", "_backend_params", ".", "update", "(", "{", "'tag'", ":", "str", "(", "uuid", ".", "uuid1", "(", ")", ")", "}", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/android/loading/cloud/common/clovis_task.py#L14-L30
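A hedged construction sketch; the action name and parameter keys below are illustrative, not taken from the frontend spec:

# Hypothetical action and params; see the cloud frontend README for real ones.
task = ClovisTask(action='trace',
                  action_params={'urls': ['https://example.com']},
                  backend_params={'instance_count': 1})
# With no 'tag' in backend_params, the constructor generates a unique uuid1 tag
# (read here via the private attribute, for demonstration only).
print(task._backend_params['tag'])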
trilinos/Trilinos
6168be6dd51e35e1cd681e9c4b24433e709df140
packages/seacas/scripts/exomerge3.py
python
ExodusModel._distance_squared_between
(point_one, point_two)
return ((point_two[0] - point_one[0])**2 + (point_two[1] - point_one[1])**2 + (point_two[2] - point_one[2])**2)
Return the distance squared between two three-dimensional points.
Return the distance squared between two three-dimensional points.
[ "Return", "the", "distance", "squared", "between", "two", "three", "-", "dimensional", "points", "." ]
def _distance_squared_between(point_one, point_two): """Return the distance squared between two three-dimensional points.""" return ((point_two[0] - point_one[0])**2 + (point_two[1] - point_one[1])**2 + (point_two[2] - point_one[2])**2)
[ "def", "_distance_squared_between", "(", "point_one", ",", "point_two", ")", ":", "return", "(", "(", "point_two", "[", "0", "]", "-", "point_one", "[", "0", "]", ")", "**", "2", "+", "(", "point_two", "[", "1", "]", "-", "point_one", "[", "1", "]", ")", "**", "2", "+", "(", "point_two", "[", "2", "]", "-", "point_one", "[", "2", "]", ")", "**", "2", ")" ]
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/scripts/exomerge3.py#L6813-L6817
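A standalone check of the formula, renamed to avoid the class since the original is a static helper on ExodusModel:

def distance_squared_between(point_one, point_two):
    # Same arithmetic as ExodusModel._distance_squared_between.
    return ((point_two[0] - point_one[0]) ** 2 +
            (point_two[1] - point_one[1]) ** 2 +
            (point_two[2] - point_one[2]) ** 2)

assert distance_squared_between((0, 0, 0), (1, 2, 2)) == 9  # |(1,2,2)| = 3, squared = 9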
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Tools/pynche/ColorDB.py
python
ColorDB.find_byrgb
(self, rgbtuple)
Return name for rgbtuple
Return name for rgbtuple
[ "Return", "name", "for", "rgbtuple" ]
def find_byrgb(self, rgbtuple): """Return name for rgbtuple""" try: return self.__byrgb[rgbtuple] except KeyError: raise BadColor(rgbtuple) from None
[ "def", "find_byrgb", "(", "self", ",", "rgbtuple", ")", ":", "try", ":", "return", "self", ".", "__byrgb", "[", "rgbtuple", "]", "except", "KeyError", ":", "raise", "BadColor", "(", "rgbtuple", ")", "from", "None" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Tools/pynche/ColorDB.py#L85-L90
intel/ros_object_analytics
eb0208edbb6da67e5d5c4092fd2964a2c8d9838e
object_analytics_visualization/scripts/marker_publisher.py
python
ObjectItem.__init__
(self, header, roi, track_id, detected_object, min, max)
Build an instance from message header, region of interest, tracking id, detected object and 3d min max. Args: header (std_msgs.msg.Header): Message header roi (RegionOfInterest): Region of interest track_id (int): Tracking id detected_object (Object): Instance of object_msgs.msg.Object min (geometry_msgs.msg.Point32): Min position in 3d space max (geometry_msgs.msg.Point32): Max position in 3d space
Build an instance from message header, region of interest, tracking id, detected object and 3d min max.
[ "Build", "an", "instance", "from", "message", "header", "region", "of", "interest", "tracking", "id", "detected", "object", "and", "3d", "min", "max", "." ]
def __init__(self, header, roi, track_id, detected_object, min, max): """Build an instance from message header, region of interest, tracking id, detected object and 3d min max. Args: header (std_msgs.msg.Header): Message header roi (RegionOfInterest): Region of interest track_id (int): Tracking id detected_object (Object): Instance of object_msgs.msg.Object min (geometry_msgs.msg.Point32): Min position in 3d space max (geometry_msgs.msg.Point32): Max position in 3d space """ self._header = header self._roi = roi self._track_id = track_id self._object = detected_object self._p1 = deepcopy(min) self._p2 = deepcopy(self._p1) self._p2.x = max.x self._p3 = deepcopy(self._p2) self._p3.z = max.z self._p4 = deepcopy(self._p3) self._p4.x = min.x self._p5 = deepcopy(min) self._p5.y = max.y self._p6 = deepcopy(self._p5) self._p6.x = max.x self._p7 = deepcopy(self._p6) self._p7.z = max.z self._p8 = deepcopy(self._p7) self._p8.x = min.x
[ "def", "__init__", "(", "self", ",", "header", ",", "roi", ",", "track_id", ",", "detected_object", ",", "min", ",", "max", ")", ":", "self", ".", "_header", "=", "header", "self", ".", "_roi", "=", "roi", "self", ".", "_track_id", "=", "track_id", "self", ".", "_object", "=", "detected_object", "self", ".", "_p1", "=", "deepcopy", "(", "min", ")", "self", ".", "_p2", "=", "deepcopy", "(", "self", ".", "_p1", ")", "self", ".", "_p2", ".", "x", "=", "max", ".", "x", "self", ".", "_p3", "=", "deepcopy", "(", "self", ".", "_p2", ")", "self", ".", "_p3", ".", "z", "=", "max", ".", "z", "self", ".", "_p4", "=", "deepcopy", "(", "self", ".", "_p3", ")", "self", ".", "_p4", ".", "x", "=", "min", ".", "x", "self", ".", "_p5", "=", "deepcopy", "(", "min", ")", "self", ".", "_p5", ".", "y", "=", "max", ".", "y", "self", ".", "_p6", "=", "deepcopy", "(", "self", ".", "_p5", ")", "self", ".", "_p6", ".", "x", "=", "max", ".", "x", "self", ".", "_p7", "=", "deepcopy", "(", "self", ".", "_p6", ")", "self", ".", "_p7", ".", "z", "=", "max", ".", "z", "self", ".", "_p8", "=", "deepcopy", "(", "self", ".", "_p7", ")", "self", ".", "_p8", ".", "x", "=", "min", ".", "x" ]
https://github.com/intel/ros_object_analytics/blob/eb0208edbb6da67e5d5c4092fd2964a2c8d9838e/object_analytics_visualization/scripts/marker_publisher.py#L64-L101
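The eight deep-copied points trace the corners of an axis-aligned box; a sketch of the same layout with plain tuples, assuming min/max are opposite corners:

def box_corners(mn, mx):
    # Mirrors the _p1.._p8 construction: four corners on the y = min face,
    # then four on the y = max face.
    (x0, y0, z0), (x1, y1, z1) = mn, mx
    bottom = [(x0, y0, z0), (x1, y0, z0), (x1, y0, z1), (x0, y0, z1)]  # _p1.._p4
    top = [(x0, y1, z0), (x1, y1, z0), (x1, y1, z1), (x0, y1, z1)]     # _p5.._p8
    return bottom + top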
LARG/HFO
b8b2a1d462823c6732f4d5581aa7fe2e371d55cb
hfo/hfo.py
python
HFOEnvironment.getState
(self, state_data=None)
return state_data
Returns the current state features
Returns the current state features
[ "Returns", "the", "current", "state", "features" ]
def getState(self, state_data=None): """ Returns the current state features """ if state_data is None: state_data = np.zeros(self.getStateSize(), dtype=np.float32) hfo_lib.getState(self.obj, as_ctypes(state_data)) return state_data
[ "def", "getState", "(", "self", ",", "state_data", "=", "None", ")", ":", "if", "state_data", "is", "None", ":", "state_data", "=", "np", ".", "zeros", "(", "self", ".", "getStateSize", "(", ")", ",", "dtype", "=", "np", ".", "float32", ")", "hfo_lib", ".", "getState", "(", "self", ".", "obj", ",", "as_ctypes", "(", "state_data", ")", ")", "return", "state_data" ]
https://github.com/LARG/HFO/blob/b8b2a1d462823c6732f4d5581aa7fe2e371d55cb/hfo/hfo.py#L150-L155
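A hedged usage sketch; it assumes an HFO server is already running and that connectToServer can be called with its defaults:

from hfo import HFOEnvironment

env = HFOEnvironment()
env.connectToServer()            # assumed default arguments; needs a live server
state = env.getState()           # first call allocates a fresh float32 array
state = env.getState(state)      # later calls can reuse the same buffer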
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_controls.py
python
HelpProvider_Set
(*args, **kwargs)
return _controls_.HelpProvider_Set(*args, **kwargs)
HelpProvider_Set(HelpProvider helpProvider) -> HelpProvider Set the current, application-wide help provider. Returns the previous one. Unlike some other classes, the help provider is not created on demand. This must be explicitly done by the application.
HelpProvider_Set(HelpProvider helpProvider) -> HelpProvider
[ "HelpProvider_Set", "(", "HelpProvider", "helpProvider", ")", "-", ">", "HelpProvider" ]
def HelpProvider_Set(*args, **kwargs): """ HelpProvider_Set(HelpProvider helpProvider) -> HelpProvider Set the current, application-wide help provider. Returns the previous one. Unlike some other classes, the help provider is not created on demand. This must be explicitly done by the application. """ return _controls_.HelpProvider_Set(*args, **kwargs)
[ "def", "HelpProvider_Set", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "HelpProvider_Set", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_controls.py#L6301-L6309
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Code/Tools/waf-1.7.13/waflib/Tools/gcc.py
python
find_gcc
(conf)
Find the program gcc, and if present, try to detect its version number
Find the program gcc, and if present, try to detect its version number
[ "Find", "the", "program", "gcc", "and", "if", "present", "try", "to", "detect", "its", "version", "number" ]
def find_gcc(conf): """ Find the program gcc, and if present, try to detect its version number """ cc = conf.find_program(['gcc', 'cc'], var='CC') cc = conf.cmd_to_list(cc) conf.get_cc_version(cc, gcc=True) conf.env.CC_NAME = 'gcc' conf.env.CC = cc
[ "def", "find_gcc", "(", "conf", ")", ":", "cc", "=", "conf", ".", "find_program", "(", "[", "'gcc'", ",", "'cc'", "]", ",", "var", "=", "'CC'", ")", "cc", "=", "conf", ".", "cmd_to_list", "(", "cc", ")", "conf", ".", "get_cc_version", "(", "cc", ",", "gcc", "=", "True", ")", "conf", ".", "env", ".", "CC_NAME", "=", "'gcc'", "conf", ".", "env", ".", "CC", "=", "cc" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Code/Tools/waf-1.7.13/waflib/Tools/gcc.py#L15-L23
include-what-you-use/include-what-you-use
208fbfffa5d69364b9f78e427caa443441279283
fix_includes.py
python
_WriteFile
(filename, fileinfo, file_lines)
Write the given file-lines to the file.
Write the given file-lines to the file.
[ "Write", "the", "given", "file", "-", "lines", "to", "the", "file", "." ]
def _WriteFile(filename, fileinfo, file_lines): """Write the given file-lines to the file.""" try: with open(filename, 'wb') as f: # file_lines already have line endings, so join with ''. content = ''.join(file_lines) content = content.encode(fileinfo.encoding) f.write(content) except (IOError, OSError) as why: print("Error writing '%s': %s" % (filename, why))
[ "def", "_WriteFile", "(", "filename", ",", "fileinfo", ",", "file_lines", ")", ":", "try", ":", "with", "open", "(", "filename", ",", "'wb'", ")", "as", "f", ":", "# file_lines already have line endings, so join with ''.", "content", "=", "''", ".", "join", "(", "file_lines", ")", "content", "=", "content", ".", "encode", "(", "fileinfo", ".", "encoding", ")", "f", ".", "write", "(", "content", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "why", ":", "print", "(", "\"Error writing '%s': %s\"", "%", "(", "filename", ",", "why", ")", ")" ]
https://github.com/include-what-you-use/include-what-you-use/blob/208fbfffa5d69364b9f78e427caa443441279283/fix_includes.py#L606-L615
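A minimal sketch of calling _WriteFile; FileInfo here is a hypothetical stand-in carrying only the .encoding attribute the function reads:

import collections

FileInfo = collections.namedtuple('FileInfo', ['encoding'])  # hypothetical stand-in
_WriteFile('out.txt', FileInfo('utf-8'), ['line one\n', 'line two\n'])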
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
gpu/command_buffer/build_gles2_cmd_buffer.py
python
BucketFunction.WriteCommandDescription
(self, file)
Overridden from Function
Overridden from Function
[ "Overridden", "from", "Function" ]
def WriteCommandDescription(self, file): """Overridden from Function""" file.Write("//! Bucket version of command that corresponds to gl%s.\n" % self.original_name)
[ "def", "WriteCommandDescription", "(", "self", ",", "file", ")", ":", "file", ".", "Write", "(", "\"//! Bucket version of command that corresponds to gl%s.\\n\"", "%", "self", ".", "original_name", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/gpu/command_buffer/build_gles2_cmd_buffer.py#L6793-L6796
NVIDIA/TensorRT
42805f078052daad1a98bc5965974fcffaad0960
demo/HuggingFace/NNDF/models.py
python
ModelFileConverter.onnx_to_torch
( self, output_fpath: str, input_fpath: str, network_metadata: NetworkMetadata )
Converts an ONNX file into a torch.Model which is written to disk. Args: output_fpath (str): File location of the generated torch model file. input_fpath (str): File location of the input ONNX file. network_metadata (NetworkMetadata): Network metadata of the network being converted. Returns: TorchModelFile: Newly generated TorchModelFile
Converts an ONNX file into a torch.Model which is written to disk.
[ "Converts", "an", "ONNX", "file", "into", "a", "torch", ".", "Model", "which", "is", "written", "to", "disk", "." ]
def onnx_to_torch( self, output_fpath: str, input_fpath: str, network_metadata: NetworkMetadata ): """ Converts an ONNX file into a torch.Model which is written to disk. Args: output_fpath (str): File location of the generated torch model file. input_fpath (str): File location of the input ONNX file. network_metadata (NetworkMetadata): Network metadata of the network being converted. Returns: TorchModelFile: Newly generated TorchModelFile """ raise NotImplementedError( "Current model does not support exporting to torch model." )
[ "def", "onnx_to_torch", "(", "self", ",", "output_fpath", ":", "str", ",", "input_fpath", ":", "str", ",", "network_metadata", ":", "NetworkMetadata", ")", ":", "raise", "NotImplementedError", "(", "\"Current model does not support exporting to torch model.\"", ")" ]
https://github.com/NVIDIA/TensorRT/blob/42805f078052daad1a98bc5965974fcffaad0960/demo/HuggingFace/NNDF/models.py#L71-L87
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/python/training/input.py
python
shuffle_batch_join
(tensors_list, batch_size, capacity, min_after_dequeue, seed=None, enqueue_many=False, shapes=None, allow_smaller_final_batch=False, shared_name=None, name=None)
Create batches by randomly shuffling tensors. The `tensors_list` argument is a list of tuples of tensors, or a list of dictionaries of tensors. Each element in the list is treated similarly to the `tensors` argument of `tf.train.shuffle_batch()`. This version enqueues a different list of tensors in different threads. It adds the following to the current `Graph`: * A shuffling queue into which tensors from `tensors_list` are enqueued. * A `dequeue_many` operation to create batches from the queue. * A `QueueRunner` to `QUEUE_RUNNER` collection, to enqueue the tensors from `tensors_list`. `len(tensors_list)` threads will be started, with thread `i` enqueuing the tensors from `tensors_list[i]`. `tensors_list[i1][j]` must match `tensors_list[i2][j]` in type and shape, except in the first dimension if `enqueue_many` is true. If `enqueue_many` is `False`, each `tensors_list[i]` is assumed to represent a single example. An input tensor with shape `[x, y, z]` will be output as a tensor with shape `[batch_size, x, y, z]`. If `enqueue_many` is `True`, `tensors_list[i]` is assumed to represent a batch of examples, where the first dimension is indexed by example, and all members of `tensors_list[i]` should have the same size in the first dimension. If an input tensor has shape `[*, x, y, z]`, the output will have shape `[batch_size, x, y, z]`. The `capacity` argument controls how long the prefetching is allowed to grow the queues. The returned operation is a dequeue operation and will throw `tf.errors.OutOfRangeError` if the input queue is exhausted. If this operation is feeding another input queue, its queue runner will catch this exception, however, if this operation is used in your main thread you are responsible for catching this yourself. If `allow_smaller_final_batch` is `True`, a smaller batch value than `batch_size` is returned when the queue is closed and there are not enough elements to fill the batch, otherwise the pending elements are discarded. In addition, all output tensors' static shapes, as accessed via the `get_shape` method, will have a first `Dimension` value of `None`, and operations that depend on fixed batch_size would fail. Args: tensors_list: A list of tuples or dictionaries of tensors to enqueue. batch_size: An integer. The new batch size pulled from the queue. capacity: An integer. The maximum number of elements in the queue. min_after_dequeue: Minimum number of elements in the queue after a dequeue, used to ensure a level of mixing of elements. seed: Seed for the random shuffling within the queue. enqueue_many: Whether each tensor in `tensors_list` is a single example. shapes: (Optional) The shapes for each example. Defaults to the inferred shapes for `tensors_list[i]`. allow_smaller_final_batch: (Optional) Boolean. If `True`, allow the final batch to be smaller if there are insufficient items left in the queue. shared_name: (optional). If set, this queue will be shared under the given name across multiple sessions. name: (Optional) A name for the operations. Returns: A list or dictionary of tensors with the same number and types as `tensors_list[i]`. Raises: ValueError: If the `shapes` are not specified, and cannot be inferred from the elements of `tensors_list`.
Create batches by randomly shuffling tensors.
[ "Create", "batches", "by", "randomly", "shuffling", "tensors", "." ]
def shuffle_batch_join(tensors_list, batch_size, capacity, min_after_dequeue, seed=None, enqueue_many=False, shapes=None, allow_smaller_final_batch=False, shared_name=None, name=None): """Create batches by randomly shuffling tensors. The `tensors_list` argument is a list of tuples of tensors, or a list of dictionaries of tensors. Each element in the list is treated similarly to the `tensors` argument of `tf.train.shuffle_batch()`. This version enqueues a different list of tensors in different threads. It adds the following to the current `Graph`: * A shuffling queue into which tensors from `tensors_list` are enqueued. * A `dequeue_many` operation to create batches from the queue. * A `QueueRunner` to `QUEUE_RUNNER` collection, to enqueue the tensors from `tensors_list`. `len(tensors_list)` threads will be started, with thread `i` enqueuing the tensors from `tensors_list[i]`. `tensors_list[i1][j]` must match `tensors_list[i2][j]` in type and shape, except in the first dimension if `enqueue_many` is true. If `enqueue_many` is `False`, each `tensors_list[i]` is assumed to represent a single example. An input tensor with shape `[x, y, z]` will be output as a tensor with shape `[batch_size, x, y, z]`. If `enqueue_many` is `True`, `tensors_list[i]` is assumed to represent a batch of examples, where the first dimension is indexed by example, and all members of `tensors_list[i]` should have the same size in the first dimension. If an input tensor has shape `[*, x, y, z]`, the output will have shape `[batch_size, x, y, z]`. The `capacity` argument controls how long the prefetching is allowed to grow the queues. The returned operation is a dequeue operation and will throw `tf.errors.OutOfRangeError` if the input queue is exhausted. If this operation is feeding another input queue, its queue runner will catch this exception, however, if this operation is used in your main thread you are responsible for catching this yourself. If `allow_smaller_final_batch` is `True`, a smaller batch value than `batch_size` is returned when the queue is closed and there are not enough elements to fill the batch, otherwise the pending elements are discarded. In addition, all output tensors' static shapes, as accessed via the `get_shape` method, will have a first `Dimension` value of `None`, and operations that depend on fixed batch_size would fail. Args: tensors_list: A list of tuples or dictionaries of tensors to enqueue. batch_size: An integer. The new batch size pulled from the queue. capacity: An integer. The maximum number of elements in the queue. min_after_dequeue: Minimum number of elements in the queue after a dequeue, used to ensure a level of mixing of elements. seed: Seed for the random shuffling within the queue. enqueue_many: Whether each tensor in `tensors_list` is a single example. shapes: (Optional) The shapes for each example. Defaults to the inferred shapes for `tensors_list[i]`. allow_smaller_final_batch: (Optional) Boolean. If `True`, allow the final batch to be smaller if there are insufficient items left in the queue. shared_name: (optional). If set, this queue will be shared under the given name across multiple sessions. name: (Optional) A name for the operations. Returns: A list or dictionary of tensors with the same number and types as `tensors_list[i]`. Raises: ValueError: If the `shapes` are not specified, and cannot be inferred from the elements of `tensors_list`. """ tensor_list_list = _as_tensor_list_list(tensors_list) with ops.op_scope( _flatten(tensor_list_list), name, "shuffle_batch_join") as name: tensor_list_list = _validate_join(tensor_list_list) tensor_list_list, sparse_info = _serialize_sparse_tensors_join( tensor_list_list, enqueue_many) types = _dtypes(tensor_list_list) shapes = _shapes(tensor_list_list, shapes, enqueue_many) queue = data_flow_ops.RandomShuffleQueue( capacity=capacity, min_after_dequeue=min_after_dequeue, seed=seed, dtypes=types, shapes=shapes, shared_name=shared_name) _enqueue_join(queue, tensor_list_list, enqueue_many) full = (math_ops.cast(math_ops.maximum(0, queue.size() - min_after_dequeue), dtypes.float32) * (1. / (capacity - min_after_dequeue))) # Note that name contains a '/' at the end so we intentionally do not place # a '/' after %s below. summary_name = ( "queue/%sfraction_over_%d_of_%d_full" % (name, min_after_dequeue, capacity - min_after_dequeue)) logging_ops.scalar_summary(summary_name, full) if allow_smaller_final_batch: dequeued = queue.dequeue_up_to(batch_size, name=name) else: dequeued = queue.dequeue_many(batch_size, name=name) dequeued = _deserialize_sparse_tensors(dequeued, sparse_info) # tensors_list was validated to not be empty. return _as_original_type(tensors_list[0], dequeued)
[ "def", "shuffle_batch_join", "(", "tensors_list", ",", "batch_size", ",", "capacity", ",", "min_after_dequeue", ",", "seed", "=", "None", ",", "enqueue_many", "=", "False", ",", "shapes", "=", "None", ",", "allow_smaller_final_batch", "=", "False", ",", "shared_name", "=", "None", ",", "name", "=", "None", ")", ":", "tensor_list_list", "=", "_as_tensor_list_list", "(", "tensors_list", ")", "with", "ops", ".", "op_scope", "(", "_flatten", "(", "tensor_list_list", ")", ",", "name", ",", "\"shuffle_batch_join\"", ")", "as", "name", ":", "tensor_list_list", "=", "_validate_join", "(", "tensor_list_list", ")", "tensor_list_list", ",", "sparse_info", "=", "_serialize_sparse_tensors_join", "(", "tensor_list_list", ",", "enqueue_many", ")", "types", "=", "_dtypes", "(", "tensor_list_list", ")", "shapes", "=", "_shapes", "(", "tensor_list_list", ",", "shapes", ",", "enqueue_many", ")", "queue", "=", "data_flow_ops", ".", "RandomShuffleQueue", "(", "capacity", "=", "capacity", ",", "min_after_dequeue", "=", "min_after_dequeue", ",", "seed", "=", "seed", ",", "dtypes", "=", "types", ",", "shapes", "=", "shapes", ",", "shared_name", "=", "shared_name", ")", "_enqueue_join", "(", "queue", ",", "tensor_list_list", ",", "enqueue_many", ")", "full", "=", "(", "math_ops", ".", "cast", "(", "math_ops", ".", "maximum", "(", "0", ",", "queue", ".", "size", "(", ")", "-", "min_after_dequeue", ")", ",", "dtypes", ".", "float32", ")", "*", "(", "1.", "/", "(", "capacity", "-", "min_after_dequeue", ")", ")", ")", "# Note that name contains a '/' at the end so we intentionally do not place", "# a '/' after %s below.", "summary_name", "=", "(", "\"queue/%sfraction_over_%d_of_%d_full\"", "%", "(", "name", ",", "min_after_dequeue", ",", "capacity", "-", "min_after_dequeue", ")", ")", "logging_ops", ".", "scalar_summary", "(", "summary_name", ",", "full", ")", "if", "allow_smaller_final_batch", ":", "dequeued", "=", "queue", ".", "dequeue_up_to", "(", "batch_size", ",", "name", "=", "name", ")", "else", ":", "dequeued", "=", "queue", ".", "dequeue_many", "(", "batch_size", ",", "name", "=", "name", ")", "dequeued", "=", "_deserialize_sparse_tensors", "(", "dequeued", ",", "sparse_info", ")", "# tensors_list was validated to not be empty.", "return", "_as_original_type", "(", "tensors_list", "[", "0", "]", ",", "dequeued", ")" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/training/input.py#L822-L924
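A hedged TF1-era sketch; read_example is a hypothetical per-file reader, and the queue-based input API this relies on only exists in graph-mode TensorFlow 1.x:

import tensorflow as tf  # assumes a 1.x release that still ships tf.train queues

examples = [read_example(f) for f in ('a.tfrecord', 'b.tfrecord')]  # hypothetical
batch = tf.train.shuffle_batch_join(
    [(example,) for example in examples],  # one tuple of tensors per thread
    batch_size=32, capacity=2000, min_after_dequeue=1000)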
esa/pykep
b410363653623730b577de257c04b0e0289f2014
pykep/trajopt/gym/_cassini1.py
python
_cassini1_udp.__init__
(self)
Write Me
Write Me
[ "Write", "Me" ]
def __init__(self): """ Write Me """ super().__init__( seq=_seq_cassini1, t0=[-1000., 0.], tof=[[30, 400], [100, 470], [30, 400], [400, 2000], [1000, 6000]], vinf=3., tof_encoding='direct', orbit_insertion=True, e_target=0.98, rp_target=108950000)
[ "def", "__init__", "(", "self", ")", ":", "super", "(", ")", ".", "__init__", "(", "seq", "=", "_seq_cassini1", ",", "t0", "=", "[", "-", "1000.", ",", "0.", "]", ",", "tof", "=", "[", "[", "30", ",", "400", "]", ",", "[", "100", ",", "470", "]", ",", "[", "30", ",", "400", "]", ",", "[", "400", ",", "2000", "]", ",", "[", "1000", ",", "6000", "]", "]", ",", "vinf", "=", "3.", ",", "tof_encoding", "=", "'direct'", ",", "orbit_insertion", "=", "True", ",", "e_target", "=", "0.98", ",", "rp_target", "=", "108950000", ")" ]
https://github.com/esa/pykep/blob/b410363653623730b577de257c04b0e0289f2014/pykep/trajopt/gym/_cassini1.py#L17-L29
MythTV/mythtv
d282a209cb8be85d036f85a62a8ec971b67d45f4
mythtv/programs/scripts/internetcontent/nv_python_libs/dailymotion/dailymotion_api.py
python
OutStreamEncoder.write
(self, obj)
Wraps the output stream, encoding Unicode strings with the specified encoding
Wraps the output stream, encoding Unicode strings with the specified encoding
[ "Wraps", "the", "output", "stream", "encoding", "Unicode", "strings", "with", "the", "specified", "encoding" ]
def write(self, obj): """Wraps the output stream, encoding Unicode strings with the specified encoding""" if isinstance(obj, str): obj = obj.encode(self.encoding) self.out.buffer.write(obj)
[ "def", "write", "(", "self", ",", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "str", ")", ":", "obj", "=", "obj", ".", "encode", "(", "self", ".", "encoding", ")", "self", ".", "out", ".", "buffer", ".", "write", "(", "obj", ")" ]
https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/programs/scripts/internetcontent/nv_python_libs/dailymotion/dailymotion_api.py#L59-L63
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/mhlib.py
python
Message.__init__
(self, f, n, fp = None)
Constructor.
Constructor.
[ "Constructor", "." ]
def __init__(self, f, n, fp = None): """Constructor.""" self.folder = f self.number = n if fp is None: path = f.getmessagefilename(n) fp = open(path, 'r') mimetools.Message.__init__(self, fp)
[ "def", "__init__", "(", "self", ",", "f", ",", "n", ",", "fp", "=", "None", ")", ":", "self", ".", "folder", "=", "f", "self", ".", "number", "=", "n", "if", "fp", "is", "None", ":", "path", "=", "f", ".", "getmessagefilename", "(", "n", ")", "fp", "=", "open", "(", "path", ",", "'r'", ")", "mimetools", ".", "Message", ".", "__init__", "(", "self", ",", "fp", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/mhlib.py#L665-L672
flann-lib/flann
1d04523268c388dabf1c0865d69e1b638c8c7d9d
src/python/pyflann/index.py
python
FLANN.save_index
(self, filename)
This saves the index to a disk file.
This saves the index to a disk file.
[ "This", "saves", "the", "index", "to", "a", "disk", "file", "." ]
def save_index(self, filename): """ This saves the index to a disk file. """ if self.__curindex is not None: flann.save_index[self.__curindex_type]( self.__curindex, c_char_p(to_bytes(filename)))
[ "def", "save_index", "(", "self", ",", "filename", ")", ":", "if", "self", ".", "__curindex", "is", "not", "None", ":", "flann", ".", "save_index", "[", "self", ".", "__curindex_type", "]", "(", "self", ".", "__curindex", ",", "c_char_p", "(", "to_bytes", "(", "filename", ")", ")", ")" ]
https://github.com/flann-lib/flann/blob/1d04523268c388dabf1c0865d69e1b638c8c7d9d/src/python/pyflann/index.py#L184-L190
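A hedged pyflann sketch: build an index over random points, then persist it; note that save_index is a no-op until build_index has populated the internal index:

import numpy as np
from pyflann import FLANN

flann = FLANN()
points = np.random.rand(1000, 3).astype(np.float32)
flann.build_index(points)          # creates the internal index
flann.save_index('index.flann')    # writes the index to disk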
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/idlelib/searchengine.py
python
search_reverse
(prog, chars, col)
return found
Search backwards and return an re match object or None. This is done by searching forwards until there is no match. Prog: compiled re object with a search method returning a match. Chars: line of text, without \\n. Col: stop index for the search; the limit for match.end().
Search backwards and return an re match object or None.
[ "Search", "backwards", "and", "return", "an", "re", "match", "object", "or", "None", "." ]
def search_reverse(prog, chars, col): '''Search backwards and return an re match object or None. This is done by searching forwards until there is no match. Prog: compiled re object with a search method returning a match. Chars: line of text, without \\n. Col: stop index for the search; the limit for match.end(). ''' m = prog.search(chars) if not m: return None found = None i, j = m.span() # m.start(), m.end() == match slice indexes while i < col and j <= col: found = m if i == j: j = j+1 m = prog.search(chars, j) if not m: break i, j = m.span() return found
[ "def", "search_reverse", "(", "prog", ",", "chars", ",", "col", ")", ":", "m", "=", "prog", ".", "search", "(", "chars", ")", "if", "not", "m", ":", "return", "None", "found", "=", "None", "i", ",", "j", "=", "m", ".", "span", "(", ")", "# m.start(), m.end() == match slice indexes", "while", "i", "<", "col", "and", "j", "<=", "col", ":", "found", "=", "m", "if", "i", "==", "j", ":", "j", "=", "j", "+", "1", "m", "=", "prog", ".", "search", "(", "chars", ",", "j", ")", "if", "not", "m", ":", "break", "i", ",", "j", "=", "m", ".", "span", "(", ")", "return", "found" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/idlelib/searchengine.py#L192-L213
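A worked example of the column limit: the last match whose end does not pass the stop column is returned:

import re

prog = re.compile('ab')
m = search_reverse(prog, 'ab ab ab', 6)
print(m.span())  # (3, 5): the later match at (6, 8) ends past col 6, so it is skipped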
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/codedeploy/layer1.py
python
CodeDeployConnection.batch_get_deployments
(self, deployment_ids=None)
return self.make_request(action='BatchGetDeployments', body=json.dumps(params))
Gets information about one or more deployments. :type deployment_ids: list :param deployment_ids: A list of deployment IDs, with multiple deployment IDs separated by spaces.
Gets information about one or more deployments.
[ "Gets", "information", "about", "one", "or", "more", "deployments", "." ]
def batch_get_deployments(self, deployment_ids=None): """ Gets information about one or more deployments. :type deployment_ids: list :param deployment_ids: A list of deployment IDs, with multiple deployment IDs separated by spaces. """ params = {} if deployment_ids is not None: params['deploymentIds'] = deployment_ids return self.make_request(action='BatchGetDeployments', body=json.dumps(params))
[ "def", "batch_get_deployments", "(", "self", ",", "deployment_ids", "=", "None", ")", ":", "params", "=", "{", "}", "if", "deployment_ids", "is", "not", "None", ":", "params", "[", "'deploymentIds'", "]", "=", "deployment_ids", "return", "self", ".", "make_request", "(", "action", "=", "'BatchGetDeployments'", ",", "body", "=", "json", ".", "dumps", "(", "params", ")", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/codedeploy/layer1.py#L178-L191
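A hedged boto2 sketch; the region and deployment IDs are placeholders, and AWS credentials are assumed to be configured:

import boto.codedeploy

conn = boto.codedeploy.connect_to_region('us-east-1')
info = conn.batch_get_deployments(
    deployment_ids=['d-EXAMPLE111', 'd-EXAMPLE222'])  # placeholder IDs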
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/lib2to3/btm_utils.py
python
rec_test
(sequence, test_func)
Tests test_func on all items of sequence and items of included sub-iterables
Tests test_func on all items of sequence and items of included sub-iterables
[ "Tests", "test_func", "on", "all", "items", "of", "sequence", "and", "items", "of", "included", "sub", "-", "iterables" ]
def rec_test(sequence, test_func): """Tests test_func on all items of sequence and items of included sub-iterables""" for x in sequence: if isinstance(x, (list, tuple)): yield from rec_test(x, test_func) else: yield test_func(x)
[ "def", "rec_test", "(", "sequence", ",", "test_func", ")", ":", "for", "x", "in", "sequence", ":", "if", "isinstance", "(", "x", ",", "(", "list", ",", "tuple", ")", ")", ":", "yield", "from", "rec_test", "(", "x", ",", "test_func", ")", "else", ":", "yield", "test_func", "(", "x", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/lib2to3/btm_utils.py#L274-L281
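A worked example showing the recursive flattening:

flags = list(rec_test([1, [2, 3], (4,)], lambda x: x % 2 == 0))
print(flags)  # [False, True, False, True] -- nesting is flattened, order kept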
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/ma/core.py
python
MaskedArray.__iadd__
(self, other)
return self
Add other to self in-place.
Add other to self in-place.
[ "Add", "other", "to", "self", "in", "-", "place", "." ]
def __iadd__(self, other): "Add other to self in-place." m = getmask(other) if self._mask is nomask: if m is not nomask and m.any(): self._mask = make_mask_none(self.shape, self.dtype) self._mask += m else: if m is not nomask: self._mask += m ndarray.__iadd__(self._data, np.where(self._mask, 0, getdata(other))) return self
[ "def", "__iadd__", "(", "self", ",", "other", ")", ":", "m", "=", "getmask", "(", "other", ")", "if", "self", ".", "_mask", "is", "nomask", ":", "if", "m", "is", "not", "nomask", "and", "m", ".", "any", "(", ")", ":", "self", ".", "_mask", "=", "make_mask_none", "(", "self", ".", "shape", ",", "self", ".", "dtype", ")", "self", ".", "_mask", "+=", "m", "else", ":", "if", "m", "is", "not", "nomask", ":", "self", ".", "_mask", "+=", "m", "ndarray", ".", "__iadd__", "(", "self", ".", "_data", ",", "np", ".", "where", "(", "self", ".", "_mask", ",", "0", ",", "getdata", "(", "other", ")", ")", ")", "return", "self" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/ma/core.py#L3741-L3752
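The observable behavior, sketched with numpy.ma: masked slots keep their mask, and their underlying data is shielded from the addition by the np.where:

import numpy as np
import numpy.ma as ma

a = ma.array([1, 2, 3], mask=[False, True, False])
a += np.array([10, 10, 10])
print(a)  # [11 -- 13]: the masked slot is untouched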
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/python/ops/data_flow_ops.py
python
MapStagingArea._pop
(self, key, indices=None, name=None)
return key, self._get_return_value(result, indices)
Remove and return the (key, value) pair associated with the given key from the staging area. If the key is not in the staging area, this method will block until the associated (key, value) is inserted. Args: key: Key associated with the required data indices: Partial list of tensors to retrieve (optional). A list of integer or string indices. String indices are only valid if the Staging Area has names associated with it. name: A name for the operation (optional) Returns: The key and the retrieved values
Remove and return the (key, value) pair associated with the given key from the staging area. If the key is not in the staging area, this method will block until the associated (key, value) is inserted.
[ "Remove", "and", "return", "the", "(", "key", "value", ")", "pair", "associated", "with", "the", "given", "key", "from", "the", "staging", "area", ".", "If", "the", "key", "is", "not", "in", "the", "staging", "area", "this", "method", "will", "block", "until", "the", "associated", "(", "key", "value", ")", "is", "inserted", "." ]
def _pop(self, key, indices=None, name=None): """ Remove and return the (key, value) pair associated with the given key from the staging area. If the key is not in the staging area, this method will block until the associated (key, value) is inserted. Args: key: Key associated with the required data indices: Partial list of tensors to retrieve (optional). A list of integer or string indices. String indices are only valid if the Staging Area has names associated with it. name: A name for the operation (optional) Returns: The key and the retrieved values """ if name is None: name = "%s_get" % self._name indices, dtypes = self._get_indices_and_dtypes(indices) with ops.colocate_with(self._coloc_op): result = self._pop_fn(key, shared_name=self._name, indices=indices, dtypes=dtypes, name=name, capacity=self._capacity, memory_limit=self._memory_limit) return key, self._get_return_value(result, indices)
[ "def", "_pop", "(", "self", ",", "key", ",", "indices", "=", "None", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "name", "=", "\"%s_get\"", "%", "self", ".", "_name", "indices", ",", "dtypes", "=", "self", ".", "_get_indices_and_dtypes", "(", "indices", ")", "with", "ops", ".", "colocate_with", "(", "self", ".", "_coloc_op", ")", ":", "result", "=", "self", ".", "_pop_fn", "(", "key", ",", "shared_name", "=", "self", ".", "_name", ",", "indices", "=", "indices", ",", "dtypes", "=", "dtypes", ",", "name", "=", "name", ",", "capacity", "=", "self", ".", "_capacity", ",", "memory_limit", "=", "self", ".", "_memory_limit", ")", "return", "key", ",", "self", ".", "_get_return_value", "(", "result", ",", "indices", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/data_flow_ops.py#L2026-L2057
envoyproxy/envoy
65541accdafe255e72310b4298d646e091da2d80
tools/api_proto_plugin/type_context.py
python
TypeContext.leading_detached_comments
(self)
return self.source_code_info.leading_detached_comments_path_lookup(self.path)
Leading detached comments for type context.
Leading detached comments for type context.
[ "Leading", "detached", "comments", "for", "type", "context", "." ]
def leading_detached_comments(self): """Leading detached comments for type context.""" return self.source_code_info.leading_detached_comments_path_lookup(self.path)
[ "def", "leading_detached_comments", "(", "self", ")", ":", "return", "self", ".", "source_code_info", ".", "leading_detached_comments_path_lookup", "(", "self", ".", "path", ")" ]
https://github.com/envoyproxy/envoy/blob/65541accdafe255e72310b4298d646e091da2d80/tools/api_proto_plugin/type_context.py#L267-L269
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/python/ops/random_ops.py
python
random_crop
(value, size, seed=None, name=None)
Randomly crops a tensor to a given size. Slices a shape `size` portion out of `value` at a uniformly chosen offset. Requires `value.shape >= size`. If a dimension should not be cropped, pass the full size of that dimension. For example, RGB images can be cropped with `size = [crop_height, crop_width, 3]`. Args: value: Input tensor to crop. size: 1-D tensor with size the rank of `value`. seed: Python integer. Used to create a random seed. See [`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed) for behavior. name: A name for this operation (optional). Returns: A cropped tensor of the same rank as `value` and shape `size`.
Randomly crops a tensor to a given size.
[ "Randomly", "crops", "a", "tensor", "to", "a", "given", "size", "." ]
def random_crop(value, size, seed=None, name=None): """Randomly crops a tensor to a given size. Slices a shape `size` portion out of `value` at a uniformly chosen offset. Requires `value.shape >= size`. If a dimension should not be cropped, pass the full size of that dimension. For example, RGB images can be cropped with `size = [crop_height, crop_width, 3]`. Args: value: Input tensor to crop. size: 1-D tensor with size the rank of `value`. seed: Python integer. Used to create a random seed. See [`set_random_seed`](../../api_docs/python/constant_op.md#set_random_seed) for behavior. name: A name for this operation (optional). Returns: A cropped tensor of the same rank as `value` and shape `size`. """ # TODO(shlens): Implement edge case to guarantee output size dimensions. # If size > value.shape, zero pad the result so that it always has shape # exactly size. with ops.op_scope([value, size], name, "random_crop") as name: value = ops.convert_to_tensor(value, name="value") size = ops.convert_to_tensor(size, dtype=dtypes.int32, name="size") shape = array_ops.shape(value) check = logging_ops.Assert( math_ops.reduce_all(shape >= size), ["Need value.shape >= size, got ", shape, size]) shape = control_flow_ops.with_dependencies([check], shape) limit = shape - size + 1 offset = random_uniform( array_ops.shape(shape), dtype=size.dtype, maxval=size.dtype.max, seed=seed) % limit return array_ops.slice(value, offset, size, name=name)
[ "def", "random_crop", "(", "value", ",", "size", ",", "seed", "=", "None", ",", "name", "=", "None", ")", ":", "# TODO(shlens): Implement edge case to guarantee output size dimensions.", "# If size > value.shape, zero pad the result so that it always has shape", "# exactly size.", "with", "ops", ".", "op_scope", "(", "[", "value", ",", "size", "]", ",", "name", ",", "\"random_crop\"", ")", "as", "name", ":", "value", "=", "ops", ".", "convert_to_tensor", "(", "value", ",", "name", "=", "\"value\"", ")", "size", "=", "ops", ".", "convert_to_tensor", "(", "size", ",", "dtype", "=", "dtypes", ".", "int32", ",", "name", "=", "\"size\"", ")", "shape", "=", "array_ops", ".", "shape", "(", "value", ")", "check", "=", "logging_ops", ".", "Assert", "(", "math_ops", ".", "reduce_all", "(", "shape", ">=", "size", ")", ",", "[", "\"Need value.shape >= size, got \"", ",", "shape", ",", "size", "]", ")", "shape", "=", "control_flow_ops", ".", "with_dependencies", "(", "[", "check", "]", ",", "shape", ")", "limit", "=", "shape", "-", "size", "+", "1", "offset", "=", "random_uniform", "(", "array_ops", ".", "shape", "(", "shape", ")", ",", "dtype", "=", "size", ".", "dtype", ",", "maxval", "=", "size", ".", "dtype", ".", "max", ",", "seed", "=", "seed", ")", "%", "limit", "return", "array_ops", ".", "slice", "(", "value", ",", "offset", ",", "size", ",", "name", "=", "name", ")" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/python/ops/random_ops.py#L288-L326
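
A minimal usage sketch for the random_crop record above, assuming the TF 1.x-era public API (tf.random_crop, tf.Session) matching the vintage of the source repo; the tensor shape, dtype, and seed are illustrative:

    # Usage sketch: crop a 3x3 patch from a 5x5x3 tensor.
    # Passing the full channel size (3) leaves that dimension uncropped.
    import numpy as np
    import tensorflow as tf  # assumes a TF 1.x-era API, as in the source repo

    image = tf.constant(np.arange(5 * 5 * 3).reshape(5, 5, 3), dtype=tf.int32)
    patch = tf.random_crop(image, size=[3, 3, 3], seed=42)

    with tf.Session() as sess:
        print(sess.run(patch).shape)  # (3, 3, 3)
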
mapnik/mapnik
f3da900c355e1d15059c4a91b00203dcc9d9f0ef
scons/scons-local-4.1.0/SCons/Platform/virtualenv.py
python
_running_in_virtualenv
()
return (hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))
Returns True if scons is executed within a virtualenv
Returns True if scons is executed within a virtualenv
[ "Returns", "True", "if", "scons", "is", "executed", "within", "a", "virtualenv" ]
def _running_in_virtualenv():
    """Returns True if scons is executed within a virtualenv"""
    # see https://stackoverflow.com/a/42580137
    return (hasattr(sys, 'real_prefix') or
            (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))
[ "def", "_running_in_virtualenv", "(", ")", ":", "# see https://stackoverflow.com/a/42580137", "return", "(", "hasattr", "(", "sys", ",", "'real_prefix'", ")", "or", "(", "hasattr", "(", "sys", ",", "'base_prefix'", ")", "and", "sys", ".", "base_prefix", "!=", "sys", ".", "prefix", ")", ")" ]
https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Platform/virtualenv.py#L47-L51
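
The check above combines the legacy virtualenv marker (sys.real_prefix) with the PEP 405 venv comparison (sys.base_prefix differing from sys.prefix). A standalone sketch of the same test, runnable outside SCons with only the standard library; the __main__ guard is illustrative:

    # Standalone version of the virtualenv check above; stdlib only.
    import sys

    def running_in_virtualenv():
        # Classic virtualenv sets sys.real_prefix; PEP 405 venvs make
        # sys.base_prefix differ from sys.prefix.
        return (hasattr(sys, 'real_prefix') or
                (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix))

    if __name__ == '__main__':
        print('virtualenv' if running_in_virtualenv() else 'system interpreter')
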
llvm/llvm-project
ffa6262cb4e2a335d26416fad39a581b4f98c5f4
lldb/examples/python/mach_o.py
python
TerminalColors.magenta
(self, fg=True)
return ''
Set the foreground or background color to magenta. The foreground color will be set if "fg" tests True. The background color will be set if "fg" tests False.
Set the foreground or background color to magenta. The foreground color will be set if "fg" tests True. The background color will be set if "fg" tests False.
[ "Set", "the", "foreground", "or", "background", "color", "to", "magenta", ".", "The", "foreground", "color", "will", "be", "set", "if", "fg", "tests", "True", ".", "The", "background", "color", "will", "be", "set", "if", "fg", "tests", "False", "." ]
def magenta(self, fg=True):
    '''Set the foreground or background color to magenta.
    The foreground color will be set if "fg" tests True. The
    background color will be set if "fg" tests False.'''
    if self.enabled:
        if fg:
            return "\x1b[35m"
        else:
            return "\x1b[45m"
    return ''
[ "def", "magenta", "(", "self", ",", "fg", "=", "True", ")", ":", "if", "self", ".", "enabled", ":", "if", "fg", ":", "return", "\"\\x1b[35m\"", "else", ":", "return", "\"\\x1b[45m\"", "return", "''" ]
https://github.com/llvm/llvm-project/blob/ffa6262cb4e2a335d26416fad39a581b4f98c5f4/lldb/examples/python/mach_o.py#L321-L329
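
The method above returns ANSI SGR escape sequences (code 35 for magenta foreground, 45 for magenta background) when coloring is enabled, and an empty string otherwise. A minimal sketch that exercises the same codes without the full TerminalColors class; the colorize() helper is a hypothetical convenience, not part of mach_o.py:

    # Demonstrates the ANSI SGR codes used by TerminalColors.magenta above.
    # colorize() is a hypothetical helper, not from mach_o.py.
    MAGENTA_FG = "\x1b[35m"  # SGR 35: magenta foreground
    MAGENTA_BG = "\x1b[45m"  # SGR 45: magenta background
    RESET = "\x1b[0m"        # SGR 0: reset all attributes

    def colorize(text, fg=True):
        return (MAGENTA_FG if fg else MAGENTA_BG) + text + RESET

    print(colorize("foreground magenta"))
    print(colorize("background magenta", fg=False))
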
zcash/zcash
944453065b40f6bed6bd59c4ff01c4d123c6cdb7
contrib/devtools/security-check.py
python
check_PE_DYNAMIC_BASE
(executable)
return (bits & reqbits) == reqbits
PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)
PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)
[ "PIE", ":", "DllCharacteristics", "bit", "0x40", "signifies", "dynamicbase", "(", "ASLR", ")" ]
def check_PE_DYNAMIC_BASE(executable):
    '''PIE: DllCharacteristics bit 0x40 signifies dynamicbase (ASLR)'''
    (arch,bits) = get_PE_dll_characteristics(executable)
    reqbits = IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE
    return (bits & reqbits) == reqbits
[ "def", "check_PE_DYNAMIC_BASE", "(", "executable", ")", ":", "(", "arch", ",", "bits", ")", "=", "get_PE_dll_characteristics", "(", "executable", ")", "reqbits", "=", "IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE", "return", "(", "bits", "&", "reqbits", ")", "==", "reqbits" ]
https://github.com/zcash/zcash/blob/944453065b40f6bed6bd59c4ff01c4d123c6cdb7/contrib/devtools/security-check.py#L141-L145
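
The check above masks the PE optional header's DllCharacteristics field against bit 0x40 (IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE). A sketch of the same bit test using the third-party pefile module in place of the repo's get_PE_dll_characteristics helper; pefile and the example path are assumptions, not part of the original script:

    # Same DYNAMIC_BASE (ASLR) bit test as above, but via the pefile module
    # instead of the repo's helper; pefile is an assumed dependency here.
    import pefile

    IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE = 0x0040

    def check_pe_dynamic_base(path):
        pe = pefile.PE(path, fast_load=True)
        bits = pe.OPTIONAL_HEADER.DllCharacteristics
        return bool(bits & IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE)

    print(check_pe_dynamic_base('example.exe'))  # path is illustrative
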