nwo | sha | path | language | identifier | parameters | argument_list | return_statement | docstring | docstring_summary | docstring_tokens | function | function_tokens | url |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bristolcrypto/SPDZ-2
|
721abfae849625a02ea49aabc534f9cf41ca643f
|
Compiler/path_oram.py
|
python
|
shuffle
|
(x, config=None, value_type=sgf2n, reverse=False)
|
return config
|
Simulate secure shuffling with Waksman network for 2 players.
Returns the network switching config so it may be re-used later.
|
Simulate secure shuffling with Waksman network for 2 players.
|
[
"Simulate",
"secure",
"shuffling",
"with",
"Waksman",
"network",
"for",
"2",
"players",
"."
] |
def shuffle(x, config=None, value_type=sgf2n, reverse=False):
    """ Simulate secure shuffling with Waksman network for 2 players.
    Returns the network switching config so it may be re-used later. """
    n = len(x)
    if n & (n-1) != 0:
        raise CompilerError('shuffle requires n a power of 2')
    if config is None:
        config = permutation.configure_waksman(permutation.random_perm(n))
        for i,c in enumerate(config):
            config[i] = [value_type(b) for b in c]
    permutation.waksman(x, config, reverse=reverse)
    permutation.waksman(x, config, reverse=reverse)
    return config
|
[
"def",
"shuffle",
"(",
"x",
",",
"config",
"=",
"None",
",",
"value_type",
"=",
"sgf2n",
",",
"reverse",
"=",
"False",
")",
":",
"n",
"=",
"len",
"(",
"x",
")",
"if",
"n",
"&",
"(",
"n",
"-",
"1",
")",
"!=",
"0",
":",
"raise",
"CompilerError",
"(",
"'shuffle requires n a power of 2'",
")",
"if",
"config",
"is",
"None",
":",
"config",
"=",
"permutation",
".",
"configure_waksman",
"(",
"permutation",
".",
"random_perm",
"(",
"n",
")",
")",
"for",
"i",
",",
"c",
"in",
"enumerate",
"(",
"config",
")",
":",
"config",
"[",
"i",
"]",
"=",
"[",
"value_type",
"(",
"b",
")",
"for",
"b",
"in",
"c",
"]",
"permutation",
".",
"waksman",
"(",
"x",
",",
"config",
",",
"reverse",
"=",
"reverse",
")",
"permutation",
".",
"waksman",
"(",
"x",
",",
"config",
",",
"reverse",
"=",
"reverse",
")",
"return",
"config"
] |
https://github.com/bristolcrypto/SPDZ-2/blob/721abfae849625a02ea49aabc534f9cf41ca643f/Compiler/path_oram.py#L112-L127
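A note on the power-of-two guard in the function above: `n & (n-1) == 0` holds exactly when `n` has a single set bit, which is why the compiler can reject non-power-of-two sizes with one expression. A minimal standalone sketch of the same test:

def is_power_of_two(n):
    # Clearing the lowest set bit of a power of two leaves zero.
    return n > 0 and n & (n - 1) == 0

assert is_power_of_two(8) and not is_power_of_two(12)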
|
|
Kitware/ParaView
|
f760af9124ff4634b23ebbeab95a4f56e0261955
|
ThirdParty/cinema/paraview/tpl/cinema_python/database/raster_wrangler.py
|
python
|
RasterWrangler.valuereader
|
(self, fname, shape=None)
|
Opens a value image file and returns it as either a color buffer
or a floating point array (depending on how the image was exported).
|
Opens a value image file and returns it as either a color buffer
or a floating point array (depending on how the image was exported).
|
[
"Opens",
"a",
"value",
"image",
"file",
"and",
"returns",
"it",
"as",
"either",
"a",
"color",
"buffer",
"or",
"a",
"floating",
"point",
"array",
"(",
"depending",
"on",
"how",
"the",
"image",
"was",
"exported",
")",
"."
] |
def valuereader(self, fname, shape=None):
    """ Opens a value image file and returns it as either a color buffer
    or a floating point array (depending on how the image was exported)."""
    baseName, ext = os.path.splitext(fname)
    if ext == self.floatExtension():
        # Treat as single channel floating point buffer.
        return self.zreader(fname, shape)
    else:
        # Treat as a RGB buffer
        return self.rgbreader(fname)
|
[
"def",
"valuereader",
"(",
"self",
",",
"fname",
",",
"shape",
"=",
"None",
")",
":",
"baseName",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"fname",
")",
"if",
"ext",
"==",
"self",
".",
"floatExtension",
"(",
")",
":",
"# Treat as single channel floating point buffer.",
"return",
"self",
".",
"zreader",
"(",
"fname",
",",
"shape",
")",
"else",
":",
"# Treat as a RGB buffer",
"return",
"self",
".",
"rgbreader",
"(",
"fname",
")"
] |
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/ThirdParty/cinema/paraview/tpl/cinema_python/database/raster_wrangler.py#L275-L284
|
||
google/mozc
|
7329757e1ad30e327c1ae823a8302c79482d6b9c
|
src/build_tools/embed_file.py
|
python
|
_FormatAsUint64LittleEndian
|
(s)
|
return six.b('0x%s') % binascii.b2a_hex(s)
|
Formats a string as uint64 value in little endian order.
|
Formats a string as uint64 value in little endian order.
|
[
"Formats",
"a",
"string",
"as",
"uint64",
"value",
"in",
"little",
"endian",
"order",
"."
] |
def _FormatAsUint64LittleEndian(s):
    """Formats a string as uint64 value in little endian order."""
    for _ in range(len(s), 8):
        s += six.b('\0')
    s = s[::-1]  # Reverse the string
    return six.b('0x%s') % binascii.b2a_hex(s)
|
[
"def",
"_FormatAsUint64LittleEndian",
"(",
"s",
")",
":",
"for",
"_",
"in",
"range",
"(",
"len",
"(",
"s",
")",
",",
"8",
")",
":",
"s",
"+=",
"six",
".",
"b",
"(",
"'\\0'",
")",
"s",
"=",
"s",
"[",
":",
":",
"-",
"1",
"]",
"# Reverse the string",
"return",
"six",
".",
"b",
"(",
"'0x%s'",
")",
"%",
"binascii",
".",
"b2a_hex",
"(",
"s",
")"
] |
https://github.com/google/mozc/blob/7329757e1ad30e327c1ae823a8302c79482d6b9c/src/build_tools/embed_file.py#L51-L56
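A plain-Python sketch of the same transformation, dropping the `six` dependency (which exists here only for Python 2/3 bytes handling): pad to eight bytes with NULs, reverse, hex-encode.

import binascii

def format_as_uint64_le(s):
    # Pad to 8 bytes, then reverse so the first input byte becomes
    # the least significant byte of the resulting literal.
    s = s + b'\0' * (8 - len(s))
    return b'0x' + binascii.b2a_hex(s[::-1])

assert format_as_uint64_le(b'abc') == b'0x0000000000636261'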
|
|
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/tensorflow_model_optimization/python/core/quantization/keras/layers/conv_batchnorm.py
|
python
|
_ConvBatchNormMixin._get_config
|
(self, conv_config)
|
return dict(
list(conv_config.items()) + list(batchnorm_config.items()) +
list(config.items()))
|
All shared get_config logic for fused layers.
|
All shared get_config logic for fused layers.
|
[
"All",
"shared",
"get_config",
"logic",
"for",
"fused",
"layers",
"."
] |
def _get_config(self, conv_config):
    """All shared get_config logic for fused layers."""
    batchnorm_config = self.batchnorm.get_config()
    # Both BatchNorm and Conv2D have config items from base layer. Since
    # _ConvBatchNorm2D inherits from Conv2D, we should use base layer config
    # items from self, rather than self.batchnorm.
    # For now, deleting 'name', but ideally all base_config items should be
    # removed.
    # TODO(pulkitb): Raise error if base_configs in both layers incompatible.
    batchnorm_config.pop('name')

    is_advanced_activation = isinstance(self.post_activation,
                                        keras.layers.Layer)
    if is_advanced_activation:
        serialized_activation = keras.utils.serialize_keras_object(
            self.post_activation)
    else:
        serialized_activation = activations.serialize(self.post_activation)
    config = {
        'is_quantized': self.is_quantized,
        'post_activation': serialized_activation
    }

    return dict(
        list(conv_config.items()) + list(batchnorm_config.items()) +
        list(config.items()))
|
[
"def",
"_get_config",
"(",
"self",
",",
"conv_config",
")",
":",
"batchnorm_config",
"=",
"self",
".",
"batchnorm",
".",
"get_config",
"(",
")",
"# Both BatchNorm and Conv2D have config items from base layer. Since",
"# _ConvBatchNorm2D inherits from Conv2D, we should use base layer config",
"# items from self, rather than self.batchnorm.",
"# For now, deleting 'name', but ideally all base_config items should be",
"# removed.",
"# TODO(pulkitb): Raise error if base_configs in both layers incompatible.",
"batchnorm_config",
".",
"pop",
"(",
"'name'",
")",
"is_advanced_activation",
"=",
"isinstance",
"(",
"self",
".",
"post_activation",
",",
"keras",
".",
"layers",
".",
"Layer",
")",
"if",
"is_advanced_activation",
":",
"serialized_activation",
"=",
"keras",
".",
"utils",
".",
"serialize_keras_object",
"(",
"self",
".",
"post_activation",
")",
"else",
":",
"serialized_activation",
"=",
"activations",
".",
"serialize",
"(",
"self",
".",
"post_activation",
")",
"config",
"=",
"{",
"'is_quantized'",
":",
"self",
".",
"is_quantized",
",",
"'post_activation'",
":",
"serialized_activation",
"}",
"return",
"dict",
"(",
"list",
"(",
"conv_config",
".",
"items",
"(",
")",
")",
"+",
"list",
"(",
"batchnorm_config",
".",
"items",
"(",
")",
")",
"+",
"list",
"(",
"config",
".",
"items",
"(",
")",
")",
")"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/tensorflow_model_optimization/python/core/quantization/keras/layers/conv_batchnorm.py#L123-L149
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python3/src/Lib/threading.py
|
python
|
Thread._delete
|
(self)
|
Remove current thread from the dict of currently running threads.
|
Remove current thread from the dict of currently running threads.
|
[
"Remove",
"current",
"thread",
"from",
"the",
"dict",
"of",
"currently",
"running",
"threads",
"."
] |
def _delete(self):
    "Remove current thread from the dict of currently running threads."
    with _active_limbo_lock:
        del _active[get_ident()]
|
[
"def",
"_delete",
"(",
"self",
")",
":",
"with",
"_active_limbo_lock",
":",
"del",
"_active",
"[",
"get_ident",
"(",
")",
"]"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/threading.py#L1012-L1015
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scikit-learn/py3/sklearn/utils/estimator_checks.py
|
python
|
_set_check_estimator_ids
|
(obj)
|
Create pytest ids for checks.
When `obj` is an estimator, this returns the pprint version of the
estimator (with `print_changed_only=True`). When `obj` is a function, the
name of the function is returned with its keyword arguments.
`_set_check_estimator_ids` is designed to be used as the `id` in
`pytest.mark.parametrize` where `check_estimator(..., generate_only=True)`
is yielding estimators and checks.
Parameters
----------
obj : estimator or function
Items generated by `check_estimator`
Returns
-------
id : string or None
See also
--------
check_estimator
|
Create pytest ids for checks.
|
[
"Create",
"pytest",
"ids",
"for",
"checks",
"."
] |
def _set_check_estimator_ids(obj):
    """Create pytest ids for checks.
    When `obj` is an estimator, this returns the pprint version of the
    estimator (with `print_changed_only=True`). When `obj` is a function, the
    name of the function is returned with its keyword arguments.
    `_set_check_estimator_ids` is designed to be used as the `id` in
    `pytest.mark.parametrize` where `check_estimator(..., generate_only=True)`
    is yielding estimators and checks.
    Parameters
    ----------
    obj : estimator or function
        Items generated by `check_estimator`
    Returns
    -------
    id : string or None
    See also
    --------
    check_estimator
    """
    if callable(obj):
        if not isinstance(obj, partial):
            return obj.__name__
        if not obj.keywords:
            return obj.func.__name__
        kwstring = "".join(["{}={}".format(k, v)
                            for k, v in obj.keywords.items()])
        return "{}({})".format(obj.func.__name__, kwstring)
    if hasattr(obj, "get_params"):
        with config_context(print_changed_only=True):
            return re.sub(r"\s", "", str(obj))
|
[
"def",
"_set_check_estimator_ids",
"(",
"obj",
")",
":",
"if",
"callable",
"(",
"obj",
")",
":",
"if",
"not",
"isinstance",
"(",
"obj",
",",
"partial",
")",
":",
"return",
"obj",
".",
"__name__",
"if",
"not",
"obj",
".",
"keywords",
":",
"return",
"obj",
".",
"func",
".",
"__name__",
"kwstring",
"=",
"\"\"",
".",
"join",
"(",
"[",
"\"{}={}\"",
".",
"format",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"obj",
".",
"keywords",
".",
"items",
"(",
")",
"]",
")",
"return",
"\"{}({})\"",
".",
"format",
"(",
"obj",
".",
"func",
".",
"__name__",
",",
"kwstring",
")",
"if",
"hasattr",
"(",
"obj",
",",
"\"get_params\"",
")",
":",
"with",
"config_context",
"(",
"print_changed_only",
"=",
"True",
")",
":",
"return",
"re",
".",
"sub",
"(",
"r\"\\s\"",
",",
"\"\"",
",",
"str",
"(",
"obj",
")",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/utils/estimator_checks.py#L283-L319
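To illustrate the id rules the docstring describes, a hypothetical call (the check name below is made up for the sketch):

from functools import partial

def check_dtypes(name, estimator):  # hypothetical check function
    pass

# Plain callables map to their __name__; partials with keywords get a
# "func(k=v)" style id, keywords concatenated without separators.
_set_check_estimator_ids(check_dtypes)                          # "check_dtypes"
_set_check_estimator_ids(partial(check_dtypes, readonly=True))  # "check_dtypes(readonly=True)"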
|
||
genn-team/genn
|
75e1eb218cafa228bf36ae4613d1ce26e877b12c
|
pygenn/genn_model.py
|
python
|
create_cksf_class
|
(cks_func)
|
return type("", (cksf,), {"__init__": ctor, "__call__": call})
|
Helper function to create function class for calculating sizes
of kernels from connectivity initialiser parameters
Args:
cks_func -- a function which computes the kernel size and takes
one arg "pars" (vector of double)
|
Helper function to create function class for calculating sizes
of kernels from connectivity initialiser parameters
|
[
"Helper",
"function",
"to",
"create",
"function",
"class",
"for",
"calculating",
"sizes",
"of",
"kernels",
"from",
"connectivity",
"initialiser",
"parameters"
] |
def create_cksf_class(cks_func):
    """Helper function to create function class for calculating sizes
    of kernels from connectivity initialiser parameters
    Args:
    cks_func -- a function which computes the kernel size and takes
                one arg "pars" (vector of double)
    """
    cksf = genn_wrapper.InitSparseConnectivitySnippet.CalcKernelSizeFunc

    def ctor(self):
        cksf.__init__(self)

    def call(self, pars):
        return cks_func(pars)

    return type("", (cksf,), {"__init__": ctor, "__call__": call})
|
[
"def",
"create_cksf_class",
"(",
"cks_func",
")",
":",
"cksf",
"=",
"genn_wrapper",
".",
"InitSparseConnectivitySnippet",
".",
"CalcKernelSizeFunc",
"def",
"ctor",
"(",
"self",
")",
":",
"cksf",
".",
"__init__",
"(",
"self",
")",
"def",
"call",
"(",
"self",
",",
"pars",
")",
":",
"return",
"cks_func",
"(",
"pars",
")",
"return",
"type",
"(",
"\"\"",
",",
"(",
"cksf",
",",
")",
",",
"{",
"\"__init__\"",
":",
"ctor",
",",
"\"__call__\"",
":",
"call",
"}",
")"
] |
https://github.com/genn-team/genn/blob/75e1eb218cafa228bf36ae4613d1ce26e877b12c/pygenn/genn_model.py#L1546-L1562
|
|
RamadhanAmizudin/malware
|
2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1
|
Fuzzbunch/fuzzbunch/pluginmanager.py
|
python
|
PluginManager.do_validate
|
(self, *ignore)
|
Validate the current parameter settings
|
Validate the current parameter settings
|
[
"Validate",
"the",
"current",
"parameter",
"settings"
] |
def do_validate(self, *ignore):
    """Validate the current parameter settings"""
    plugin = self.get_active_plugin()
    self.io.print_msg("Checking %s parameters" % plugin.getName())
    self.io.newline()
    if plugin.validate(self.session.get_dirs(), globalvars=self.fb.fbglobalvars) and self.activePlugin.isValid():
        self.io.print_success("Parameters are valid")
    else:
        self.io.print_error("Parameter check failed")
|
[
"def",
"do_validate",
"(",
"self",
",",
"*",
"ignore",
")",
":",
"plugin",
"=",
"self",
".",
"get_active_plugin",
"(",
")",
"self",
".",
"io",
".",
"print_msg",
"(",
"\"Checking %s parameters\"",
"%",
"plugin",
".",
"getName",
"(",
")",
")",
"self",
".",
"io",
".",
"newline",
"(",
")",
"if",
"plugin",
".",
"validate",
"(",
"self",
".",
"session",
".",
"get_dirs",
"(",
")",
",",
"globalvars",
"=",
"self",
".",
"fb",
".",
"fbglobalvars",
")",
"and",
"self",
".",
"activePlugin",
".",
"isValid",
"(",
")",
":",
"self",
".",
"io",
".",
"print_success",
"(",
"\"Parameters are valid\"",
")",
"else",
":",
"self",
".",
"io",
".",
"print_error",
"(",
"\"Parameter check failed\"",
")"
] |
https://github.com/RamadhanAmizudin/malware/blob/2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1/Fuzzbunch/fuzzbunch/pluginmanager.py#L243-L252
|
||
baidu-research/tensorflow-allreduce
|
66d5b855e90b0949e9fa5cca5599fd729a70e874
|
tensorflow/contrib/linalg/python/ops/linear_operator.py
|
python
|
LinearOperator.assert_positive_definite
|
(self, name="assert_positive_definite")
|
Returns an `Op` that asserts this operator is positive definite.
Here, positive definite means that the quadratic form `x^H A x` has positive
real part for all nonzero `x`. Note that we do not require the operator to
be self-adjoint to be positive definite.
Args:
name: A name to give this `Op`.
Returns:
An `Assert` `Op`, that, when run, will raise an `InvalidArgumentError` if
the operator is not positive definite.
|
Returns an `Op` that asserts this operator is positive definite.
|
[
"Returns",
"an",
"Op",
"that",
"asserts",
"this",
"operator",
"is",
"positive",
"definite",
"."
] |
def assert_positive_definite(self, name="assert_positive_definite"):
    """Returns an `Op` that asserts this operator is positive definite.
    Here, positive definite means that the quadratic form `x^H A x` has positive
    real part for all nonzero `x`. Note that we do not require the operator to
    be self-adjoint to be positive definite.
    Args:
      name: A name to give this `Op`.
    Returns:
      An `Assert` `Op`, that, when run, will raise an `InvalidArgumentError` if
      the operator is not positive definite.
    """
    with self._name_scope(name):
        return self._assert_positive_definite()
|
[
"def",
"assert_positive_definite",
"(",
"self",
",",
"name",
"=",
"\"assert_positive_definite\"",
")",
":",
"with",
"self",
".",
"_name_scope",
"(",
"name",
")",
":",
"return",
"self",
".",
"_assert_positive_definite",
"(",
")"
] |
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/linalg/python/ops/linear_operator.py#L530-L545
|
||
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/feature_column/feature_column_v2.py
|
python
|
is_feature_column_v2
|
(feature_columns)
|
return True
|
Returns True if all feature columns are V2.
|
Returns True if all feature columns are V2.
|
[
"Returns",
"True",
"if",
"all",
"feature",
"columns",
"are",
"V2",
"."
] |
def is_feature_column_v2(feature_columns):
    """Returns True if all feature columns are V2."""
    for feature_column in feature_columns:
        if not isinstance(feature_column, FeatureColumn):
            return False
        if not feature_column._is_v2_column:  # pylint: disable=protected-access
            return False
    return True
|
[
"def",
"is_feature_column_v2",
"(",
"feature_columns",
")",
":",
"for",
"feature_column",
"in",
"feature_columns",
":",
"if",
"not",
"isinstance",
"(",
"feature_column",
",",
"FeatureColumn",
")",
":",
"return",
"False",
"if",
"not",
"feature_column",
".",
"_is_v2_column",
":",
"# pylint: disable=protected-access",
"return",
"False",
"return",
"True"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/feature_column/feature_column_v2.py#L2217-L2224
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python/src/Lib/multiprocessing/util.py
|
python
|
get_logger
|
()
|
return _logger
|
Returns logger used by multiprocessing
|
Returns logger used by multiprocessing
|
[
"Returns",
"logger",
"used",
"by",
"multiprocessing"
] |
def get_logger():
    '''
    Returns logger used by multiprocessing
    '''
    global _logger
    import logging, atexit

    logging._acquireLock()
    try:
        if not _logger:
            _logger = logging.getLogger(LOGGER_NAME)
            _logger.propagate = 0
            logging.addLevelName(SUBDEBUG, 'SUBDEBUG')
            logging.addLevelName(SUBWARNING, 'SUBWARNING')
            # XXX multiprocessing should cleanup before logging
            if hasattr(atexit, 'unregister'):
                atexit.unregister(_exit_function)
                atexit.register(_exit_function)
            else:
                atexit._exithandlers.remove((_exit_function, (), {}))
                atexit._exithandlers.append((_exit_function, (), {}))
    finally:
        logging._releaseLock()

    return _logger
|
[
"def",
"get_logger",
"(",
")",
":",
"global",
"_logger",
"import",
"logging",
",",
"atexit",
"logging",
".",
"_acquireLock",
"(",
")",
"try",
":",
"if",
"not",
"_logger",
":",
"_logger",
"=",
"logging",
".",
"getLogger",
"(",
"LOGGER_NAME",
")",
"_logger",
".",
"propagate",
"=",
"0",
"logging",
".",
"addLevelName",
"(",
"SUBDEBUG",
",",
"'SUBDEBUG'",
")",
"logging",
".",
"addLevelName",
"(",
"SUBWARNING",
",",
"'SUBWARNING'",
")",
"# XXX multiprocessing should cleanup before logging",
"if",
"hasattr",
"(",
"atexit",
",",
"'unregister'",
")",
":",
"atexit",
".",
"unregister",
"(",
"_exit_function",
")",
"atexit",
".",
"register",
"(",
"_exit_function",
")",
"else",
":",
"atexit",
".",
"_exithandlers",
".",
"remove",
"(",
"(",
"_exit_function",
",",
"(",
")",
",",
"{",
"}",
")",
")",
"atexit",
".",
"_exithandlers",
".",
"append",
"(",
"(",
"_exit_function",
",",
"(",
")",
",",
"{",
"}",
")",
")",
"finally",
":",
"logging",
".",
"_releaseLock",
"(",
")",
"return",
"_logger"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/multiprocessing/util.py#L84-L111
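Typical usage pairs this with multiprocessing.util.log_to_stderr, which calls get_logger() and attaches a stderr handler. A short sketch (the SUBDEBUG/SUBWARNING levels above are specific to this Python 2 vintage of the module):

from multiprocessing import util

logger = util.log_to_stderr()  # get_logger() plus a StreamHandler on stderr
logger.setLevel('INFO')
logger.info('visible from worker processes as well')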
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/html2.py
|
python
|
WebView.GetSelectedText
|
(*args, **kwargs)
|
return _html2.WebView_GetSelectedText(*args, **kwargs)
|
GetSelectedText(self) -> String
|
GetSelectedText(self) -> String
|
[
"GetSelectedText",
"(",
"self",
")",
"-",
">",
"String"
] |
def GetSelectedText(*args, **kwargs):
    """GetSelectedText(self) -> String"""
    return _html2.WebView_GetSelectedText(*args, **kwargs)
|
[
"def",
"GetSelectedText",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_html2",
".",
"WebView_GetSelectedText",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/html2.py#L278-L280
|
|
baidu-research/tensorflow-allreduce
|
66d5b855e90b0949e9fa5cca5599fd729a70e874
|
tensorflow/contrib/timeseries/python/timeseries/state_space_models/varma.py
|
python
|
VARMA.__init__
|
(self,
autoregressive_order,
moving_average_order,
configuration=state_space_model.StateSpaceModelConfiguration())
|
Construct a VARMA model.
The size of the latent state for this model is:
num_features * max(autoregressive_order, moving_average_order + 1)
Square matrices of this size are constructed and multiplied.
Args:
autoregressive_order: The maximum autoregressive lag.
moving_average_order: The maximum moving average lag, after which
transient deviations are expected to return to their long-term mean.
configuration: A StateSpaceModelConfiguration object.
|
Construct a VARMA model.
|
[
"Construct",
"a",
"VARMA",
"model",
"."
] |
def __init__(self,
             autoregressive_order,
             moving_average_order,
             configuration=state_space_model.StateSpaceModelConfiguration()):
    """Construct a VARMA model.
    The size of the latent state for this model is:
      num_features * max(autoregressive_order, moving_average_order + 1)
    Square matrices of this size are constructed and multiplied.
    Args:
      autoregressive_order: The maximum autoregressive lag.
      moving_average_order: The maximum moving average lag, after which
        transient deviations are expected to return to their long-term mean.
      configuration: A StateSpaceModelConfiguration object.
    """
    self.ar_order = autoregressive_order
    self.ma_order = moving_average_order
    self.state_num_blocks = max(autoregressive_order, moving_average_order + 1)
    super(VARMA, self).__init__(configuration=configuration)
    self.state_dimension = self.state_num_blocks * self.num_features
|
[
"def",
"__init__",
"(",
"self",
",",
"autoregressive_order",
",",
"moving_average_order",
",",
"configuration",
"=",
"state_space_model",
".",
"StateSpaceModelConfiguration",
"(",
")",
")",
":",
"self",
".",
"ar_order",
"=",
"autoregressive_order",
"self",
".",
"ma_order",
"=",
"moving_average_order",
"self",
".",
"state_num_blocks",
"=",
"max",
"(",
"autoregressive_order",
",",
"moving_average_order",
"+",
"1",
")",
"super",
"(",
"VARMA",
",",
"self",
")",
".",
"__init__",
"(",
"configuration",
"=",
"configuration",
")",
"self",
".",
"state_dimension",
"=",
"self",
".",
"state_num_blocks",
"*",
"self",
".",
"num_features"
] |
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/timeseries/python/timeseries/state_space_models/varma.py#L65-L85
|
||
VowpalWabbit/vowpal_wabbit
|
866b8fa88ff85a957c7eb72065ea44518b9ba416
|
python/vowpalwabbit/sklearn.py
|
python
|
VWMultiClassifier.predict_proba
|
(self, X)
|
return VW.predict(self, X=X)
|
Predict probabilities for each class.
Args:
X : {array-like, sparse matrix}, shape = (n_samples, n_features)
Samples.
Returns:
array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes)
Confidence scores per (sample, class) combination. In the binary
case, confidence score for self.classes_[1] where >0 means this
class would be predicted.
Examples:
>>> import numpy as np
>>> X = np.array([ [10, 10], [8, 10], [-5, 5.5], [-5.4, 5.5], [-20, -20], [-15, -20] ])
>>> y = np.array([1, 1, 2, 2, 3, 3])
>>> from vowpalwabbit.sklearn import VWMultiClassifier
>>> model = VWMultiClassifier(oaa=3, loss_function='logistic')
>>> _ = model.fit(X, y)
>>> model.predict_proba(X)
array([[0.38928846, 0.30534211, 0.30536944],
[0.40664235, 0.29666999, 0.29668769],
[0.52324486, 0.23841164, 0.23834346],
[0.5268591 , 0.23660533, 0.23653553],
[0.65397811, 0.17312808, 0.17289382],
[0.61190444, 0.19416356, 0.19393198]])
|
Predict probabilities for each class.
|
[
"Predict",
"probabilities",
"for",
"each",
"class",
"."
] |
def predict_proba(self, X):
    """Predict probabilities for each class.
    Args:
        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
            Samples.
    Returns:
        array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes)
            Confidence scores per (sample, class) combination. In the binary
            case, confidence score for self.classes_[1] where >0 means this
            class would be predicted.
    Examples:
        >>> import numpy as np
        >>> X = np.array([ [10, 10], [8, 10], [-5, 5.5], [-5.4, 5.5], [-20, -20], [-15, -20] ])
        >>> y = np.array([1, 1, 2, 2, 3, 3])
        >>> from vowpalwabbit.sklearn import VWMultiClassifier
        >>> model = VWMultiClassifier(oaa=3, loss_function='logistic')
        >>> _ = model.fit(X, y)
        >>> model.predict_proba(X)
        array([[0.38928846, 0.30534211, 0.30536944],
               [0.40664235, 0.29666999, 0.29668769],
               [0.52324486, 0.23841164, 0.23834346],
               [0.5268591 , 0.23660533, 0.23653553],
               [0.65397811, 0.17312808, 0.17289382],
               [0.61190444, 0.19416356, 0.19393198]])
    """
    return VW.predict(self, X=X)
|
[
"def",
"predict_proba",
"(",
"self",
",",
"X",
")",
":",
"return",
"VW",
".",
"predict",
"(",
"self",
",",
"X",
"=",
"X",
")"
] |
https://github.com/VowpalWabbit/vowpal_wabbit/blob/866b8fa88ff85a957c7eb72065ea44518b9ba416/python/vowpalwabbit/sklearn.py#L720-L748
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/grid.py
|
python
|
Grid.MovePageDown
|
(*args, **kwargs)
|
return _grid.Grid_MovePageDown(*args, **kwargs)
|
MovePageDown(self) -> bool
|
MovePageDown(self) -> bool
|
[
"MovePageDown",
"(",
"self",
")",
"-",
">",
"bool"
] |
def MovePageDown(*args, **kwargs):
    """MovePageDown(self) -> bool"""
    return _grid.Grid_MovePageDown(*args, **kwargs)
|
[
"def",
"MovePageDown",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"Grid_MovePageDown",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/grid.py#L1446-L1448
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/_gdi.py
|
python
|
Bitmap.SetDepth
|
(*args, **kwargs)
|
return _gdi_.Bitmap_SetDepth(*args, **kwargs)
|
SetDepth(self, int depth)
Set the depth property (does not affect the existing bitmap data).
|
SetDepth(self, int depth)
|
[
"SetDepth",
"(",
"self",
"int",
"depth",
")"
] |
def SetDepth(*args, **kwargs):
    """
    SetDepth(self, int depth)
    Set the depth property (does not affect the existing bitmap data).
    """
    return _gdi_.Bitmap_SetDepth(*args, **kwargs)
|
[
"def",
"SetDepth",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gdi_",
".",
"Bitmap_SetDepth",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_gdi.py#L773-L779
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/_vendor/pyparsing.py
|
python
|
ParseBaseException.__getattr__
|
( self, aname )
|
supported attributes by name are:
- lineno - returns the line number of the exception text
- col - returns the column number of the exception text
- line - returns the line containing the exception text
|
supported attributes by name are:
- lineno - returns the line number of the exception text
- col - returns the column number of the exception text
- line - returns the line containing the exception text
|
[
"supported",
"attributes",
"by",
"name",
"are",
":",
"-",
"lineno",
"-",
"returns",
"the",
"line",
"number",
"of",
"the",
"exception",
"text",
"-",
"col",
"-",
"returns",
"the",
"column",
"number",
"of",
"the",
"exception",
"text",
"-",
"line",
"-",
"returns",
"the",
"line",
"containing",
"the",
"exception",
"text"
] |
def __getattr__( self, aname ):
    """supported attributes by name are:
        - lineno - returns the line number of the exception text
        - col - returns the column number of the exception text
        - line - returns the line containing the exception text
    """
    if( aname == "lineno" ):
        return lineno( self.loc, self.pstr )
    elif( aname in ("col", "column") ):
        return col( self.loc, self.pstr )
    elif( aname == "line" ):
        return line( self.loc, self.pstr )
    else:
        raise AttributeError(aname)
|
[
"def",
"__getattr__",
"(",
"self",
",",
"aname",
")",
":",
"if",
"(",
"aname",
"==",
"\"lineno\"",
")",
":",
"return",
"lineno",
"(",
"self",
".",
"loc",
",",
"self",
".",
"pstr",
")",
"elif",
"(",
"aname",
"in",
"(",
"\"col\"",
",",
"\"column\"",
")",
")",
":",
"return",
"col",
"(",
"self",
".",
"loc",
",",
"self",
".",
"pstr",
")",
"elif",
"(",
"aname",
"==",
"\"line\"",
")",
":",
"return",
"line",
"(",
"self",
".",
"loc",
",",
"self",
".",
"pstr",
")",
"else",
":",
"raise",
"AttributeError",
"(",
"aname",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/_vendor/pyparsing.py#L228-L241
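The attributes resolved above are what make pyparsing's error reports readable; a small sketch using the public API:

from pyparsing import Word, nums, ParseException

integer = Word(nums)
try:
    integer.parseString("not a number")
except ParseException as pe:
    # lineno, col and line all route through the __getattr__ above.
    print(pe.lineno, pe.col, pe.line)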
|
||
idaholab/moose
|
9eeebc65e098b4c30f8205fb41591fd5b61eb6ff
|
python/MooseDocs/base/renderers.py
|
python
|
Renderer.__getFunction
|
(self, token)
|
return self.__functions.get(token.name, None)
|
Return the desired function for the supplied token object.
Inputs:
token[tree.token]: token for which the associated RenderComponent function is desired.
|
Return the desired function for the supplied token object.
|
[
"Return",
"the",
"desired",
"function",
"for",
"the",
"supplied",
"token",
"object",
"."
] |
def __getFunction(self, token):
    """
    Return the desired function for the supplied token object.
    Inputs:
        token[tree.token]: token for which the associated RenderComponent function is desired.
    """
    return self.__functions.get(token.name, None)
|
[
"def",
"__getFunction",
"(",
"self",
",",
"token",
")",
":",
"return",
"self",
".",
"__functions",
".",
"get",
"(",
"token",
".",
"name",
",",
"None",
")"
] |
https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/MooseDocs/base/renderers.py#L210-L217
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/imaplib.py
|
python
|
Int2AP
|
(num)
|
return val
|
Convert integer to A-P string representation.
|
Convert integer to A-P string representation.
|
[
"Convert",
"integer",
"to",
"A",
"-",
"P",
"string",
"representation",
"."
] |
def Int2AP(num):
    """Convert integer to A-P string representation."""
    val = b''; AP = b'ABCDEFGHIJKLMNOP'
    num = int(abs(num))
    while num:
        num, mod = divmod(num, 16)
        val = AP[mod:mod+1] + val
    return val
|
[
"def",
"Int2AP",
"(",
"num",
")",
":",
"val",
"=",
"b''",
"AP",
"=",
"b'ABCDEFGHIJKLMNOP'",
"num",
"=",
"int",
"(",
"abs",
"(",
"num",
")",
")",
"while",
"num",
":",
"num",
",",
"mod",
"=",
"divmod",
"(",
"num",
",",
"16",
")",
"val",
"=",
"AP",
"[",
"mod",
":",
"mod",
"+",
"1",
"]",
"+",
"val",
"return",
"val"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/imaplib.py#L1445-L1454
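The encoding maps each base-16 digit to a letter A-P, most significant digit first. A few worked values, assuming the function above:

assert Int2AP(0) == b''     # the loop body never runs for zero
assert Int2AP(1) == b'B'    # digit 1 -> 'B'
assert Int2AP(27) == b'BL'  # 27 = 1*16 + 11 -> 'B' then 'L'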
|
|
GrammaTech/gtirb
|
415dd72e1e3c475004d013723c16cdcb29c0826e
|
python/gtirb/module.py
|
python
|
Module.code_blocks_at
|
(
self, addrs: typing.Union[int, range]
)
|
return itertools.chain.from_iterable(
s.code_blocks_at(addrs) for s in self.sections
)
|
Finds all the code blocks that begin at an address or range of
addresses.
:param addrs: Either a ``range`` object or a single address.
|
Finds all the code blocks that begin at an address or range of
addresses.
|
[
"Finds",
"all",
"the",
"code",
"blocks",
"that",
"begin",
"at",
"an",
"address",
"or",
"range",
"of",
"addresses",
"."
] |
def code_blocks_at(
    self, addrs: typing.Union[int, range]
) -> typing.Iterable[CodeBlock]:
    """Finds all the code blocks that begin at an address or range of
    addresses.
    :param addrs: Either a ``range`` object or a single address.
    """
    return itertools.chain.from_iterable(
        s.code_blocks_at(addrs) for s in self.sections
    )
|
[
"def",
"code_blocks_at",
"(",
"self",
",",
"addrs",
":",
"typing",
".",
"Union",
"[",
"int",
",",
"range",
"]",
")",
"->",
"typing",
".",
"Iterable",
"[",
"CodeBlock",
"]",
":",
"return",
"itertools",
".",
"chain",
".",
"from_iterable",
"(",
"s",
".",
"code_blocks_at",
"(",
"addrs",
")",
"for",
"s",
"in",
"self",
".",
"sections",
")"
] |
https://github.com/GrammaTech/gtirb/blob/415dd72e1e3c475004d013723c16cdcb29c0826e/python/gtirb/module.py#L523-L534
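A hedged usage sketch (the file name is hypothetical; assumes a loaded IR with at least one module):

import gtirb

ir = gtirb.IR.load_protobuf("example.gtirb")  # hypothetical input file
module = ir.modules[0]
for block in module.code_blocks_at(range(0x401000, 0x402000)):
    print(hex(block.address), block.size)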
|
|
ros-planning/moveit
|
ee48dc5cedc981d0869352aa3db0b41469c2735c
|
moveit_commander/src/moveit_commander/move_group.py
|
python
|
MoveGroupCommander.set_joint_value_target
|
(self, arg1, arg2=None, arg3=None)
|
Specify a target joint configuration for the group.
- if the type of arg1 is one of the following: dict, list, JointState message, then no other arguments should be provided.
The dict should specify pairs of joint variable names and their target values, the list should specify all the variable values
for the group. The JointState message specifies the positions of some single-dof joints.
- if the type of arg1 is string, then arg2 is expected to be defined and be either a real value or a list of real values. This is
interpreted as setting a particular joint to a particular value.
- if the type of arg1 is Pose or PoseStamped, both arg2 and arg3 could be defined. If arg2 or arg3 are defined, their types must
be either string or bool. The string type argument is interpreted as the end-effector the pose is specified for (default is to use
the default end-effector), and the bool is used to decide whether the pose specified is approximate (default is false). This situation
allows setting the joint target of the group by calling IK. This does not send a pose to the planner and the planner will do no IK.
Instead, one IK solution will be computed first, and that will be sent to the planner.
|
Specify a target joint configuration for the group.
- if the type of arg1 is one of the following: dict, list, JointState message, then no other arguments should be provided.
The dict should specify pairs of joint variable names and their target values, the list should specify all the variable values
for the group. The JointState message specifies the positions of some single-dof joints.
- if the type of arg1 is string, then arg2 is expected to be defined and be either a real value or a list of real values. This is
interpreted as setting a particular joint to a particular value.
- if the type of arg1 is Pose or PoseStamped, both arg2 and arg3 could be defined. If arg2 or arg3 are defined, their types must
be either string or bool. The string type argument is interpreted as the end-effector the pose is specified for (default is to use
the default end-effector), and the bool is used to decide whether the pose specified is approximate (default is false). This situation
allows setting the joint target of the group by calling IK. This does not send a pose to the planner and the planner will do no IK.
Instead, one IK solution will be computed first, and that will be sent to the planner.
|
[
"Specify",
"a",
"target",
"joint",
"configuration",
"for",
"the",
"group",
".",
"-",
"if",
"the",
"type",
"of",
"arg1",
"is",
"one",
"of",
"the",
"following",
":",
"dict",
"list",
"JointState",
"message",
"then",
"no",
"other",
"arguments",
"should",
"be",
"provided",
".",
"The",
"dict",
"should",
"specify",
"pairs",
"of",
"joint",
"variable",
"names",
"and",
"their",
"target",
"values",
"the",
"list",
"should",
"specify",
"all",
"the",
"variable",
"values",
"for",
"the",
"group",
".",
"The",
"JointState",
"message",
"specifies",
"the",
"positions",
"of",
"some",
"single",
"-",
"dof",
"joints",
".",
"-",
"if",
"the",
"type",
"of",
"arg1",
"is",
"string",
"then",
"arg2",
"is",
"expected",
"to",
"be",
"defined",
"and",
"be",
"either",
"a",
"real",
"value",
"or",
"a",
"list",
"of",
"real",
"values",
".",
"This",
"is",
"interpreted",
"as",
"setting",
"a",
"particular",
"joint",
"to",
"a",
"particular",
"value",
".",
"-",
"if",
"the",
"type",
"of",
"arg1",
"is",
"Pose",
"or",
"PoseStamped",
"both",
"arg2",
"and",
"arg3",
"could",
"be",
"defined",
".",
"If",
"arg2",
"or",
"arg3",
"are",
"defined",
"their",
"types",
"must",
"be",
"either",
"string",
"or",
"bool",
".",
"The",
"string",
"type",
"argument",
"is",
"interpreted",
"as",
"the",
"end",
"-",
"effector",
"the",
"pose",
"is",
"specified",
"for",
"(",
"default",
"is",
"to",
"use",
"the",
"default",
"end",
"-",
"effector",
")",
"and",
"the",
"bool",
"is",
"used",
"to",
"decide",
"whether",
"the",
"pose",
"specified",
"is",
"approximate",
"(",
"default",
"is",
"false",
")",
".",
"This",
"situation",
"allows",
"setting",
"the",
"joint",
"target",
"of",
"the",
"group",
"by",
"calling",
"IK",
".",
"This",
"does",
"not",
"send",
"a",
"pose",
"to",
"the",
"planner",
"and",
"the",
"planner",
"will",
"do",
"no",
"IK",
".",
"Instead",
"one",
"IK",
"solution",
"will",
"be",
"computed",
"first",
"and",
"that",
"will",
"be",
"sent",
"to",
"the",
"planner",
"."
] |
def set_joint_value_target(self, arg1, arg2=None, arg3=None):
    """
    Specify a target joint configuration for the group.
    - if the type of arg1 is one of the following: dict, list, JointState message, then no other arguments should be provided.
    The dict should specify pairs of joint variable names and their target values, the list should specify all the variable values
    for the group. The JointState message specifies the positions of some single-dof joints.
    - if the type of arg1 is string, then arg2 is expected to be defined and be either a real value or a list of real values. This is
    interpreted as setting a particular joint to a particular value.
    - if the type of arg1 is Pose or PoseStamped, both arg2 and arg3 could be defined. If arg2 or arg3 are defined, their types must
    be either string or bool. The string type argument is interpreted as the end-effector the pose is specified for (default is to use
    the default end-effector), and the bool is used to decide whether the pose specified is approximate (default is false). This situation
    allows setting the joint target of the group by calling IK. This does not send a pose to the planner and the planner will do no IK.
    Instead, one IK solution will be computed first, and that will be sent to the planner.
    """
    if isinstance(arg1, RobotState):
        if not self._g.set_state_value_target(conversions.msg_to_string(arg1)):
            raise MoveItCommanderException(
                "Error setting state target. Is the target state within bounds?"
            )
    elif isinstance(arg1, JointState):
        if arg2 is not None or arg3 is not None:
            raise MoveItCommanderException("Too many arguments specified")
        if not self._g.set_joint_value_target_from_joint_state_message(
            conversions.msg_to_string(arg1)
        ):
            raise MoveItCommanderException(
                "Error setting joint target. Is the target within bounds?"
            )
    elif isinstance(arg1, str):
        if arg2 is None:
            raise MoveItCommanderException(
                "Joint value expected when joint name specified"
            )
        if arg3 is not None:
            raise MoveItCommanderException("Too many arguments specified")
        if not self._g.set_joint_value_target(arg1, arg2):
            raise MoveItCommanderException(
                "Error setting joint target. Is the target within bounds?"
            )
    elif isinstance(arg1, (Pose, PoseStamped)):
        approx = False
        eef = ""
        if arg2 is not None:
            if type(arg2) is str:
                eef = arg2
            else:
                if type(arg2) is bool:
                    approx = arg2
                else:
                    raise MoveItCommanderException("Unexpected type")
        if arg3 is not None:
            if type(arg3) is str:
                eef = arg3
            else:
                if type(arg3) is bool:
                    approx = arg3
                else:
                    raise MoveItCommanderException("Unexpected type")
        r = False
        if type(arg1) is PoseStamped:
            r = self._g.set_joint_value_target_from_pose_stamped(
                conversions.msg_to_string(arg1), eef, approx
            )
        else:
            r = self._g.set_joint_value_target_from_pose(
                conversions.msg_to_string(arg1), eef, approx
            )
        if not r:
            if approx:
                raise MoveItCommanderException(
                    "Error setting joint target. Does your IK solver support approximate IK?"
                )
            else:
                raise MoveItCommanderException(
                    "Error setting joint target. Is the IK solver functional?"
                )
    elif hasattr(arg1, "__iter__"):
        if arg2 is not None or arg3 is not None:
            raise MoveItCommanderException("Too many arguments specified")
        if not self._g.set_joint_value_target(arg1):
            raise MoveItCommanderException(
                "Error setting joint target. Is the target within bounds?"
            )
    else:
        raise MoveItCommanderException(
            "Unsupported argument of type %s" % type(arg1)
        )
|
[
"def",
"set_joint_value_target",
"(",
"self",
",",
"arg1",
",",
"arg2",
"=",
"None",
",",
"arg3",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"arg1",
",",
"RobotState",
")",
":",
"if",
"not",
"self",
".",
"_g",
".",
"set_state_value_target",
"(",
"conversions",
".",
"msg_to_string",
"(",
"arg1",
")",
")",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Error setting state target. Is the target state within bounds?\"",
")",
"elif",
"isinstance",
"(",
"arg1",
",",
"JointState",
")",
":",
"if",
"arg2",
"is",
"not",
"None",
"or",
"arg3",
"is",
"not",
"None",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Too many arguments specified\"",
")",
"if",
"not",
"self",
".",
"_g",
".",
"set_joint_value_target_from_joint_state_message",
"(",
"conversions",
".",
"msg_to_string",
"(",
"arg1",
")",
")",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Error setting joint target. Is the target within bounds?\"",
")",
"elif",
"isinstance",
"(",
"arg1",
",",
"str",
")",
":",
"if",
"arg2",
"is",
"None",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Joint value expected when joint name specified\"",
")",
"if",
"arg3",
"is",
"not",
"None",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Too many arguments specified\"",
")",
"if",
"not",
"self",
".",
"_g",
".",
"set_joint_value_target",
"(",
"arg1",
",",
"arg2",
")",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Error setting joint target. Is the target within bounds?\"",
")",
"elif",
"isinstance",
"(",
"arg1",
",",
"(",
"Pose",
",",
"PoseStamped",
")",
")",
":",
"approx",
"=",
"False",
"eef",
"=",
"\"\"",
"if",
"arg2",
"is",
"not",
"None",
":",
"if",
"type",
"(",
"arg2",
")",
"is",
"str",
":",
"eef",
"=",
"arg2",
"else",
":",
"if",
"type",
"(",
"arg2",
")",
"is",
"bool",
":",
"approx",
"=",
"arg2",
"else",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Unexpected type\"",
")",
"if",
"arg3",
"is",
"not",
"None",
":",
"if",
"type",
"(",
"arg3",
")",
"is",
"str",
":",
"eef",
"=",
"arg3",
"else",
":",
"if",
"type",
"(",
"arg3",
")",
"is",
"bool",
":",
"approx",
"=",
"arg3",
"else",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Unexpected type\"",
")",
"r",
"=",
"False",
"if",
"type",
"(",
"arg1",
")",
"is",
"PoseStamped",
":",
"r",
"=",
"self",
".",
"_g",
".",
"set_joint_value_target_from_pose_stamped",
"(",
"conversions",
".",
"msg_to_string",
"(",
"arg1",
")",
",",
"eef",
",",
"approx",
")",
"else",
":",
"r",
"=",
"self",
".",
"_g",
".",
"set_joint_value_target_from_pose",
"(",
"conversions",
".",
"msg_to_string",
"(",
"arg1",
")",
",",
"eef",
",",
"approx",
")",
"if",
"not",
"r",
":",
"if",
"approx",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Error setting joint target. Does your IK solver support approximate IK?\"",
")",
"else",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Error setting joint target. Is the IK solver functional?\"",
")",
"elif",
"hasattr",
"(",
"arg1",
",",
"\"__iter__\"",
")",
":",
"if",
"arg2",
"is",
"not",
"None",
"or",
"arg3",
"is",
"not",
"None",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Too many arguments specified\"",
")",
"if",
"not",
"self",
".",
"_g",
".",
"set_joint_value_target",
"(",
"arg1",
")",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Error setting joint target. Is the target within bounds?\"",
")",
"else",
":",
"raise",
"MoveItCommanderException",
"(",
"\"Unsupported argument of type %s\"",
"%",
"type",
"(",
"arg1",
")",
")"
] |
https://github.com/ros-planning/moveit/blob/ee48dc5cedc981d0869352aa3db0b41469c2735c/moveit_commander/src/moveit_commander/move_group.py#L199-L290
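A hedged usage sketch of the overloads described above (group and joint names are hypothetical; assumes a running MoveIt instance):

from moveit_commander import MoveGroupCommander

group = MoveGroupCommander("panda_arm")              # hypothetical group name
group.set_joint_value_target({"panda_joint1": 0.0})  # dict form: name -> value
group.set_joint_value_target("panda_joint2", -0.5)   # string form: one joint
group.set_joint_value_target([0.0, -0.5, 0.0, -1.5, 0.0, 1.0, 0.7])  # full list form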
|
||
openmm/openmm
|
cb293447c4fc8b03976dfe11399f107bab70f3d9
|
wrappers/python/openmm/app/topology.py
|
python
|
Topology.addAtom
|
(self, name, element, residue, id=None)
|
return atom
|
Create a new Atom and add it to the Topology.
Parameters
----------
name : string
The name of the atom to add
element : Element
The element of the atom to add
residue : Residue
The Residue to add it to
id : string=None
An optional identifier for the atom. If this is omitted, an id is
generated based on the atom index.
Returns
-------
Atom
the newly created Atom
|
Create a new Atom and add it to the Topology.
|
[
"Create",
"a",
"new",
"Atom",
"and",
"add",
"it",
"to",
"the",
"Topology",
"."
] |
def addAtom(self, name, element, residue, id=None):
    """Create a new Atom and add it to the Topology.
    Parameters
    ----------
    name : string
        The name of the atom to add
    element : Element
        The element of the atom to add
    residue : Residue
        The Residue to add it to
    id : string=None
        An optional identifier for the atom. If this is omitted, an id is
        generated based on the atom index.
    Returns
    -------
    Atom
        the newly created Atom
    """
    if len(residue._atoms) > 0 and self._numAtoms != residue._atoms[-1].index+1:
        raise ValueError('All atoms within a residue must be contiguous')
    if id is None:
        id = str(self._numAtoms+1)
    atom = Atom(name, element, self._numAtoms, residue, id)
    self._numAtoms += 1
    residue._atoms.append(atom)
    return atom
|
[
"def",
"addAtom",
"(",
"self",
",",
"name",
",",
"element",
",",
"residue",
",",
"id",
"=",
"None",
")",
":",
"if",
"len",
"(",
"residue",
".",
"_atoms",
")",
">",
"0",
"and",
"self",
".",
"_numAtoms",
"!=",
"residue",
".",
"_atoms",
"[",
"-",
"1",
"]",
".",
"index",
"+",
"1",
":",
"raise",
"ValueError",
"(",
"'All atoms within a residue must be contiguous'",
")",
"if",
"id",
"is",
"None",
":",
"id",
"=",
"str",
"(",
"self",
".",
"_numAtoms",
"+",
"1",
")",
"atom",
"=",
"Atom",
"(",
"name",
",",
"element",
",",
"self",
".",
"_numAtoms",
",",
"residue",
",",
"id",
")",
"self",
".",
"_numAtoms",
"+=",
"1",
"residue",
".",
"_atoms",
".",
"append",
"(",
"atom",
")",
"return",
"atom"
] |
https://github.com/openmm/openmm/blob/cb293447c4fc8b03976dfe11399f107bab70f3d9/wrappers/python/openmm/app/topology.py#L169-L196
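A short sketch of building a topology through the public app API (residue and atom names are illustrative):

from openmm.app import Topology, element

top = Topology()
chain = top.addChain()
residue = top.addResidue("ALA", chain)
atom = top.addAtom("CA", element.carbon, residue)  # id defaults to str(index + 1)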
|
|
Xilinx/XRT
|
dd071c90309df61d3ecdd92dca39f43804915c99
|
src/python/xrt_binding.py
|
python
|
xclLockDevice
|
(handle)
|
return 0
|
The function is NOP; it exists for backward compatibility.
|
The function is NOP; it exists for backward compatibility.
|
[
"The",
"function",
"is",
"NOP",
";",
"it",
"exists",
"for",
"backward",
"compatibility",
"."
] |
def xclLockDevice(handle):
    """
    The function is NOP; it exists for backward compatibility.
    """
    _xclDeprecation(sys._getframe().f_code.co_name)
    return 0
|
[
"def",
"xclLockDevice",
"(",
"handle",
")",
":",
"_xclDeprecation",
"(",
"sys",
".",
"_getframe",
"(",
")",
".",
"f_code",
".",
"co_name",
")",
"return",
"0"
] |
https://github.com/Xilinx/XRT/blob/dd071c90309df61d3ecdd92dca39f43804915c99/src/python/xrt_binding.py#L320-L325
|
|
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILL_common.py
|
python
|
convertToWorkspaceIndex
|
(i, ws, indexType=INDEX_TYPE_DET_ID)
|
Convert given number to workspace index.
|
Convert given number to workspace index.
|
[
"Convert",
"given",
"number",
"to",
"workspace",
"index",
"."
] |
def convertToWorkspaceIndex(i, ws, indexType=INDEX_TYPE_DET_ID):
    """Convert given number to workspace index."""
    if indexType == INDEX_TYPE_WS_INDEX:
        return i
    elif indexType == INDEX_TYPE_SPECTRUM_NUMBER:
        return ws.getIndexFromSpectrumNumber(i)
    else:  # INDEX_TYPE_DET_ID
        for j in range(ws.getNumberHistograms()):
            if ws.getSpectrum(j).hasDetectorID(i):
                return j
        raise RuntimeError('No workspace index found for detector id {0}'.format(i))
|
[
"def",
"convertToWorkspaceIndex",
"(",
"i",
",",
"ws",
",",
"indexType",
"=",
"INDEX_TYPE_DET_ID",
")",
":",
"if",
"indexType",
"==",
"INDEX_TYPE_WS_INDEX",
":",
"return",
"i",
"elif",
"indexType",
"==",
"INDEX_TYPE_SPECTRUM_NUMBER",
":",
"return",
"ws",
".",
"getIndexFromSpectrumNumber",
"(",
"i",
")",
"else",
":",
"# INDEX_TYPE_DET_ID",
"for",
"j",
"in",
"range",
"(",
"ws",
".",
"getNumberHistograms",
"(",
")",
")",
":",
"if",
"ws",
".",
"getSpectrum",
"(",
"j",
")",
".",
"hasDetectorID",
"(",
"i",
")",
":",
"return",
"j",
"raise",
"RuntimeError",
"(",
"'No workspace index found for detector id {0}'",
".",
"format",
"(",
"i",
")",
")"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/DirectILL_common.py#L136-L146
|
||
OGRECave/ogre-next
|
287307980e6de8910f04f3cc0994451b075071fd
|
Tools/Wings3DExporter/pgon.py
|
python
|
triangulate
|
(pgon)
|
return t.process()
|
triangulate a polygon defined by its vertices
|
triangulate a polygon defined by its vertices
|
[
"triangulate",
"a",
"polygon",
"defined",
"by",
"its",
"vertices"
] |
def triangulate(pgon):
    "triangulate a polygon defined by its vertices"
    t = Triangulator(pgon)
    return t.process()
|
[
"def",
"triangulate",
"(",
"pgon",
")",
":",
"t",
"=",
"Triangulator",
"(",
"pgon",
")",
"return",
"t",
".",
"process",
"(",
")"
] |
https://github.com/OGRECave/ogre-next/blob/287307980e6de8910f04f3cc0994451b075071fd/Tools/Wings3DExporter/pgon.py#L186-L191
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/build/waf-1.7.13/platforms/platform_impl_win_x64.py
|
python
|
run_unittest_launcher_for_win_x64
|
(ctx, game_project_name)
|
Helper context function to execute the unit test launcher for a specific game project
:param ctx: Context
:param game_project_name: The current project name (extracted from bootstrap.cfg)
|
Helper context function to execute the unit test launcher for a specific game project
|
[
"Helper",
"context",
"function",
"to",
"execute",
"the",
"unit",
"test",
"launcher",
"for",
"a",
"specific",
"game",
"project"
] |
def run_unittest_launcher_for_win_x64(ctx, game_project_name):
    """
    Helper context function to execute the unit test launcher for a specific game project
    :param ctx: Context
    :param game_project_name: The current project name (extracted from bootstrap.cfg)
    """
    output_folder = ctx.get_output_folders(ctx.platform, ctx.config)[0]
    current_project_launcher = ctx.env['cprogram_PATTERN'] % '{}Launcher'.format(game_project_name)
    current_project_unittest_launcher_fullpath = os.path.join(output_folder.abspath(), current_project_launcher)
    if not os.path.isfile(current_project_unittest_launcher_fullpath):
        raise Errors.WafError("Unable to launch unit tests for project '{}'. Cannot find launcher file '{}'. Make sure the project has been built successfully.".format(game_project_name, current_project_unittest_launcher_fullpath))
    Logs.info('[WAF] Running unit tests for {}'.format(game_project_name))
    try:
        call_args = [current_project_unittest_launcher_fullpath]
        if not ctx.is_engine_local():
            call_args.extend(['--app-root', ctx.get_launch_node().abspath()])
        # Grab any optional arguments
        auto_launch_unit_test_arguments = ctx.get_settings_value('auto_launch_unit_test_arguments')
        if auto_launch_unit_test_arguments:
            call_args.extend(auto_launch_unit_test_arguments.split(' '))
        result_code = subprocess.call(call_args)
    except Exception as e:
        raise Errors.WafError("Error executing unit tests for '{}': {}".format(game_project_name, e))
    if result_code != 0:
        raise Errors.WafError("Unit tests for '{}' failed. Return code {}".format(game_project_name, result_code))
    else:
        Logs.info('[WAF] Unit tests for {} passed'.format(game_project_name))
|
[
"def",
"run_unittest_launcher_for_win_x64",
"(",
"ctx",
",",
"game_project_name",
")",
":",
"output_folder",
"=",
"ctx",
".",
"get_output_folders",
"(",
"ctx",
".",
"platform",
",",
"ctx",
".",
"config",
")",
"[",
"0",
"]",
"current_project_launcher",
"=",
"ctx",
".",
"env",
"[",
"'cprogram_PATTERN'",
"]",
"%",
"'{}Launcher'",
".",
"format",
"(",
"game_project_name",
")",
"current_project_unittest_launcher_fullpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"output_folder",
".",
"abspath",
"(",
")",
",",
"current_project_launcher",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"current_project_unittest_launcher_fullpath",
")",
":",
"raise",
"Errors",
".",
"WafError",
"(",
"\"Unable to launch unit tests for project '{}'. Cannot find launcher file '{}'. Make sure the project has been built successfully.\"",
".",
"format",
"(",
"game_project_name",
",",
"current_project_unittest_launcher_fullpath",
")",
")",
"Logs",
".",
"info",
"(",
"'[WAF] Running unit tests for {}'",
".",
"format",
"(",
"game_project_name",
")",
")",
"try",
":",
"call_args",
"=",
"[",
"current_project_unittest_launcher_fullpath",
"]",
"if",
"not",
"ctx",
".",
"is_engine_local",
"(",
")",
":",
"call_args",
".",
"extend",
"(",
"[",
"'--app-root'",
",",
"ctx",
".",
"get_launch_node",
"(",
")",
".",
"abspath",
"(",
")",
"]",
")",
"# Grab any optional arguments",
"auto_launch_unit_test_arguments",
"=",
"ctx",
".",
"get_settings_value",
"(",
"'auto_launch_unit_test_arguments'",
")",
"if",
"auto_launch_unit_test_arguments",
":",
"call_args",
".",
"extend",
"(",
"auto_launch_unit_test_arguments",
".",
"split",
"(",
"' '",
")",
")",
"result_code",
"=",
"subprocess",
".",
"call",
"(",
"call_args",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"Errors",
".",
"WafError",
"(",
"\"Error executing unit tests for '{}': {}\"",
".",
"format",
"(",
"game_project_name",
",",
"e",
")",
")",
"if",
"result_code",
"!=",
"0",
":",
"raise",
"Errors",
".",
"WafError",
"(",
"\"Unit tests for '{}' failed. Return code {}\"",
".",
"format",
"(",
"game_project_name",
",",
"result_code",
")",
")",
"else",
":",
"Logs",
".",
"info",
"(",
"'[WAF] Running unit tests for {}'",
".",
"format",
"(",
"game_project_name",
")",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/build/waf-1.7.13/platforms/platform_impl_win_x64.py#L41-L74
|
||
SFTtech/openage
|
d6a08c53c48dc1e157807471df92197f6ca9e04d
|
openage/util/ordered_set.py
|
python
|
OrderedSet.union
|
(self, other)
|
return OrderedSet(element_list)
|
Returns a new ordered set with the elements from self and other.
|
Returns a new ordered set with the elements from self and other.
|
[
"Returns",
"a",
"new",
"ordered",
"set",
"with",
"the",
"elements",
"from",
"self",
"and",
"other",
"."
] |
def union(self, other):
    """
    Returns a new ordered set with the elements from self and other.
    """
    element_list = self.get_list() + other.get_list()
    return OrderedSet(element_list)
|
[
"def",
"union",
"(",
"self",
",",
"other",
")",
":",
"element_list",
"=",
"self",
".",
"get_list",
"(",
")",
"+",
"other",
".",
"get_list",
"(",
")",
"return",
"OrderedSet",
"(",
"element_list",
")"
] |
https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/util/ordered_set.py#L90-L95
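A hypothetical usage sketch; this assumes OrderedSet accepts an iterable and, as its name suggests, drops duplicates while keeping first-seen order:

a = OrderedSet([1, 2, 3])  # assumed iterable constructor
b = OrderedSet([3, 4])
merged = a.union(b)        # expected contents: 1, 2, 3, 4 in insertion order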
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/numpy/py3/numpy/lib/format.py
|
python
|
_wrap_header
|
(header, version)
|
return header_prefix + header + b' '*padlen + b'\n'
|
Takes a stringified header, and attaches the prefix and padding to it
|
Takes a stringified header, and attaches the prefix and padding to it
|
[
"Takes",
"a",
"stringified",
"header",
"and",
"attaches",
"the",
"prefix",
"and",
"padding",
"to",
"it"
] |
def _wrap_header(header, version):
    """
    Takes a stringified header, and attaches the prefix and padding to it
    """
    import struct
    assert version is not None
    fmt, encoding = _header_size_info[version]
    if not isinstance(header, bytes):  # always true on python 3
        header = header.encode(encoding)
    hlen = len(header) + 1
    padlen = ARRAY_ALIGN - ((MAGIC_LEN + struct.calcsize(fmt) + hlen) % ARRAY_ALIGN)
    try:
        header_prefix = magic(*version) + struct.pack(fmt, hlen + padlen)
    except struct.error:
        msg = "Header length {} too big for version={}".format(hlen, version)
        raise ValueError(msg) from None

    # Pad the header with spaces and a final newline such that the magic
    # string, the header-length short and the header are aligned on a
    # ARRAY_ALIGN byte boundary. This supports memory mapping of dtypes
    # aligned up to ARRAY_ALIGN on systems like Linux where mmap()
    # offset must be page-aligned (i.e. the beginning of the file).
    return header_prefix + header + b' '*padlen + b'\n'
|
[
"def",
"_wrap_header",
"(",
"header",
",",
"version",
")",
":",
"import",
"struct",
"assert",
"version",
"is",
"not",
"None",
"fmt",
",",
"encoding",
"=",
"_header_size_info",
"[",
"version",
"]",
"if",
"not",
"isinstance",
"(",
"header",
",",
"bytes",
")",
":",
"# always true on python 3",
"header",
"=",
"header",
".",
"encode",
"(",
"encoding",
")",
"hlen",
"=",
"len",
"(",
"header",
")",
"+",
"1",
"padlen",
"=",
"ARRAY_ALIGN",
"-",
"(",
"(",
"MAGIC_LEN",
"+",
"struct",
".",
"calcsize",
"(",
"fmt",
")",
"+",
"hlen",
")",
"%",
"ARRAY_ALIGN",
")",
"try",
":",
"header_prefix",
"=",
"magic",
"(",
"*",
"version",
")",
"+",
"struct",
".",
"pack",
"(",
"fmt",
",",
"hlen",
"+",
"padlen",
")",
"except",
"struct",
".",
"error",
":",
"msg",
"=",
"\"Header length {} too big for version={}\"",
".",
"format",
"(",
"hlen",
",",
"version",
")",
"raise",
"ValueError",
"(",
"msg",
")",
"from",
"None",
"# Pad the header with spaces and a final newline such that the magic",
"# string, the header-length short and the header are aligned on a",
"# ARRAY_ALIGN byte boundary. This supports memory mapping of dtypes",
"# aligned up to ARRAY_ALIGN on systems like Linux where mmap()",
"# offset must be page-aligned (i.e. the beginning of the file).",
"return",
"header_prefix",
"+",
"header",
"+",
"b' '",
"*",
"padlen",
"+",
"b'\\n'"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/lib/format.py#L367-L389
|
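Editor-added sketch of the alignment arithmetic in _wrap_header above. ARRAY_ALIGN = 64, MAGIC_LEN = 8 and fmt = '<H' are assumed from numpy's .npy format for version (1, 0); the header bytes are arbitrary:

import struct

ARRAY_ALIGN = 64          # assumed from numpy.lib.format
MAGIC_LEN = 8             # b'\x93NUMPY' plus 2 version bytes
fmt = '<H'                # version (1, 0) header-length field

header = b"{'descr': '<f8', 'fortran_order': False, 'shape': (3,)} "
hlen = len(header) + 1    # +1 for the trailing newline
padlen = ARRAY_ALIGN - ((MAGIC_LEN + struct.calcsize(fmt) + hlen) % ARRAY_ALIGN)
total = MAGIC_LEN + struct.calcsize(fmt) + hlen + padlen
assert total % ARRAY_ALIGN == 0  # data section starts on a 64-byte boundary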
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python/src/Lib/distutils/command/install_egg_info.py
|
python
|
safe_version
|
(version)
|
return re.sub('[^A-Za-z0-9.]+', '-', version)
|
Convert an arbitrary string to a standard version string
Spaces become dots, and all other non-alphanumeric characters become
dashes, with runs of multiple dashes condensed to a single dash.
|
Convert an arbitrary string to a standard version string
|
[
"Convert",
"an",
"arbitrary",
"string",
"to",
"a",
"standard",
"version",
"string"
] |
def safe_version(version):
"""Convert an arbitrary string to a standard version string
Spaces become dots, and all other non-alphanumeric characters become
dashes, with runs of multiple dashes condensed to a single dash.
"""
version = version.replace(' ','.')
return re.sub('[^A-Za-z0-9.]+', '-', version)
|
[
"def",
"safe_version",
"(",
"version",
")",
":",
"version",
"=",
"version",
".",
"replace",
"(",
"' '",
",",
"'.'",
")",
"return",
"re",
".",
"sub",
"(",
"'[^A-Za-z0-9.]+'",
",",
"'-'",
",",
"version",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/distutils/command/install_egg_info.py#L63-L70
|
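Editor-added, self-contained re-run of safe_version from the row above, for illustration:

import re

def safe_version(version):
    version = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', version)

assert safe_version('1.0 beta 2') == '1.0.beta.2'
assert safe_version('2.1_rc-1!') == '2.1-rc-1-'  # runs of punctuation collapse to one dash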
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
contrib/gizmos/gtk/gizmos.py
|
python
|
ThinSplitterWindow.__init__
|
(self, *args, **kwargs)
|
__init__(self, Window parent, int id=-1, Point pos=DefaultPosition,
Size size=DefaultSize, long style=wxSP_3D|wxCLIP_CHILDREN) -> ThinSplitterWindow
|
__init__(self, Window parent, int id=-1, Point pos=DefaultPosition,
Size size=DefaultSize, long style=wxSP_3D|wxCLIP_CHILDREN) -> ThinSplitterWindow
|
[
"__init__",
"(",
"self",
"Window",
"parent",
"int",
"id",
"=",
"-",
"1",
"Point",
"pos",
"=",
"DefaultPosition",
"Size",
"size",
"=",
"DefaultSize",
"long",
"style",
"=",
"wxSP_3D|wxCLIP_CHILDREN",
")",
"-",
">",
"ThinSplitterWindow"
] |
def __init__(self, *args, **kwargs):
"""
__init__(self, Window parent, int id=-1, Point pos=DefaultPosition,
Size size=DefaultSize, long style=wxSP_3D|wxCLIP_CHILDREN) -> ThinSplitterWindow
"""
_gizmos.ThinSplitterWindow_swiginit(self,_gizmos.new_ThinSplitterWindow(*args, **kwargs))
self._setOORInfo(self)
|
[
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"_gizmos",
".",
"ThinSplitterWindow_swiginit",
"(",
"self",
",",
"_gizmos",
".",
"new_ThinSplitterWindow",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
")",
"self",
".",
"_setOORInfo",
"(",
"self",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/contrib/gizmos/gtk/gizmos.py#L272-L278
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/richtext.py
|
python
|
RichTextPlainText.GetFirstLineBreakPosition
|
(*args, **kwargs)
|
return _richtext.RichTextPlainText_GetFirstLineBreakPosition(*args, **kwargs)
|
GetFirstLineBreakPosition(self, long pos) -> long
|
GetFirstLineBreakPosition(self, long pos) -> long
|
[
"GetFirstLineBreakPosition",
"(",
"self",
"long",
"pos",
")",
"-",
">",
"long"
] |
def GetFirstLineBreakPosition(*args, **kwargs):
"""GetFirstLineBreakPosition(self, long pos) -> long"""
return _richtext.RichTextPlainText_GetFirstLineBreakPosition(*args, **kwargs)
|
[
"def",
"GetFirstLineBreakPosition",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_richtext",
".",
"RichTextPlainText_GetFirstLineBreakPosition",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/richtext.py#L2092-L2094
|
|
fenderglass/Flye
|
2013acc650356cc934a2a9b82eb90af260c8b52b
|
flye/utils/fasta_parser.py
|
python
|
_read_fastq
|
(file_handle)
|
bytes input / output
|
bytes input / output
|
[
"bytes",
"input",
"/",
"output"
] |
def _read_fastq(file_handle):
"""
bytes input / output
"""
seq = None
qual = None
header = None
state_counter = 0
for no, line in enumerate(file_handle):
line = line.strip()
if not line:
continue
if state_counter == 0:
if line[0 : 1] != b"@":
raise FastaError("Fastq format error: {0} at line {1}"
.format(file_handle.name, no))
header = line[1:].split()[0]
if state_counter == 1:
seq = line
if state_counter == 2:
if line[0 : 1] != b"+":
raise FastaError("Fastq format error: {0} at line {1}"
.format(file_handle.name, no))
if state_counter == 3:
qual = line
yield header, seq, qual
state_counter = (state_counter + 1) % 4
|
[
"def",
"_read_fastq",
"(",
"file_handle",
")",
":",
"seq",
"=",
"None",
"qual",
"=",
"None",
"header",
"=",
"None",
"state_counter",
"=",
"0",
"for",
"no",
",",
"line",
"in",
"enumerate",
"(",
"file_handle",
")",
":",
"line",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"not",
"line",
":",
"continue",
"if",
"state_counter",
"==",
"0",
":",
"if",
"line",
"[",
"0",
":",
"1",
"]",
"!=",
"b\"@\"",
":",
"raise",
"FastaError",
"(",
"\"Fastq format error: {0} at line {1}\"",
".",
"format",
"(",
"file_handle",
".",
"name",
",",
"no",
")",
")",
"header",
"=",
"line",
"[",
"1",
":",
"]",
".",
"split",
"(",
")",
"[",
"0",
"]",
"if",
"state_counter",
"==",
"1",
":",
"seq",
"=",
"line",
"if",
"state_counter",
"==",
"2",
":",
"if",
"line",
"[",
"0",
":",
"1",
"]",
"!=",
"b\"+\"",
":",
"raise",
"FastaError",
"(",
"\"Fastq format error: {0} at line {1}\"",
".",
"format",
"(",
"file_handle",
".",
"name",
",",
"no",
")",
")",
"if",
"state_counter",
"==",
"3",
":",
"qual",
"=",
"line",
"yield",
"header",
",",
"seq",
",",
"qual",
"state_counter",
"=",
"(",
"state_counter",
"+",
"1",
")",
"%",
"4"
] |
https://github.com/fenderglass/Flye/blob/2013acc650356cc934a2a9b82eb90af260c8b52b/flye/utils/fasta_parser.py#L154-L186
|
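Editor-added minimal sketch of the 4-line FASTQ state machine above, with the error checks dropped so it runs on an in-memory record:

import io

fastq = io.BytesIO(b"@read1\nACGT\n+\nFFFF\n")
records = []
state = 0
header = seq = None
for line in fastq:
    line = line.strip()
    if not line:
        continue
    if state == 0:
        header = line[1:].split()[0]         # b"read1"
    elif state == 1:
        seq = line
    elif state == 3:
        records.append((header, seq, line))  # line is the quality string
    state = (state + 1) % 4

assert records == [(b"read1", b"ACGT", b"FFFF")]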
||
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/eager/wrap_function.py
|
python
|
_filter_returned_ops
|
(fn)
|
return wrap_and_filter_returned_ops, returned_ops
|
Filtering out any ops returned by function.
Args:
fn: a function
Returns:
A tuple of (
Wrapped function that returns `None` in place of any ops,
dict that maps the index in the flat output structure to the returned op
)
|
Filtering out any ops returned by function.
|
[
"Filtering",
"out",
"any",
"ops",
"returned",
"by",
"function",
"."
] |
def _filter_returned_ops(fn):
"""Filtering out any ops returned by function.
Args:
fn: a function
Returns:
A tuple of (
Wrapped function that returns `None` in place of any ops,
dict that maps the index in the flat output structure to the returned op
)
"""
returned_ops = {}
def wrap_and_filter_returned_ops(*args, **kwargs):
outputs = fn(*args, **kwargs)
flat_outputs = nest.flatten(outputs)
for n in range(len(flat_outputs)):
output = flat_outputs[n]
if isinstance(output, ops.Operation):
returned_ops[n] = output
flat_outputs[n] = None
return nest.pack_sequence_as(outputs, flat_outputs)
return wrap_and_filter_returned_ops, returned_ops
|
[
"def",
"_filter_returned_ops",
"(",
"fn",
")",
":",
"returned_ops",
"=",
"{",
"}",
"def",
"wrap_and_filter_returned_ops",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"outputs",
"=",
"fn",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"flat_outputs",
"=",
"nest",
".",
"flatten",
"(",
"outputs",
")",
"for",
"n",
"in",
"range",
"(",
"len",
"(",
"flat_outputs",
")",
")",
":",
"output",
"=",
"flat_outputs",
"[",
"n",
"]",
"if",
"isinstance",
"(",
"output",
",",
"ops",
".",
"Operation",
")",
":",
"returned_ops",
"[",
"n",
"]",
"=",
"output",
"flat_outputs",
"[",
"n",
"]",
"=",
"None",
"return",
"nest",
".",
"pack_sequence_as",
"(",
"outputs",
",",
"flat_outputs",
")",
"return",
"wrap_and_filter_returned_ops",
",",
"returned_ops"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/eager/wrap_function.py#L380-L404
|
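Editor-added sketch of the same wrap-and-filter pattern without TensorFlow. This is hypothetical: a flat list stands in for nest.flatten, and str plays the role of ops.Operation:

def filter_returned(fn, sentinel_type):
    captured = {}
    def wrapped(*args, **kwargs):
        outputs = list(fn(*args, **kwargs))  # flat list stands in for nest.flatten
        for n, out in enumerate(outputs):
            if isinstance(out, sentinel_type):
                captured[n] = out            # remember position of the filtered value
                outputs[n] = None            # replace it with None in the output
        return outputs
    return wrapped, captured

wrapped, ops = filter_returned(lambda: [1, 'op', 2], str)
assert wrapped() == [1, None, 2] and ops == {1: 'op'}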
|
miyosuda/TensorFlowAndroidMNIST
|
7b5a4603d2780a8a2834575706e9001977524007
|
jni-build/jni/include/tensorflow/contrib/distributions/python/ops/inverse_gamma.py
|
python
|
InverseGamma.get_event_shape
|
(self)
|
return self._get_event_shape
|
`TensorShape` available at graph construction time.
Same meaning as `event_shape`. May be only partially defined.
Returns:
`TensorShape` object.
|
`TensorShape` available at graph construction time.
|
[
"TensorShape",
"available",
"at",
"graph",
"construction",
"time",
"."
] |
def get_event_shape(self):
"""`TensorShape` available at graph construction time.
Same meaning as `event_shape`. May be only partially defined.
Returns:
`TensorShape` object.
"""
return self._get_event_shape
|
[
"def",
"get_event_shape",
"(",
"self",
")",
":",
"return",
"self",
".",
"_get_event_shape"
] |
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/contrib/distributions/python/ops/inverse_gamma.py#L176-L184
|
|
ablab/quast
|
5f6709528129a6ad266a6b24ef3f40b88f0fe04b
|
quast_libs/site_packages/bz2.py
|
python
|
BZ2File.__init__
|
(self, filename, mode="r", buffering=None, compresslevel=9)
|
Open a bzip2-compressed file.
If filename is a str or bytes object, it gives the name
of the file to be opened. Otherwise, it should be a file object,
which will be used to read or write the compressed data.
mode can be 'r' for reading (default), 'w' for (over)writing,
'x' for creating exclusively, or 'a' for appending. These can
equivalently be given as 'rb', 'wb', 'xb', and 'ab'.
buffering is ignored. Its use is deprecated.
If mode is 'w', 'x' or 'a', compresslevel can be a number between 1
and 9 specifying the level of compression: 1 produces the least
compression, and 9 (default) produces the most compression.
If mode is 'r', the input file may be the concatenation of
multiple compressed streams.
|
Open a bzip2-compressed file.
|
[
"Open",
"a",
"bzip2",
"-",
"compressed",
"file",
"."
] |
def __init__(self, filename, mode="r", buffering=None, compresslevel=9):
"""Open a bzip2-compressed file.
If filename is a str or bytes object, it gives the name
of the file to be opened. Otherwise, it should be a file object,
which will be used to read or write the compressed data.
mode can be 'r' for reading (default), 'w' for (over)writing,
'x' for creating exclusively, or 'a' for appending. These can
equivalently be given as 'rb', 'wb', 'xb', and 'ab'.
buffering is ignored. Its use is deprecated.
If mode is 'w', 'x' or 'a', compresslevel can be a number between 1
and 9 specifying the level of compression: 1 produces the least
compression, and 9 (default) produces the most compression.
If mode is 'r', the input file may be the concatenation of
multiple compressed streams.
"""
# This lock must be recursive, so that BufferedIOBase's
# writelines() does not deadlock.
self._lock = RLock()
self._fp = None
self._closefp = False
self._mode = _MODE_CLOSED
if buffering is not None:
warnings.warn("Use of 'buffering' argument is deprecated",
DeprecationWarning)
if not (1 <= compresslevel <= 9):
raise ValueError("compresslevel must be between 1 and 9")
if mode in ("", "r", "rb"):
mode = "rb"
mode_code = _MODE_READ
elif mode in ("w", "wb"):
mode = "wb"
mode_code = _MODE_WRITE
self._compressor = BZ2Compressor(compresslevel)
elif mode in ("x", "xb"):
mode = "xb"
mode_code = _MODE_WRITE
self._compressor = BZ2Compressor(compresslevel)
elif mode in ("a", "ab"):
mode = "ab"
mode_code = _MODE_WRITE
self._compressor = BZ2Compressor(compresslevel)
else:
raise ValueError("Invalid mode: %r" % (mode,))
if isinstance(filename, (str, bytes)):
self._fp = _builtin_open(filename, mode)
self._closefp = True
self._mode = mode_code
elif hasattr(filename, "read") or hasattr(filename, "write"):
self._fp = filename
self._mode = mode_code
else:
raise TypeError("filename must be a str or bytes object, or a file")
if self._mode == _MODE_READ:
raw = _compression.DecompressReader(self._fp,
BZ2Decompressor, trailing_error=OSError)
self._buffer = io.BufferedReader(raw)
else:
self._pos = 0
|
[
"def",
"__init__",
"(",
"self",
",",
"filename",
",",
"mode",
"=",
"\"r\"",
",",
"buffering",
"=",
"None",
",",
"compresslevel",
"=",
"9",
")",
":",
"# This lock must be recursive, so that BufferedIOBase's",
"# writelines() does not deadlock.",
"self",
".",
"_lock",
"=",
"RLock",
"(",
")",
"self",
".",
"_fp",
"=",
"None",
"self",
".",
"_closefp",
"=",
"False",
"self",
".",
"_mode",
"=",
"_MODE_CLOSED",
"if",
"buffering",
"is",
"not",
"None",
":",
"warnings",
".",
"warn",
"(",
"\"Use of 'buffering' argument is deprecated\"",
",",
"DeprecationWarning",
")",
"if",
"not",
"(",
"1",
"<=",
"compresslevel",
"<=",
"9",
")",
":",
"raise",
"ValueError",
"(",
"\"compresslevel must be between 1 and 9\"",
")",
"if",
"mode",
"in",
"(",
"\"\"",
",",
"\"r\"",
",",
"\"rb\"",
")",
":",
"mode",
"=",
"\"rb\"",
"mode_code",
"=",
"_MODE_READ",
"elif",
"mode",
"in",
"(",
"\"w\"",
",",
"\"wb\"",
")",
":",
"mode",
"=",
"\"wb\"",
"mode_code",
"=",
"_MODE_WRITE",
"self",
".",
"_compressor",
"=",
"BZ2Compressor",
"(",
"compresslevel",
")",
"elif",
"mode",
"in",
"(",
"\"x\"",
",",
"\"xb\"",
")",
":",
"mode",
"=",
"\"xb\"",
"mode_code",
"=",
"_MODE_WRITE",
"self",
".",
"_compressor",
"=",
"BZ2Compressor",
"(",
"compresslevel",
")",
"elif",
"mode",
"in",
"(",
"\"a\"",
",",
"\"ab\"",
")",
":",
"mode",
"=",
"\"ab\"",
"mode_code",
"=",
"_MODE_WRITE",
"self",
".",
"_compressor",
"=",
"BZ2Compressor",
"(",
"compresslevel",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Invalid mode: %r\"",
"%",
"(",
"mode",
",",
")",
")",
"if",
"isinstance",
"(",
"filename",
",",
"(",
"str",
",",
"bytes",
")",
")",
":",
"self",
".",
"_fp",
"=",
"_builtin_open",
"(",
"filename",
",",
"mode",
")",
"self",
".",
"_closefp",
"=",
"True",
"self",
".",
"_mode",
"=",
"mode_code",
"elif",
"hasattr",
"(",
"filename",
",",
"\"read\"",
")",
"or",
"hasattr",
"(",
"filename",
",",
"\"write\"",
")",
":",
"self",
".",
"_fp",
"=",
"filename",
"self",
".",
"_mode",
"=",
"mode_code",
"else",
":",
"raise",
"TypeError",
"(",
"\"filename must be a str or bytes object, or a file\"",
")",
"if",
"self",
".",
"_mode",
"==",
"_MODE_READ",
":",
"raw",
"=",
"_compression",
".",
"DecompressReader",
"(",
"self",
".",
"_fp",
",",
"BZ2Decompressor",
",",
"trailing_error",
"=",
"OSError",
")",
"self",
".",
"_buffer",
"=",
"io",
".",
"BufferedReader",
"(",
"raw",
")",
"else",
":",
"self",
".",
"_pos",
"=",
"0"
] |
https://github.com/ablab/quast/blob/5f6709528129a6ad266a6b24ef3f40b88f0fe04b/quast_libs/site_packages/bz2.py#L46-L113
|
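Editor-added round-trip using the standard-library bz2 module, which shares the constructor semantics of the vendored copy above:

import bz2, os, tempfile

path = os.path.join(tempfile.mkdtemp(), 'demo.bz2')
with bz2.BZ2File(path, 'w', compresslevel=9) as f:
    f.write(b'hello world\n')
with bz2.BZ2File(path, 'r') as f:
    assert f.read() == b'hello world\n'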
||
okex/V3-Open-API-SDK
|
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
|
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_internal/index.py
|
python
|
_is_url_like_archive
|
(url)
|
return False
|
Return whether the URL looks like an archive.
|
Return whether the URL looks like an archive.
|
[
"Return",
"whether",
"the",
"URL",
"looks",
"like",
"an",
"archive",
"."
] |
def _is_url_like_archive(url):
# type: (str) -> bool
"""Return whether the URL looks like an archive.
"""
filename = Link(url).filename
for bad_ext in ARCHIVE_EXTENSIONS:
if filename.endswith(bad_ext):
return True
return False
|
[
"def",
"_is_url_like_archive",
"(",
"url",
")",
":",
"# type: (str) -> bool",
"filename",
"=",
"Link",
"(",
"url",
")",
".",
"filename",
"for",
"bad_ext",
"in",
"ARCHIVE_EXTENSIONS",
":",
"if",
"filename",
".",
"endswith",
"(",
"bad_ext",
")",
":",
"return",
"True",
"return",
"False"
] |
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_internal/index.py#L90-L98
|
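Editor-added, dependency-free sketch of the same check. The ARCHIVE_EXTENSIONS values are an assumed subset, and a plain basename replaces pip's Link helper:

from urllib.parse import urlparse
import posixpath

ARCHIVE_EXTENSIONS = ('.zip', '.tar.gz', '.tar.bz2', '.whl')  # assumed subset

def is_url_like_archive(url):
    filename = posixpath.basename(urlparse(url).path)
    return filename.endswith(ARCHIVE_EXTENSIONS)

assert is_url_like_archive('https://example.com/pkg-1.0.tar.gz')
assert not is_url_like_archive('https://example.com/project/')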
|
pytorch/pytorch
|
7176c92687d3cc847cc046bf002269c6949a21c2
|
torch/distributed/elastic/rendezvous/etcd_store.py
|
python
|
EtcdStore.wait
|
(self, keys, override_timeout: Optional[datetime.timedelta] = None)
|
Waits until all of the keys are published, or until timeout.
Raises:
LookupError - if timeout occurs
|
Waits until all of the keys are published, or until timeout.
|
[
"Waits",
"until",
"all",
"of",
"the",
"keys",
"are",
"published",
"or",
"until",
"timeout",
"."
] |
def wait(self, keys, override_timeout: Optional[datetime.timedelta] = None):
"""
Waits until all of the keys are published, or until timeout.
Raises:
LookupError - if timeout occurs
"""
b64_keys = [self.prefix + self._encode(key) for key in keys]
kvs = self._try_wait_get(b64_keys, override_timeout)
if kvs is None:
raise LookupError("Timeout while waiting for keys in EtcdStore")
|
[
"def",
"wait",
"(",
"self",
",",
"keys",
",",
"override_timeout",
":",
"Optional",
"[",
"datetime",
".",
"timedelta",
"]",
"=",
"None",
")",
":",
"b64_keys",
"=",
"[",
"self",
".",
"prefix",
"+",
"self",
".",
"_encode",
"(",
"key",
")",
"for",
"key",
"in",
"keys",
"]",
"kvs",
"=",
"self",
".",
"_try_wait_get",
"(",
"b64_keys",
",",
"override_timeout",
")",
"if",
"kvs",
"is",
"None",
":",
"raise",
"LookupError",
"(",
"\"Timeout while waiting for keys in EtcdStore\"",
")"
] |
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/distributed/elastic/rendezvous/etcd_store.py#L116-L126
|
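Editor-added generic poll-until-deadline helper sketching the semantics of wait above (hypothetical; the real store polls etcd via _try_wait_get rather than a predicate):

import datetime, time

def wait_for(predicate, timeout, poll=0.05):
    # Poll until the predicate holds or the deadline passes, like EtcdStore.wait.
    deadline = datetime.datetime.now() + timeout
    while datetime.datetime.now() < deadline:
        if predicate():
            return True
        time.sleep(poll)
    raise LookupError('Timeout while waiting')

published = {'rank_0'}
assert wait_for(lambda: 'rank_0' in published, datetime.timedelta(seconds=1))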
||
smilehao/xlua-framework
|
a03801538be2b0e92d39332d445b22caca1ef61f
|
ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/build/lib/google/protobuf/internal/containers.py
|
python
|
RepeatedScalarFieldContainer.__setitem__
|
(self, key, value)
|
Sets the item on the specified position.
|
Sets the item on the specified position.
|
[
"Sets",
"the",
"item",
"on",
"the",
"specified",
"position",
"."
] |
def __setitem__(self, key, value):
"""Sets the item on the specified position."""
self._type_checker.CheckValue(value)
self._values[key] = value
self._message_listener.Modified()
|
[
"def",
"__setitem__",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"self",
".",
"_type_checker",
".",
"CheckValue",
"(",
"value",
")",
"self",
".",
"_values",
"[",
"key",
"]",
"=",
"value",
"self",
".",
"_message_listener",
".",
"Modified",
"(",
")"
] |
https://github.com/smilehao/xlua-framework/blob/a03801538be2b0e92d39332d445b22caca1ef61f/ConfigData/trunk/tools/protobuf-2.5.0/protobuf-2.5.0/python/build/lib/google/protobuf/internal/containers.py#L147-L151
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pkg_resources/__init__.py
|
python
|
Environment.__iter__
|
(self)
|
Yield the unique project names of the available distributions
|
Yield the unique project names of the available distributions
|
[
"Yield",
"the",
"unique",
"project",
"names",
"of",
"the",
"available",
"distributions"
] |
def __iter__(self):
"""Yield the unique project names of the available distributions"""
for key in self._distmap.keys():
if self[key]:
yield key
|
[
"def",
"__iter__",
"(",
"self",
")",
":",
"for",
"key",
"in",
"self",
".",
"_distmap",
".",
"keys",
"(",
")",
":",
"if",
"self",
"[",
"key",
"]",
":",
"yield",
"key"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pkg_resources/__init__.py#L1160-L1164
|
||
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/metrics_impl.py
|
python
|
_sparse_true_positive_at_k
|
(labels,
predictions_idx,
class_id=None,
weights=None,
name=None)
|
Calculates true positives for recall@k and precision@k.
If `class_id` is specified, calculate binary true positives for `class_id`
only.
If `class_id` is not specified, calculate metrics for `k` predicted vs
`n` label classes, where `n` is the 2nd dimension of `labels_sparse`.
Args:
labels: `int64` `Tensor` or `SparseTensor` with shape
[D1, ... DN, num_labels], where N >= 1 and num_labels is the number of
target classes for the associated prediction. Commonly, N=1 and `labels`
has shape [batch_size, num_labels]. [D1, ... DN] must match
`predictions_idx`.
predictions_idx: 1-D or higher `int64` `Tensor` with last dimension `k`,
top `k` predicted classes. For rank `n`, the first `n-1` dimensions must
match `labels`.
class_id: Class for which we want binary metrics.
weights: `Tensor` whose rank is either 0, or n-1, where n is the rank of
`labels`. If the latter, it must be broadcastable to `labels` (i.e., all
dimensions must be either `1`, or the same as the corresponding `labels`
dimension).
name: Name of operation.
Returns:
A [D1, ... DN] `Tensor` of true positive counts.
|
Calculates true positives for recall@k and precision@k.
|
[
"Calculates",
"true",
"positives",
"for",
"recall@k",
"and",
"precision@k",
"."
] |
def _sparse_true_positive_at_k(labels,
predictions_idx,
class_id=None,
weights=None,
name=None):
"""Calculates true positives for recall@k and precision@k.
If `class_id` is specified, calculate binary true positives for `class_id`
only.
If `class_id` is not specified, calculate metrics for `k` predicted vs
`n` label classes, where `n` is the 2nd dimension of `labels_sparse`.
Args:
labels: `int64` `Tensor` or `SparseTensor` with shape
[D1, ... DN, num_labels], where N >= 1 and num_labels is the number of
target classes for the associated prediction. Commonly, N=1 and `labels`
has shape [batch_size, num_labels]. [D1, ... DN] must match
`predictions_idx`.
predictions_idx: 1-D or higher `int64` `Tensor` with last dimension `k`,
top `k` predicted classes. For rank `n`, the first `n-1` dimensions must
match `labels`.
class_id: Class for which we want binary metrics.
weights: `Tensor` whose rank is either 0, or n-1, where n is the rank of
`labels`. If the latter, it must be broadcastable to `labels` (i.e., all
dimensions must be either `1`, or the same as the corresponding `labels`
dimension).
name: Name of operation.
Returns:
A [D1, ... DN] `Tensor` of true positive counts.
"""
with ops.name_scope(name, 'true_positives',
(predictions_idx, labels, weights)):
labels, predictions_idx = _maybe_select_class_id(labels, predictions_idx,
class_id)
tp = sets.set_size(sets.set_intersection(predictions_idx, labels))
tp = math_ops.cast(tp, dtypes.float64)
if weights is not None:
with ops.control_dependencies((weights_broadcast_ops.assert_broadcastable(
weights, tp),)):
weights = math_ops.cast(weights, dtypes.float64)
tp = math_ops.multiply(tp, weights)
return tp
|
[
"def",
"_sparse_true_positive_at_k",
"(",
"labels",
",",
"predictions_idx",
",",
"class_id",
"=",
"None",
",",
"weights",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"'true_positives'",
",",
"(",
"predictions_idx",
",",
"labels",
",",
"weights",
")",
")",
":",
"labels",
",",
"predictions_idx",
"=",
"_maybe_select_class_id",
"(",
"labels",
",",
"predictions_idx",
",",
"class_id",
")",
"tp",
"=",
"sets",
".",
"set_size",
"(",
"sets",
".",
"set_intersection",
"(",
"predictions_idx",
",",
"labels",
")",
")",
"tp",
"=",
"math_ops",
".",
"cast",
"(",
"tp",
",",
"dtypes",
".",
"float64",
")",
"if",
"weights",
"is",
"not",
"None",
":",
"with",
"ops",
".",
"control_dependencies",
"(",
"(",
"weights_broadcast_ops",
".",
"assert_broadcastable",
"(",
"weights",
",",
"tp",
")",
",",
")",
")",
":",
"weights",
"=",
"math_ops",
".",
"cast",
"(",
"weights",
",",
"dtypes",
".",
"float64",
")",
"tp",
"=",
"math_ops",
".",
"multiply",
"(",
"tp",
",",
"weights",
")",
"return",
"tp"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/metrics_impl.py#L2283-L2325
|
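Editor-added plain-Python illustration of the quantity computed above: per example, the number of top-k predicted class ids that appear in the label set (no weights, no class_id filtering):

def true_positives_at_k(labels, predictions_idx):
    # labels: list of label-id sets; predictions_idx: list of top-k id lists
    return [len(set(preds) & labs) for preds, labs in zip(predictions_idx, labels)]

labels = [{1, 3}, {0}]
top_k = [[3, 5, 1], [2, 4, 6]]
assert true_positives_at_k(labels, top_k) == [2, 0]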
||
MegEngine/MegEngine
|
ce9ad07a27ec909fb8db4dd67943d24ba98fb93a
|
imperative/python/megengine/traced_module/node.py
|
python
|
Node.qualname
|
(self)
|
return self._qualname
|
r"""Get the `qualname` of this Node. The `qualname` can be used to get the
submodule from the traced Module or Module.
Example:
.. code-block::
import megengine.module as M
import megengine.functional as F
import megengine.traced_module as tm
import megengine as mge
class block(M.Module):
def __init__(self):
super().__init__()
self.param = mge.Tensor([1.])
self.relu = M.ReLU()
def forward(self, x):
x = x + self.param
return self.relu(F.relu(x))
class module(M.Module):
def __init__(self):
super().__init__()
self.block = block()
def forward(self, x):
x = self.block(x)
return x
net = module()
traced_net = tm.trace_module(net, mge.Tensor([0.]))
traced_net = traced_net.flatten()
out_node = traced_net.graph.outputs[0]
# qualname : "module.block.relu.[out]"
qualname = out_node.qualname
# qualname : "block.relu"
qualname = qualname.split(".", 1)[-1].rsplit(".", 1)[0]
assert qualname in list(map(lambda x: x[0], net.named_modules()))
assert qualname in list(map(lambda x: x[0], traced_net.named_modules()))
|
r"""Get the `qualname` of this Node. The `qualname` can be used to get the
submodule from the traced Module or Module.
|
[
"r",
"Get",
"the",
"qualname",
"of",
"this",
"Node",
".",
"The",
"qualname",
"can",
"be",
"used",
"to",
"get",
"the",
"submodule",
"from",
"the",
"traced",
"Module",
"or",
"Module",
"."
] |
def qualname(self):
r"""Get the `qualname` of this Node. The `qualname` can be used to get the
submodule from the traced Module or Module.
Example:
.. code-block::
import megengine.module as M
import megengine.functional as F
import megengine.traced_module as tm
import megengine as mge
class block(M.Module):
def __init__(self):
super().__init__()
self.param = mge.Tensor([1.])
self.relu = M.ReLU()
def forward(self, x):
x = x + self.param
return self.relu(F.relu(x))
class module(M.Module):
def __init__(self):
super().__init__()
self.block = block()
def forward(self, x):
x = self.block(x)
return x
net = module()
traced_net = tm.trace_module(net, mge.Tensor([0.]))
traced_net = traced_net.flatten()
out_node = traced_net.graph.outputs[0]
# qualname : "module.block.relu.[out]"
qualname = out_node.qualname
# qualname : "block.relu"
qualname = qualname.split(".", 1)[-1].rsplit(".", 1)[0]
assert qualname in list(map(lambda x: x[0], net.named_modules()))
assert qualname in list(map(lambda x: x[0], traced_net.named_modules()))
"""
return self._qualname
|
[
"def",
"qualname",
"(",
"self",
")",
":",
"return",
"self",
".",
"_qualname"
] |
https://github.com/MegEngine/MegEngine/blob/ce9ad07a27ec909fb8db4dd67943d24ba98fb93a/imperative/python/megengine/traced_module/node.py#L87-L131
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/mailbox.py
|
python
|
_ProxyFile.tell
|
(self)
|
return self._pos
|
Return the position.
|
Return the position.
|
[
"Return",
"the",
"position",
"."
] |
def tell(self):
"""Return the position."""
return self._pos
|
[
"def",
"tell",
"(",
"self",
")",
":",
"return",
"self",
".",
"_pos"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/mailbox.py#L1894-L1896
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/botocore/monitoring.py
|
python
|
BaseMonitorEvent.__init__
|
(self, service, operation, timestamp)
|
Base monitor event
:type service: str
:param service: A string identifying the service associated to
the event
:type operation: str
:param operation: A string identifying the operation of service
associated to the event
:type timestamp: int
:param timestamp: Epoch time in milliseconds from when the event began
|
Base monitor event
|
[
"Base",
"monitor",
"event"
] |
def __init__(self, service, operation, timestamp):
"""Base monitor event
:type service: str
:param service: A string identifying the service associated to
the event
:type operation: str
:param operation: A string identifying the operation of service
associated to the event
:type timestamp: int
:param timestamp: Epoch time in milliseconds from when the event began
"""
self.service = service
self.operation = operation
self.timestamp = timestamp
|
[
"def",
"__init__",
"(",
"self",
",",
"service",
",",
"operation",
",",
"timestamp",
")",
":",
"self",
".",
"service",
"=",
"service",
"self",
".",
"operation",
"=",
"operation",
"self",
".",
"timestamp",
"=",
"timestamp"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/botocore/monitoring.py#L157-L173
|
||
Genius-x/genius-x
|
9fc9f194e6d1fb92dd0e33d43db19ddb67cda7b0
|
cocos2d/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py
|
python
|
Type.is_restrict_qualified
|
(self)
|
return conf.lib.clang_isRestrictQualifiedType(self)
|
Determine whether a Type has the "restrict" qualifier set.
This does not look through typedefs that may have added "restrict" at
a different level.
|
Determine whether a Type has the "restrict" qualifier set.
|
[
"Determine",
"whether",
"a",
"Type",
"has",
"the",
"restrict",
"qualifier",
"set",
"."
] |
def is_restrict_qualified(self):
"""Determine whether a Type has the "restrict" qualifier set.
This does not look through typedefs that may have added "restrict" at
a different level.
"""
return conf.lib.clang_isRestrictQualifiedType(self)
|
[
"def",
"is_restrict_qualified",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_isRestrictQualifiedType",
"(",
"self",
")"
] |
https://github.com/Genius-x/genius-x/blob/9fc9f194e6d1fb92dd0e33d43db19ddb67cda7b0/cocos2d/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py#L1580-L1586
|
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/telemetry/telemetry/internal/platform/gpu_device.py
|
python
|
GPUDevice.device_id
|
(self)
|
return self._device_id
|
The GPU device's PCI ID as a number, or 0 if not available.
Most desktop machines supply this information rather than the
vendor and device strings.
|
The GPU device's PCI ID as a number, or 0 if not available.
|
[
"The",
"GPU",
"device",
"s",
"PCI",
"ID",
"as",
"a",
"number",
"or",
"0",
"if",
"not",
"available",
"."
] |
def device_id(self):
"""The GPU device's PCI ID as a number, or 0 if not available.
Most desktop machines supply this information rather than the
vendor and device strings."""
return self._device_id
|
[
"def",
"device_id",
"(",
"self",
")",
":",
"return",
"self",
".",
"_device_id"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/telemetry/internal/platform/gpu_device.py#L61-L66
|
|
hfinkel/llvm-project-cxxjit
|
91084ef018240bbb8e24235ff5cd8c355a9c1a1e
|
llvm/utils/benchmark/tools/gbench/report.py
|
python
|
generate_difference_report
|
(json1, json2, use_color=True)
|
return output_strs
|
Calculate and report the difference between each test of two benchmark
runs specified as 'json1' and 'json2'.
|
Calculate and report the difference between each test of two benchmark
runs specified as 'json1' and 'json2'.
|
[
"Calculate",
"and",
"report",
"the",
"difference",
"between",
"each",
"test",
"of",
"two",
"benchmarks",
"runs",
"specified",
"as",
"json1",
"and",
"json2",
"."
] |
def generate_difference_report(json1, json2, use_color=True):
"""
Calculate and report the difference between each test of two benchmark
runs specified as 'json1' and 'json2'.
"""
first_col_width = find_longest_name(json1['benchmarks'])
def find_test(name):
for b in json2['benchmarks']:
if b['name'] == name:
return b
return None
first_col_width = max(first_col_width, len('Benchmark'))
first_line = "{:<{}s}Time CPU Time Old Time New CPU Old CPU New".format(
'Benchmark', 12 + first_col_width)
output_strs = [first_line, '-' * len(first_line)]
gen = (bn for bn in json1['benchmarks'] if 'real_time' in bn and 'cpu_time' in bn)
for bn in gen:
other_bench = find_test(bn['name'])
if not other_bench:
continue
if bn['time_unit'] != other_bench['time_unit']:
continue
def get_color(res):
if res > 0.05:
return BC_FAIL
elif res > -0.07:
return BC_WHITE
else:
return BC_CYAN
fmt_str = "{}{:<{}s}{endc}{}{:+16.4f}{endc}{}{:+16.4f}{endc}{:14.0f}{:14.0f}{endc}{:14.0f}{:14.0f}"
tres = calculate_change(bn['real_time'], other_bench['real_time'])
cpures = calculate_change(bn['cpu_time'], other_bench['cpu_time'])
output_strs += [color_format(use_color, fmt_str,
BC_HEADER, bn['name'], first_col_width,
get_color(tres), tres, get_color(cpures), cpures,
bn['real_time'], other_bench['real_time'],
bn['cpu_time'], other_bench['cpu_time'],
endc=BC_ENDC)]
return output_strs
|
[
"def",
"generate_difference_report",
"(",
"json1",
",",
"json2",
",",
"use_color",
"=",
"True",
")",
":",
"first_col_width",
"=",
"find_longest_name",
"(",
"json1",
"[",
"'benchmarks'",
"]",
")",
"def",
"find_test",
"(",
"name",
")",
":",
"for",
"b",
"in",
"json2",
"[",
"'benchmarks'",
"]",
":",
"if",
"b",
"[",
"'name'",
"]",
"==",
"name",
":",
"return",
"b",
"return",
"None",
"first_col_width",
"=",
"max",
"(",
"first_col_width",
",",
"len",
"(",
"'Benchmark'",
")",
")",
"first_line",
"=",
"\"{:<{}s}Time CPU Time Old Time New CPU Old CPU New\"",
".",
"format",
"(",
"'Benchmark'",
",",
"12",
"+",
"first_col_width",
")",
"output_strs",
"=",
"[",
"first_line",
",",
"'-'",
"*",
"len",
"(",
"first_line",
")",
"]",
"gen",
"=",
"(",
"bn",
"for",
"bn",
"in",
"json1",
"[",
"'benchmarks'",
"]",
"if",
"'real_time'",
"in",
"bn",
"and",
"'cpu_time'",
"in",
"bn",
")",
"for",
"bn",
"in",
"gen",
":",
"other_bench",
"=",
"find_test",
"(",
"bn",
"[",
"'name'",
"]",
")",
"if",
"not",
"other_bench",
":",
"continue",
"if",
"bn",
"[",
"'time_unit'",
"]",
"!=",
"other_bench",
"[",
"'time_unit'",
"]",
":",
"continue",
"def",
"get_color",
"(",
"res",
")",
":",
"if",
"res",
">",
"0.05",
":",
"return",
"BC_FAIL",
"elif",
"res",
">",
"-",
"0.07",
":",
"return",
"BC_WHITE",
"else",
":",
"return",
"BC_CYAN",
"fmt_str",
"=",
"\"{}{:<{}s}{endc}{}{:+16.4f}{endc}{}{:+16.4f}{endc}{:14.0f}{:14.0f}{endc}{:14.0f}{:14.0f}\"",
"tres",
"=",
"calculate_change",
"(",
"bn",
"[",
"'real_time'",
"]",
",",
"other_bench",
"[",
"'real_time'",
"]",
")",
"cpures",
"=",
"calculate_change",
"(",
"bn",
"[",
"'cpu_time'",
"]",
",",
"other_bench",
"[",
"'cpu_time'",
"]",
")",
"output_strs",
"+=",
"[",
"color_format",
"(",
"use_color",
",",
"fmt_str",
",",
"BC_HEADER",
",",
"bn",
"[",
"'name'",
"]",
",",
"first_col_width",
",",
"get_color",
"(",
"tres",
")",
",",
"tres",
",",
"get_color",
"(",
"cpures",
")",
",",
"cpures",
",",
"bn",
"[",
"'real_time'",
"]",
",",
"other_bench",
"[",
"'real_time'",
"]",
",",
"bn",
"[",
"'cpu_time'",
"]",
",",
"other_bench",
"[",
"'cpu_time'",
"]",
",",
"endc",
"=",
"BC_ENDC",
")",
"]",
"return",
"output_strs"
] |
https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/llvm/utils/benchmark/tools/gbench/report.py#L87-L128
|
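calculate_change is used but not defined in this row. Editor-added sketch under the assumption that it returns the relative change (new - old) / |old|, which is how the thresholds in get_color read; verify against report.py before relying on it:

def calculate_change(old_val, new_val):
    # Assumed helper: decimal change relative to the old value.
    if old_val == 0 and new_val == 0:
        return 0.0
    if old_val == 0:
        return float(new_val - old_val) / (float(old_val + new_val) / 2)
    return float(new_val - old_val) / abs(old_val)

assert calculate_change(100.0, 110.0) == 0.10  # a 10% regression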
|
hpi-xnor/BMXNet-v2
|
af2b1859eafc5c721b1397cef02f946aaf2ce20d
|
tools/caffe_translator/scripts/convert_caffe_model.py
|
python
|
CaffeModelConverter.add_aux_param
|
(self, param_name, layer_index, blob_index)
|
Add an aux param to .params file. Example: moving_mean in BatchNorm layer
|
Add an aux param to .params file. Example: moving_mean in BatchNorm layer
|
[
"Add",
"an",
"aux",
"param",
"to",
".",
"params",
"file",
".",
"Example",
":",
"moving_mean",
"in",
"BatchNorm",
"layer"
] |
def add_aux_param(self, param_name, layer_index, blob_index):
"""Add an aux param to .params file. Example: moving_mean in BatchNorm layer """
self.add_param('aux:%s' % param_name, layer_index, blob_index)
|
[
"def",
"add_aux_param",
"(",
"self",
",",
"param_name",
",",
"layer_index",
",",
"blob_index",
")",
":",
"self",
".",
"add_param",
"(",
"'aux:%s'",
"%",
"param_name",
",",
"layer_index",
",",
"blob_index",
")"
] |
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/tools/caffe_translator/scripts/convert_caffe_model.py#L42-L44
|
||
JumpingYang001/webrtc
|
c03d6e965e1f54aeadd670e491eabe5fdb8db968
|
tools_webrtc/perf/catapult_uploader.py
|
python
|
_WaitForUploadConfirmation
|
(url, upload_token, wait_timeout,
wait_polling_period)
|
return response, resp_json
|
Make HTTP GET requests to the Performance Dashboard until upload
status is known or the time is out.
Args:
url: URL of Performance Dashboard instance, e.g.
"https://chromeperf.appspot.com".
upload_token: String that identifies Performance Dashboard and can be used
for the status check.
wait_timeout: (datetime.timedelta) Maximum time to wait for the
confirmation.
wait_polling_period: (datetime.timedelta) Performance Dashboard will be
polled every wait_polling_period amount of time.
|
Make HTTP GET requests to the Performance Dashboard until upload
status is known or the time is out.
|
[
"Make",
"a",
"HTTP",
"GET",
"requests",
"to",
"the",
"Performance",
"Dashboard",
"untill",
"upload",
"status",
"is",
"known",
"or",
"the",
"time",
"is",
"out",
"."
] |
def _WaitForUploadConfirmation(url, upload_token, wait_timeout,
wait_polling_period):
"""Make a HTTP GET requests to the Performance Dashboard untill upload
status is known or the time is out.
Args:
url: URL of Performance Dashboard instance, e.g.
"https://chromeperf.appspot.com".
upload_token: String that identifies Performance Dashboard and can be used
for the status check.
wait_timeout: (datetime.timedelta) Maximum time to wait for the
confirmation.
wait_polling_period: (datetime.timedelta) Performance Dashboard will be
polled every wait_polling_period amount of time.
"""
assert wait_polling_period <= wait_timeout
headers = _CreateHeaders(_GenerateOauthToken())
http = httplib2.Http()
oauth_refreshed = False
response = None
resp_json = None
current_time = datetime.datetime.now()
end_time = current_time + wait_timeout
next_poll_time = current_time + wait_polling_period
while datetime.datetime.now() < end_time:
current_time = datetime.datetime.now()
if next_poll_time > current_time:
time.sleep((next_poll_time - current_time).total_seconds())
next_poll_time = datetime.datetime.now() + wait_polling_period
response, content = http.request(url + '/uploads/' + upload_token,
method='GET', headers=headers)
print 'Upload state polled. Response: %r.' % content
if not oauth_refreshed and response.status == 403:
print 'Oauth token refreshed. Continue polling.'
headers = _CreateHeaders(_GenerateOauthToken())
oauth_refreshed = True
continue
if response.status != 200:
break
resp_json = json.loads(content)
if resp_json['state'] == 'COMPLETED' or resp_json['state'] == 'FAILED':
break
return response, resp_json
|
[
"def",
"_WaitForUploadConfirmation",
"(",
"url",
",",
"upload_token",
",",
"wait_timeout",
",",
"wait_polling_period",
")",
":",
"assert",
"wait_polling_period",
"<=",
"wait_timeout",
"headers",
"=",
"_CreateHeaders",
"(",
"_GenerateOauthToken",
"(",
")",
")",
"http",
"=",
"httplib2",
".",
"Http",
"(",
")",
"oauth_refreshed",
"=",
"False",
"response",
"=",
"None",
"resp_json",
"=",
"None",
"current_time",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"end_time",
"=",
"current_time",
"+",
"wait_timeout",
"next_poll_time",
"=",
"current_time",
"+",
"wait_polling_period",
"while",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"<",
"end_time",
":",
"current_time",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"if",
"next_poll_time",
">",
"current_time",
":",
"time",
".",
"sleep",
"(",
"(",
"next_poll_time",
"-",
"current_time",
")",
".",
"total_seconds",
"(",
")",
")",
"next_poll_time",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"+",
"wait_polling_period",
"response",
",",
"content",
"=",
"http",
".",
"request",
"(",
"url",
"+",
"'/uploads/'",
"+",
"upload_token",
",",
"method",
"=",
"'GET'",
",",
"headers",
"=",
"headers",
")",
"print",
"'Upload state polled. Response: %r.'",
"%",
"content",
"if",
"not",
"oauth_refreshed",
"and",
"response",
".",
"status",
"==",
"403",
":",
"print",
"'Oauth token refreshed. Continue polling.'",
"headers",
"=",
"_CreateHeaders",
"(",
"_GenerateOauthToken",
"(",
")",
")",
"oauth_refreshed",
"=",
"True",
"continue",
"if",
"response",
".",
"status",
"!=",
"200",
":",
"break",
"resp_json",
"=",
"json",
".",
"loads",
"(",
"content",
")",
"if",
"resp_json",
"[",
"'state'",
"]",
"==",
"'COMPLETED'",
"or",
"resp_json",
"[",
"'state'",
"]",
"==",
"'FAILED'",
":",
"break",
"return",
"response",
",",
"resp_json"
] |
https://github.com/JumpingYang001/webrtc/blob/c03d6e965e1f54aeadd670e491eabe5fdb8db968/tools_webrtc/perf/catapult_uploader.py#L67-L117
|
|
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/ops/math_grad.py
|
python
|
_MinOrMaxGrad
|
(op, grad)
|
return [math_ops.divide(indicators, num_selected) * grad, None]
|
Gradient for Min or Max. Amazingly it's precisely the same code.
|
Gradient for Min or Max. Amazingly it's precisely the same code.
|
[
"Gradient",
"for",
"Min",
"or",
"Max",
".",
"Amazingly",
"it",
"s",
"precisely",
"the",
"same",
"code",
"."
] |
def _MinOrMaxGrad(op, grad):
"""Gradient for Min or Max. Amazingly it's precisely the same code."""
input_shape = array_ops.shape(op.inputs[0])
y = op.outputs[0]
if not op.get_attr("keep_dims"):
output_shape_kept_dims = math_ops.reduced_shape(input_shape, op.inputs[1])
y = array_ops.reshape(y, output_shape_kept_dims)
grad = array_ops.reshape(grad, output_shape_kept_dims)
else:
output_shape_kept_dims = array_ops.shape(y)
# Compute the number of selected (maximum or minimum) elements in each
# reduction dimension. If there are multiple minimum or maximum elements
# then the gradient will be divided between them.
indicators = math_ops.cast(math_ops.equal(y, op.inputs[0]), grad.dtype)
num_selected = array_ops.reshape(
math_ops.reduce_sum(indicators, op.inputs[1]), output_shape_kept_dims)
return [math_ops.divide(indicators, num_selected) * grad, None]
|
[
"def",
"_MinOrMaxGrad",
"(",
"op",
",",
"grad",
")",
":",
"input_shape",
"=",
"array_ops",
".",
"shape",
"(",
"op",
".",
"inputs",
"[",
"0",
"]",
")",
"y",
"=",
"op",
".",
"outputs",
"[",
"0",
"]",
"if",
"not",
"op",
".",
"get_attr",
"(",
"\"keep_dims\"",
")",
":",
"output_shape_kept_dims",
"=",
"math_ops",
".",
"reduced_shape",
"(",
"input_shape",
",",
"op",
".",
"inputs",
"[",
"1",
"]",
")",
"y",
"=",
"array_ops",
".",
"reshape",
"(",
"y",
",",
"output_shape_kept_dims",
")",
"grad",
"=",
"array_ops",
".",
"reshape",
"(",
"grad",
",",
"output_shape_kept_dims",
")",
"else",
":",
"output_shape_kept_dims",
"=",
"array_ops",
".",
"shape",
"(",
"y",
")",
"# Compute the number of selected (maximum or minimum) elements in each",
"# reduction dimension. If there are multiple minimum or maximum elements",
"# then the gradient will be divided between them.",
"indicators",
"=",
"math_ops",
".",
"cast",
"(",
"math_ops",
".",
"equal",
"(",
"y",
",",
"op",
".",
"inputs",
"[",
"0",
"]",
")",
",",
"grad",
".",
"dtype",
")",
"num_selected",
"=",
"array_ops",
".",
"reshape",
"(",
"math_ops",
".",
"reduce_sum",
"(",
"indicators",
",",
"op",
".",
"inputs",
"[",
"1",
"]",
")",
",",
"output_shape_kept_dims",
")",
"return",
"[",
"math_ops",
".",
"divide",
"(",
"indicators",
",",
"num_selected",
")",
"*",
"grad",
",",
"None",
"]"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/math_grad.py#L217-L235
|
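Editor-added NumPy illustration of the tie-splitting rule above: the upstream gradient is divided evenly among all inputs equal to the reduced max:

import numpy as np

x = np.array([1.0, 3.0, 3.0, 2.0])
y = x.max()                               # forward: reduce_max over all elements
grad = 1.0                                # upstream gradient for the scalar output
indicators = (x == y).astype(x.dtype)     # [0, 1, 1, 0]
num_selected = indicators.sum()           # 2 tied maxima
dx = indicators / num_selected * grad
assert np.allclose(dx, [0.0, 0.5, 0.5, 0.0])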
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/agw/aui/framemanager.py
|
python
|
AuiPaneInfo.IsDockable
|
(self)
|
return self.IsTopDockable() or self.IsBottomDockable() or self.IsLeftDockable() or \
self.IsRightDockable() or self.IsNotebookDockable()
|
Returns ``True`` if the pane can be docked.
|
Returns ``True`` if the pane can be docked.
|
[
"Returns",
"True",
"if",
"the",
"pane",
"can",
"be",
"docked",
"."
] |
def IsDockable(self):
""" Returns ``True`` if the pane can be docked. """
return self.IsTopDockable() or self.IsBottomDockable() or self.IsLeftDockable() or \
self.IsRightDockable() or self.IsNotebookDockable()
|
[
"def",
"IsDockable",
"(",
"self",
")",
":",
"return",
"self",
".",
"IsTopDockable",
"(",
")",
"or",
"self",
".",
"IsBottomDockable",
"(",
")",
"or",
"self",
".",
"IsLeftDockable",
"(",
")",
"or",
"self",
".",
"IsRightDockable",
"(",
")",
"or",
"self",
".",
"IsNotebookDockable",
"(",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/framemanager.py#L689-L693
|
|
microsoft/TSS.MSR
|
0f2516fca2cd9929c31d5450e39301c9bde43688
|
TSS.Py/src/TpmTypes.py
|
python
|
TPM2_ReadClock_REQUEST.__init__
|
(self)
|
This command reads the current TPMS_TIME_INFO structure that
contains the current setting of Time, Clock, resetCount, and restartCount.
|
This command reads the current TPMS_TIME_INFO structure that
contains the current setting of Time, Clock, resetCount, and restartCount.
|
[
"This",
"command",
"reads",
"the",
"current",
"TPMS_TIME_INFO",
"structure",
"that",
"contains",
"the",
"current",
"setting",
"of",
"Time",
"Clock",
"resetCount",
"and",
"restartCount",
"."
] |
def __init__(self):
""" This command reads the current TPMS_TIME_INFO structure that
contains the current setting of Time, Clock, resetCount, and restartCount.
"""
pass
|
[
"def",
"__init__",
"(",
"self",
")",
":",
"pass"
] |
https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L16282-L16286
|
||
ninja-build/ninja
|
f404f0059d71c8c86da7b56c48794266b5befd10
|
misc/write_fake_manifests.py
|
python
|
FileWriter
|
(path)
|
Context manager for a ninja_syntax object writing to a file.
|
Context manager for a ninja_syntax object writing to a file.
|
[
"Context",
"manager",
"for",
"a",
"ninja_syntax",
"object",
"writing",
"to",
"a",
"file",
"."
] |
def FileWriter(path):
"""Context manager for a ninja_syntax object writing to a file."""
try:
os.makedirs(os.path.dirname(path))
except OSError:
pass
f = open(path, 'w')
yield ninja_syntax.Writer(f)
f.close()
|
[
"def",
"FileWriter",
"(",
"path",
")",
":",
"try",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"path",
")",
")",
"except",
"OSError",
":",
"pass",
"f",
"=",
"open",
"(",
"path",
",",
"'w'",
")",
"yield",
"ninja_syntax",
".",
"Writer",
"(",
"f",
")",
"f",
".",
"close",
"(",
")"
] |
https://github.com/ninja-build/ninja/blob/f404f0059d71c8c86da7b56c48794266b5befd10/misc/write_fake_manifests.py#L215-L223
|
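The row above only works as a context manager if it is decorated with @contextlib.contextmanager; the decorator sits outside the extracted span. Editor-added self-contained analog under that assumption, minus the ninja_syntax.Writer wrapper:

import contextlib, os, tempfile

@contextlib.contextmanager
def file_writer(path):
    # Same shape as FileWriter above: ensure the directory, open, yield, close.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    f = open(path, 'w')
    try:
        yield f
    finally:
        f.close()

path = os.path.join(tempfile.mkdtemp(), 'sub', 'out.txt')
with file_writer(path) as f:
    f.write('hello\n')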
||
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/tensorflow_model_optimization/python/core/quantization/keras/vitis/common/vitis_quantize_strategy.py
|
python
|
VitisQuantizeStrategy.update
|
(self, qs_configs)
|
Update the current configurations by overriding.
Args:
new_config: String, file name of the new quantize strategy configurations.
Returns:
None
|
Update the current configurations by overriding.
|
[
"Update",
"the",
"current",
"configurations",
"by",
"overriding",
"."
] |
def update(self, qs_configs):
"""Update the current configurations by overriding.
Args:
new_config: String, file name of the new quantize strategy configurations.
Returns:
None
"""
if 'quantize_registry_config' in qs_configs:
self._quantize_registry.update(qs_configs.pop('quantize_registry_config'))
if 'optimize_pipeline_config' in qs_configs:
self._optimize_pipeline.update(qs_configs.pop('optimize_pipeline_config'))
if 'quantize_pipeline_config' in qs_configs:
self._quantize_pipeline.update(qs_configs.pop('quantize_pipeline_config'))
invalid_configs = []
while qs_configs:
config = qs_configs.popitem()
if self._quantize_registry.is_valid_config(config):
self._quantize_registry.update(config)
elif self._optimize_pipeline.is_valid_config(config):
self._optimize_pipeline.update(config)
elif self._quantize_pipeline.is_valid_config(config):
self._quantize_pipeline.update(config)
else:
invalid_configs.append(config)
# Check for invalid configurations
if invalid_configs:
logger.error('Invalid configs: {}'.format(invalid_configs))
self._qs_configs.update({
'quantize_registry_config': self._quantize_registry.get_configs(),
'optimize_pipeline_config': self._optimize_pipeline.get_configs(),
'quantize_pipeline_config': self._quantize_pipeline.get_configs()
})
|
[
"def",
"update",
"(",
"self",
",",
"qs_configs",
")",
":",
"if",
"'quantize_registry_config'",
"in",
"qs_configs",
":",
"self",
".",
"_quantize_registry",
".",
"update",
"(",
"qs_configs",
".",
"pop",
"(",
"'quantize_registry_config'",
")",
")",
"if",
"'optimize_pipeline_config'",
"in",
"qs_configs",
":",
"self",
".",
"_optimize_pipeline",
".",
"update",
"(",
"qs_configs",
".",
"pop",
"(",
"'optimize_pipeline_config'",
")",
")",
"if",
"'quantize_pipeline_config'",
"in",
"qs_configs",
":",
"self",
".",
"_quantize_pipeline",
".",
"update",
"(",
"qs_configs",
".",
"pop",
"(",
"'quantize_pipeline_config'",
")",
")",
"invalid_configs",
"=",
"[",
"]",
"while",
"qs_configs",
":",
"config",
"=",
"qs_configs",
".",
"popitem",
"(",
")",
"if",
"self",
".",
"_quantize_registry",
".",
"is_valid_config",
"(",
"config",
")",
":",
"self",
".",
"_quantize_registry",
".",
"update",
"(",
"config",
")",
"elif",
"self",
".",
"_optimize_pipeline",
".",
"is_valid_config",
"(",
"config",
")",
":",
"self",
".",
"_optimize_pipeline",
".",
"update",
"(",
"config",
")",
"elif",
"self",
".",
"_quantize_pipeline",
".",
"is_valid_config",
"(",
"config",
")",
":",
"self",
".",
"_quantize_pipeline",
".",
"update",
"(",
"config",
")",
"else",
":",
"invalid_configs",
".",
"append",
"(",
"config",
")",
"# Check for invalid configurations",
"if",
"invalid_configs",
":",
"logger",
".",
"error",
"(",
"'Invalid configs: {}'",
".",
"format",
"(",
"invalid_configs",
")",
")",
"self",
".",
"_qs_configs",
".",
"update",
"(",
"{",
"'quantize_registry_config'",
":",
"self",
".",
"_quantize_registry",
".",
"get_configs",
"(",
")",
",",
"'optimize_pipeline_config'",
":",
"self",
".",
"_optimize_pipeline",
".",
"get_configs",
"(",
")",
",",
"'quantize_pipeline_config'",
":",
"self",
".",
"_quantize_pipeline",
".",
"get_configs",
"(",
")",
"}",
")"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow2.x/tensorflow_model_optimization/python/core/quantization/keras/vitis/common/vitis_quantize_strategy.py#L27-L64
|
||
CaoWGG/TensorRT-CenterNet
|
f949252e37b51e60f873808f46d3683f15735e79
|
onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py
|
python
|
Cursor.walk_preorder
|
(self)
|
Depth-first preorder walk over the cursor and its descendants.
Yields cursors.
|
Depth-first preorder walk over the cursor and its descendants.
|
[
"Depth",
"-",
"first",
"preorder",
"walk",
"over",
"the",
"cursor",
"and",
"its",
"descendants",
"."
] |
def walk_preorder(self):
"""Depth-first preorder walk over the cursor and its descendants.
Yields cursors.
"""
yield self
for child in self.get_children():
for descendant in child.walk_preorder():
yield descendant
|
[
"def",
"walk_preorder",
"(",
"self",
")",
":",
"yield",
"self",
"for",
"child",
"in",
"self",
".",
"get_children",
"(",
")",
":",
"for",
"descendant",
"in",
"child",
".",
"walk_preorder",
"(",
")",
":",
"yield",
"descendant"
] |
https://github.com/CaoWGG/TensorRT-CenterNet/blob/f949252e37b51e60f873808f46d3683f15735e79/onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py#L1661-L1669
|
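Editor-added, self-contained analog of the recursive generator above, walking a nested list in depth-first preorder:

def walk_preorder(node):
    yield node
    for child in (node if isinstance(node, list) else ()):
        for descendant in walk_preorder(child):
            yield descendant

tree = [1, [2, 3]]
assert [n for n in walk_preorder(tree) if not isinstance(n, list)] == [1, 2, 3]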
||
anestisb/oatdump_plus
|
ba858c1596598f0d9ae79c14d08c708cecc50af3
|
tools/cpplint.py
|
python
|
FilesBelongToSameModule
|
(filename_cc, filename_h)
|
return files_belong_to_same_module, common_path
|
Check if these two filenames belong to the same module.
The concept of a 'module' here is as follows:
foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
same 'module' if they are in the same directory.
some/path/public/xyzzy and some/path/internal/xyzzy are also considered
to belong to the same module here.
If the filename_cc contains a longer path than the filename_h, for example,
'/absolute/path/to/base/sysinfo.cc', and this file would include
'base/sysinfo.h', this function also produces the prefix needed to open the
header. This is used by the caller of this function to more robustly open the
header file. We don't have access to the real include paths in this context,
so we need this guesswork here.
Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
according to this implementation. Because of this, this function gives
some false positives. This should be sufficiently rare in practice.
Args:
filename_cc: is the path for the .cc file
filename_h: is the path for the header path
Returns:
Tuple with a bool and a string:
bool: True if filename_cc and filename_h belong to the same module.
string: the additional prefix needed to open the header file.
|
Check if these two filenames belong to the same module.
|
[
"Check",
"if",
"these",
"two",
"filenames",
"belong",
"to",
"the",
"same",
"module",
"."
] |
def FilesBelongToSameModule(filename_cc, filename_h):
"""Check if these two filenames belong to the same module.
The concept of a 'module' here is as follows:
foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
same 'module' if they are in the same directory.
some/path/public/xyzzy and some/path/internal/xyzzy are also considered
to belong to the same module here.
If the filename_cc contains a longer path than the filename_h, for example,
'/absolute/path/to/base/sysinfo.cc', and this file would include
'base/sysinfo.h', this function also produces the prefix needed to open the
header. This is used by the caller of this function to more robustly open the
header file. We don't have access to the real include paths in this context,
so we need this guesswork here.
Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
according to this implementation. Because of this, this function gives
some false positives. This should be sufficiently rare in practice.
Args:
filename_cc: is the path for the .cc file
filename_h: is the path for the header path
Returns:
Tuple with a bool and a string:
bool: True if filename_cc and filename_h belong to the same module.
string: the additional prefix needed to open the header file.
"""
if not filename_cc.endswith('.cc'):
return (False, '')
filename_cc = filename_cc[:-len('.cc')]
if filename_cc.endswith('_unittest'):
filename_cc = filename_cc[:-len('_unittest')]
elif filename_cc.endswith('_test'):
filename_cc = filename_cc[:-len('_test')]
filename_cc = filename_cc.replace('/public/', '/')
filename_cc = filename_cc.replace('/internal/', '/')
if not filename_h.endswith('.h'):
return (False, '')
filename_h = filename_h[:-len('.h')]
if filename_h.endswith('-inl'):
filename_h = filename_h[:-len('-inl')]
filename_h = filename_h.replace('/public/', '/')
filename_h = filename_h.replace('/internal/', '/')
files_belong_to_same_module = filename_cc.endswith(filename_h)
common_path = ''
if files_belong_to_same_module:
common_path = filename_cc[:-len(filename_h)]
return files_belong_to_same_module, common_path
|
[
"def",
"FilesBelongToSameModule",
"(",
"filename_cc",
",",
"filename_h",
")",
":",
"if",
"not",
"filename_cc",
".",
"endswith",
"(",
"'.cc'",
")",
":",
"return",
"(",
"False",
",",
"''",
")",
"filename_cc",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"'.cc'",
")",
"]",
"if",
"filename_cc",
".",
"endswith",
"(",
"'_unittest'",
")",
":",
"filename_cc",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"'_unittest'",
")",
"]",
"elif",
"filename_cc",
".",
"endswith",
"(",
"'_test'",
")",
":",
"filename_cc",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"'_test'",
")",
"]",
"filename_cc",
"=",
"filename_cc",
".",
"replace",
"(",
"'/public/'",
",",
"'/'",
")",
"filename_cc",
"=",
"filename_cc",
".",
"replace",
"(",
"'/internal/'",
",",
"'/'",
")",
"if",
"not",
"filename_h",
".",
"endswith",
"(",
"'.h'",
")",
":",
"return",
"(",
"False",
",",
"''",
")",
"filename_h",
"=",
"filename_h",
"[",
":",
"-",
"len",
"(",
"'.h'",
")",
"]",
"if",
"filename_h",
".",
"endswith",
"(",
"'-inl'",
")",
":",
"filename_h",
"=",
"filename_h",
"[",
":",
"-",
"len",
"(",
"'-inl'",
")",
"]",
"filename_h",
"=",
"filename_h",
".",
"replace",
"(",
"'/public/'",
",",
"'/'",
")",
"filename_h",
"=",
"filename_h",
".",
"replace",
"(",
"'/internal/'",
",",
"'/'",
")",
"files_belong_to_same_module",
"=",
"filename_cc",
".",
"endswith",
"(",
"filename_h",
")",
"common_path",
"=",
"''",
"if",
"files_belong_to_same_module",
":",
"common_path",
"=",
"filename_cc",
"[",
":",
"-",
"len",
"(",
"filename_h",
")",
"]",
"return",
"files_belong_to_same_module",
",",
"common_path"
] |
https://github.com/anestisb/oatdump_plus/blob/ba858c1596598f0d9ae79c14d08c708cecc50af3/tools/cpplint.py#L3616-L3668
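A quick illustration of the pairing heuristic above, assuming FilesBelongToSameModule is in scope; the paths are made-up examples, not files from the cpplint repository:

# Same directory: the pair matches and no extra prefix is needed.
print(FilesBelongToSameModule('foo/bar.cc', 'foo/bar.h'))
# -> (True, '')

# The .cc path carries an absolute prefix; the returned common path lets
# the caller prepend it when opening the header.
print(FilesBelongToSameModule('/abs/path/to/base/sysinfo.cc', 'base/sysinfo.h'))
# -> (True, '/abs/path/to/')

# A _test suffix and /internal/ vs /public/ are normalized away first.
print(FilesBelongToSameModule('a/internal/x_test.cc', 'a/public/x.h'))
# -> (True, '')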
|
|
hughperkins/tf-coriander
|
970d3df6c11400ad68405f22b0c42a52374e94ca
|
tensorflow/python/framework/tensor_shape.py
|
python
|
Dimension.value
|
(self)
|
return self._value
|
The value of this dimension, or None if it is unknown.
|
The value of this dimension, or None if it is unknown.
|
[
"The",
"value",
"of",
"this",
"dimension",
"or",
"None",
"if",
"it",
"is",
"unknown",
"."
] |
def value(self):
"""The value of this dimension, or None if it is unknown."""
return self._value
|
[
"def",
"value",
"(",
"self",
")",
":",
"return",
"self",
".",
"_value"
] |
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/framework/tensor_shape.py#L75-L77
|
|
ablab/spades
|
3a754192b88540524ce6fb69eef5ea9273a38465
|
assembler/src/tools/reads_utils/ideal_by_fasta.py
|
python
|
read_fasta
|
(filename)
|
return zip(res_name, res_seq)
|
Returns list of FASTA entries (in tuples: name, seq)
|
Returns list of FASTA entries (in tuples: name, seq)
|
[
"Returns",
"list",
"of",
"FASTA",
"entries",
"(",
"in",
"tuples",
":",
"name",
"seq",
")"
] |
def read_fasta(filename):
"""
Returns list of FASTA entries (in tuples: name, seq)
"""
res_name = []
res_seq = []
first = True
seq = ''
for line in open(filename):
if line[0] == '>':
res_name.append(line.strip())
if not first:
res_seq.append(seq)
else:
first = False
seq = ''
else:
seq += line.strip()
res_seq.append(seq)
return zip(res_name, res_seq)
|
[
"def",
"read_fasta",
"(",
"filename",
")",
":",
"res_name",
"=",
"[",
"]",
"res_seq",
"=",
"[",
"]",
"first",
"=",
"True",
"seq",
"=",
"''",
"for",
"line",
"in",
"open",
"(",
"filename",
")",
":",
"if",
"line",
"[",
"0",
"]",
"==",
"'>'",
":",
"res_name",
".",
"append",
"(",
"line",
".",
"strip",
"(",
")",
")",
"if",
"not",
"first",
":",
"res_seq",
".",
"append",
"(",
"seq",
")",
"else",
":",
"first",
"=",
"False",
"seq",
"=",
"''",
"else",
":",
"seq",
"+=",
"line",
".",
"strip",
"(",
")",
"res_seq",
".",
"append",
"(",
"seq",
")",
"return",
"zip",
"(",
"res_name",
",",
"res_seq",
")"
] |
https://github.com/ablab/spades/blob/3a754192b88540524ce6fb69eef5ea9273a38465/assembler/src/tools/reads_utils/ideal_by_fasta.py#L15-L35
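A minimal usage sketch for read_fasta, with a hypothetical file path. Two caveats worth knowing: the function assumes the file starts with a '>' header line, and under Python 3 zip() returns a one-shot iterator, so materialize it if you need more than one pass:

entries = list(read_fasta('reads.fasta'))  # 'reads.fasta' is a placeholder path
for name, seq in entries:
    print(name, len(seq))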
|
|
pytorch/pytorch
|
7176c92687d3cc847cc046bf002269c6949a21c2
|
torch/cuda/__init__.py
|
python
|
is_bf16_supported
|
()
|
return torch.cuda.get_device_properties(torch.cuda.current_device()).major >= 8 and cuda_maj_decide
|
r"""Returns a bool indicating if the current CUDA device supports dtype bfloat16
|
r"""Returns a bool indicating if the current CUDA device supports dtype bfloat16
|
[
"r",
"Returns",
"a",
"bool",
"indicating",
"if",
"the",
"current",
"CUDA",
"device",
"supports",
"dtype",
"bfloat16"
] |
def is_bf16_supported():
r"""Returns a bool indicating if the current CUDA device supports dtype bfloat16"""
cu_vers = torch.version.cuda
if cu_vers is not None:
cuda_maj_decide = int(cu_vers.split('.')[0]) >= 11
else:
cuda_maj_decide = False
return torch.cuda.get_device_properties(torch.cuda.current_device()).major >= 8 and cuda_maj_decide
|
[
"def",
"is_bf16_supported",
"(",
")",
":",
"cu_vers",
"=",
"torch",
".",
"version",
".",
"cuda",
"if",
"cu_vers",
"is",
"not",
"None",
":",
"cuda_maj_decide",
"=",
"int",
"(",
"cu_vers",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
")",
">=",
"11",
"else",
":",
"cuda_maj_decide",
"=",
"False",
"return",
"torch",
".",
"cuda",
".",
"get_device_properties",
"(",
"torch",
".",
"cuda",
".",
"current_device",
"(",
")",
")",
".",
"major",
">=",
"8",
"and",
"cuda_maj_decide"
] |
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/cuda/__init__.py#L84-L92
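A hedged usage sketch: select bfloat16 only when the runtime reports support, falling back to float16 otherwise. This assumes a PyTorch build recent enough to expose torch.cuda.is_bf16_supported:

import torch

if torch.cuda.is_available() and torch.cuda.is_bf16_supported():
    dtype = torch.bfloat16  # compute capability 8.x or newer, CUDA >= 11
else:
    dtype = torch.float16
x = torch.ones(4, dtype=dtype)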
|
|
tensorflow/minigo
|
6d89c202cdceaf449aefc3149ab2110d44f1a6a4
|
oneoffs/joseki/opening_freqs_export.py
|
python
|
main
|
(_)
|
Entrypoint for absl.app
|
Entrypoint for absl.app
|
[
"Entrypoint",
"for",
"absl",
".",
"app"
] |
def main(_):
""" Entrypoint for absl.app """
create_top_report(FLAGS.top_n)
|
[
"def",
"main",
"(",
"_",
")",
":",
"create_top_report",
"(",
"FLAGS",
".",
"top_n",
")"
] |
https://github.com/tensorflow/minigo/blob/6d89c202cdceaf449aefc3149ab2110d44f1a6a4/oneoffs/joseki/opening_freqs_export.py#L276-L278
|
||
PaddlePaddle/Paddle
|
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
|
python/paddle/fluid/dataset.py
|
python
|
InMemoryDataset.set_merge_by_lineid
|
(self, merge_size=2)
|
Set merge by line id; instances with the same line id will be merged after
shuffle. You should parse the line id in the data generator.
Args:
merge_size(int): ins size to merge. default is 2.
Examples:
.. code-block:: python
import paddle.fluid as fluid
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
dataset.set_merge_by_lineid()
|
Set merge by line id; instances with the same line id will be merged after
shuffle. You should parse the line id in the data generator.
|
[
"Set",
"merge",
"by",
"line",
"id",
"instances",
"of",
"same",
"line",
"id",
"will",
"be",
"merged",
"after",
"shuffle",
"you",
"should",
"parse",
"line",
"id",
"in",
"data",
"generator",
"."
] |
def set_merge_by_lineid(self, merge_size=2):
"""
Set merge by line id; instances with the same line id will be merged after
shuffle. You should parse the line id in the data generator.
Args:
merge_size(int): ins size to merge. default is 2.
Examples:
.. code-block:: python
import paddle.fluid as fluid
dataset = fluid.DatasetFactory().create_dataset("InMemoryDataset")
dataset.set_merge_by_lineid()
"""
self.dataset.set_merge_by_lineid(merge_size)
self.merge_by_lineid = True
self.parse_ins_id = True
|
[
"def",
"set_merge_by_lineid",
"(",
"self",
",",
"merge_size",
"=",
"2",
")",
":",
"self",
".",
"dataset",
".",
"set_merge_by_lineid",
"(",
"merge_size",
")",
"self",
".",
"merge_by_lineid",
"=",
"True",
"self",
".",
"parse_ins_id",
"=",
"True"
] |
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/dataset.py#L681-L699
|
||
raymondlu/super-animation-samples
|
04234269112ff0dc32447f27a761dbbb00b8ba17
|
samples/cocos2d-x-3.1/CocosLuaGame2/frameworks/cocos2d-x/tools/bindings-generator/clang/cindex.py
|
python
|
register_functions
|
(lib, ignore_errors)
|
Register function prototypes with a libclang library instance.
This must be called as part of library instantiation so Python knows how
to call out to the shared library.
|
Register function prototypes with a libclang library instance.
|
[
"Register",
"function",
"prototypes",
"with",
"a",
"libclang",
"library",
"instance",
"."
] |
def register_functions(lib, ignore_errors):
"""Register function prototypes with a libclang library instance.
This must be called as part of library instantiation so Python knows how
to call out to the shared library.
"""
def register(item):
return register_function(lib, item, ignore_errors)
map(register, functionList)
|
[
"def",
"register_functions",
"(",
"lib",
",",
"ignore_errors",
")",
":",
"def",
"register",
"(",
"item",
")",
":",
"return",
"register_function",
"(",
"lib",
",",
"item",
",",
"ignore_errors",
")",
"map",
"(",
"register",
",",
"functionList",
")"
] |
https://github.com/raymondlu/super-animation-samples/blob/04234269112ff0dc32447f27a761dbbb00b8ba17/samples/cocos2d-x-3.1/CocosLuaGame2/frameworks/cocos2d-x/tools/bindings-generator/clang/cindex.py#L3297-L3307
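Note that on Python 3 map() is lazy, so the final map(register, functionList) above evaluates nothing and no prototypes get registered. A sketch of the portable fix is a plain loop; functionList and register_function are names from the surrounding cindex module, not new code:

def register_functions(lib, ignore_errors):
    """Register function prototypes with a libclang library instance."""
    for item in functionList:
        register_function(lib, item, ignore_errors)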
|
||
htcondor/htcondor
|
4829724575176d1d6c936e4693dfd78a728569b0
|
src/condor_contrib/condor_pigeon/src/condor_pigeon_client/skype_linux_tools/Skype4Py/skype.py
|
python
|
ISkype.SendCommand
|
(self, Command)
|
Sends an API command.
@param Command: Command to send. Use L{Command} method to create a command.
@type Command: L{ICommand}
|
Sends an API command.
|
[
"Sends",
"an",
"API",
"command",
"."
] |
def SendCommand(self, Command):
'''Sends an API command.
@param Command: Command to send. Use L{Command} method to create a command.
@type Command: L{ICommand}
'''
try:
self._API.SendCommand(Command)
except ISkypeAPIError:
self.ResetCache()
raise
|
[
"def",
"SendCommand",
"(",
"self",
",",
"Command",
")",
":",
"try",
":",
"self",
".",
"_API",
".",
"SendCommand",
"(",
"Command",
")",
"except",
"ISkypeAPIError",
":",
"self",
".",
"ResetCache",
"(",
")",
"raise"
] |
https://github.com/htcondor/htcondor/blob/4829724575176d1d6c936e4693dfd78a728569b0/src/condor_contrib/condor_pigeon/src/condor_pigeon_client/skype_linux_tools/Skype4Py/skype.py#L794-L804
|
||
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/cgi.py
|
python
|
FieldStorage.skip_lines
|
(self)
|
Internal: skip lines until outer boundary if defined.
|
Internal: skip lines until outer boundary if defined.
|
[
"Internal",
":",
"skip",
"lines",
"until",
"outer",
"boundary",
"if",
"defined",
"."
] |
def skip_lines(self):
"""Internal: skip lines until outer boundary if defined."""
if not self.outerboundary or self.done:
return
next = "--" + self.outerboundary
last = next + "--"
last_line_lfend = True
while 1:
line = self.fp.readline(1<<16)
if not line:
self.done = -1
break
if line[:2] == "--" and last_line_lfend:
strippedline = line.strip()
if strippedline == next:
break
if strippedline == last:
self.done = 1
break
last_line_lfend = line.endswith('\n')
|
[
"def",
"skip_lines",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"outerboundary",
"or",
"self",
".",
"done",
":",
"return",
"next",
"=",
"\"--\"",
"+",
"self",
".",
"outerboundary",
"last",
"=",
"next",
"+",
"\"--\"",
"last_line_lfend",
"=",
"True",
"while",
"1",
":",
"line",
"=",
"self",
".",
"fp",
".",
"readline",
"(",
"1",
"<<",
"16",
")",
"if",
"not",
"line",
":",
"self",
".",
"done",
"=",
"-",
"1",
"break",
"if",
"line",
"[",
":",
"2",
"]",
"==",
"\"--\"",
"and",
"last_line_lfend",
":",
"strippedline",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"strippedline",
"==",
"next",
":",
"break",
"if",
"strippedline",
"==",
"last",
":",
"self",
".",
"done",
"=",
"1",
"break",
"last_line_lfend",
"=",
"line",
".",
"endswith",
"(",
"'\\n'",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/cgi.py#L721-L740
|
||
albertz/openlierox
|
d316c14a8eb57848ef56e9bfa7b23a56f694a51b
|
tools/DedicatedServerVideo/gdata/spreadsheet/service.py
|
python
|
SpreadsheetsService.UpdateRow
|
(self, entry, new_row_data)
|
Updates a row with the provided data
Args:
entry: gdata.spreadsheet.SpreadsheetsList The entry to be updated
new_row_data: dict A dictionary of column header to row data
Returns:
The updated row
|
Updates a row with the provided data
Args:
entry: gdata.spreadsheet.SpreadsheetsList The entry to be updated
new_row_data: dict A dictionary of column header to row data
Returns:
The updated row
|
[
"Updates",
"a",
"row",
"with",
"the",
"provided",
"data",
"Args",
":",
"entry",
":",
"gdata",
".",
"spreadsheet",
".",
"SpreadsheetsList",
"The",
"entry",
"to",
"be",
"updated",
"new_row_data",
":",
"dict",
"A",
"dictionary",
"of",
"column",
"header",
"to",
"row",
"data",
"Returns",
":",
"The",
"updated",
"row"
] |
def UpdateRow(self, entry, new_row_data):
"""Updates a row with the provided data
Args:
entry: gdata.spreadsheet.SpreadsheetsList The entry to be updated
new_row_data: dict A dictionary of column header to row data
Returns:
The updated row
"""
entry.custom = {}
for k, v in new_row_data.iteritems():
new_custom = gdata.spreadsheet.Custom()
new_custom.column = k
new_custom.text = v
entry.custom[k] = new_custom
for a_link in entry.link:
if a_link.rel == 'edit':
return self.Put(entry, a_link.href,
converter=gdata.spreadsheet.SpreadsheetsListFromString)
|
[
"def",
"UpdateRow",
"(",
"self",
",",
"entry",
",",
"new_row_data",
")",
":",
"entry",
".",
"custom",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"new_row_data",
".",
"iteritems",
"(",
")",
":",
"new_custom",
"=",
"gdata",
".",
"spreadsheet",
".",
"Custom",
"(",
")",
"new_custom",
".",
"column",
"=",
"k",
"new_custom",
".",
"text",
"=",
"v",
"entry",
".",
"custom",
"[",
"k",
"]",
"=",
"new_custom",
"for",
"a_link",
"in",
"entry",
".",
"link",
":",
"if",
"a_link",
".",
"rel",
"==",
"'edit'",
":",
"return",
"self",
".",
"Put",
"(",
"entry",
",",
"a_link",
".",
"href",
",",
"converter",
"=",
"gdata",
".",
"spreadsheet",
".",
"SpreadsheetsListFromString",
")"
] |
https://github.com/albertz/openlierox/blob/d316c14a8eb57848ef56e9bfa7b23a56f694a51b/tools/DedicatedServerVideo/gdata/spreadsheet/service.py#L338-L357
|
||
Chia-Network/bls-signatures
|
a61089d653fa3653ac94452c73e97efcd461bdf2
|
python-impl/ec.py
|
python
|
twist
|
(point: AffinePoint, ec=default_ec_twist)
|
return AffinePoint(new_x, new_y, False, ec)
|
Given an untwisted point, this converts its
coordinates to a point on the twisted curve. See Craig Costello's
book; look up twists.
|
Given an untwisted point, this converts its
coordinates to a point on the twisted curve. See Craig Costello's
book; look up twists.
|
[
"Given",
"an",
"untwisted",
"point",
"this",
"converts",
"it",
"s",
"coordinates",
"to",
"a",
"point",
"on",
"the",
"twisted",
"curve",
".",
"See",
"Craig",
"Costello",
"book",
"look",
"up",
"twists",
"."
] |
def twist(point: AffinePoint, ec=default_ec_twist) -> AffinePoint:
"""
Given an untwisted point, this converts its
coordinates to a point on the twisted curve. See Craig Costello's
book; look up twists.
"""
f = Fq12.one(ec.q)
wsq = Fq12(ec.q, f.root, Fq6.zero(ec.q))
wcu = Fq12(ec.q, Fq6.zero(ec.q), f.root)
new_x = point.x * wsq
new_y = point.y * wcu
return AffinePoint(new_x, new_y, False, ec)
|
[
"def",
"twist",
"(",
"point",
":",
"AffinePoint",
",",
"ec",
"=",
"default_ec_twist",
")",
"->",
"AffinePoint",
":",
"f",
"=",
"Fq12",
".",
"one",
"(",
"ec",
".",
"q",
")",
"wsq",
"=",
"Fq12",
"(",
"ec",
".",
"q",
",",
"f",
".",
"root",
",",
"Fq6",
".",
"zero",
"(",
"ec",
".",
"q",
")",
")",
"wcu",
"=",
"Fq12",
"(",
"ec",
".",
"q",
",",
"Fq6",
".",
"zero",
"(",
"ec",
".",
"q",
")",
",",
"f",
".",
"root",
")",
"new_x",
"=",
"point",
".",
"x",
"*",
"wsq",
"new_y",
"=",
"point",
".",
"y",
"*",
"wcu",
"return",
"AffinePoint",
"(",
"new_x",
",",
"new_y",
",",
"False",
",",
"ec",
")"
] |
https://github.com/Chia-Network/bls-signatures/blob/a61089d653fa3653ac94452c73e97efcd461bdf2/python-impl/ec.py#L506-L517
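Reading wsq and wcu as the square and cube of a single twisting element w in F_{q^12} (an assumption based on how both are constructed from f.root), the coordinate map the function implements is simply:

\[
  \operatorname{twist}(x, y) \;=\; \bigl(x \cdot w^{2},\; y \cdot w^{3}\bigr),
  \qquad w \in \mathbb{F}_{q^{12}}
\]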
|
|
KratosMultiphysics/Kratos
|
0000833054ed0503424eb28205d6508d9ca6cbbc
|
applications/ShallowWaterApplication/python_scripts/postprocess/swap_coordinates_and_offset_ids_process.py
|
python
|
SwapCoordinatesAndOffsetIdsProcess.__init__
|
(self, model, settings)
|
SwapCoordinatesAndOffsetIdsProcess.
This process provides several tools for post-processing.
- Swap the YZ coordinates in order to make 2D simulations consistent at post-processing.
- Offset the ids in order to differentiate the model parts at post-processing.
|
SwapCoordinatesAndOffsetIdsProcess.
|
[
"SwapCoordinatesAndOffsetIdsProcess",
"."
] |
def __init__(self, model, settings):
""" SwapCoordinatesAndOffsetIdsProcess.
This process provides several tools for post-processing.
- Swap the YZ coordinates in order to make 2D simulations consistent at post-processing.
- Offset the ids in order to differentiate the model parts at post-processing.
"""
KM.Process.__init__(self)
default_settings = KM.Parameters("""
{
"model_part_name" : "model_part_name",
"swap_yz_coordinates" : true,
"nodes_ids_offset" : 0,
"elements_ids_offset" : 0,
"conditions_ids_offset" : 0,
"properties_ids_offset" : 0
}
""")
settings.ValidateAndAssignDefaults(default_settings)
self.model_part = model[settings["model_part_name"].GetString()]
self.swap_yz_coordinates = settings["swap_yz_coordinates"].GetBool()
self.nodes_ids_offset = settings["nodes_ids_offset"].GetInt()
self.elements_ids_offset = settings["elements_ids_offset"].GetInt()
self.conditions_ids_offset = settings["conditions_ids_offset"].GetInt()
self.properties_ids_offset = settings["properties_ids_offset"].GetInt()
self.execute_initialize_solution_step_is_called = False
|
[
"def",
"__init__",
"(",
"self",
",",
"model",
",",
"settings",
")",
":",
"KM",
".",
"Process",
".",
"__init__",
"(",
"self",
")",
"default_settings",
"=",
"KM",
".",
"Parameters",
"(",
"\"\"\"\n {\n \"model_part_name\" : \"model_part_name\",\n \"swap_yz_coordinates\" : true,\n \"nodes_ids_offset\" : 0,\n \"elements_ids_offset\" : 0,\n \"conditions_ids_offset\" : 0,\n \"properties_ids_offset\" : 0\n }\n \"\"\"",
")",
"settings",
".",
"ValidateAndAssignDefaults",
"(",
"default_settings",
")",
"self",
".",
"model_part",
"=",
"model",
"[",
"settings",
"[",
"\"model_part_name\"",
"]",
".",
"GetString",
"(",
")",
"]",
"self",
".",
"swap_yz_coordinates",
"=",
"settings",
"[",
"\"swap_yz_coordinates\"",
"]",
".",
"GetBool",
"(",
")",
"self",
".",
"nodes_ids_offset",
"=",
"settings",
"[",
"\"nodes_ids_offset\"",
"]",
".",
"GetInt",
"(",
")",
"self",
".",
"elements_ids_offset",
"=",
"settings",
"[",
"\"elements_ids_offset\"",
"]",
".",
"GetInt",
"(",
")",
"self",
".",
"conditions_ids_offset",
"=",
"settings",
"[",
"\"conditions_ids_offset\"",
"]",
".",
"GetInt",
"(",
")",
"self",
".",
"properties_ids_offset",
"=",
"settings",
"[",
"\"properties_ids_offset\"",
"]",
".",
"GetInt",
"(",
")",
"self",
".",
"execute_initialize_solution_step_is_called",
"=",
"False"
] |
https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/applications/ShallowWaterApplication/python_scripts/postprocess/swap_coordinates_and_offset_ids_process.py#L11-L40
|
||
mapnik/mapnik
|
f3da900c355e1d15059c4a91b00203dcc9d9f0ef
|
scons/scons-local-4.1.0/SCons/Util.py
|
python
|
NodeList.__getitem__
|
(self, index)
|
This comes for free on py2,
but py3 slices of NodeList return a plain list,
breaking slicing of NodeList and referring to
properties and methods on the contained objects
|
This comes for free on py2,
but py3 slices of NodeList return a plain list,
breaking slicing of NodeList and referring to
properties and methods on the contained objects
|
[
"This",
"comes",
"for",
"free",
"on",
"py2",
"but",
"py3",
"slices",
"of",
"NodeList",
"are",
"returning",
"a",
"list",
"breaking",
"slicing",
"nodelist",
"and",
"refering",
"to",
"properties",
"and",
"methods",
"on",
"contained",
"object"
] |
def __getitem__(self, index):
"""
This comes for free on py2,
but py3 slices of NodeList return a plain list,
breaking slicing of NodeList and referring to
properties and methods on the contained objects
"""
# return self.__class__(self.data[index])
if isinstance(index, slice):
# Expand the slice object using range()
# limited by number of items in self.data
indices = index.indices(len(self.data))
return self.__class__([self[x] for x in
range(*indices)])
else:
# Return one item of the list
return self.data[index]
|
[
"def",
"__getitem__",
"(",
"self",
",",
"index",
")",
":",
"# return self.__class__(self.data[index])",
"if",
"isinstance",
"(",
"index",
",",
"slice",
")",
":",
"# Expand the slice object using range()",
"# limited by number of items in self.data",
"indices",
"=",
"index",
".",
"indices",
"(",
"len",
"(",
"self",
".",
"data",
")",
")",
"return",
"self",
".",
"__class__",
"(",
"[",
"self",
"[",
"x",
"]",
"for",
"x",
"in",
"range",
"(",
"*",
"indices",
")",
"]",
")",
"else",
":",
"# Return one item of the tart",
"return",
"self",
".",
"data",
"[",
"index",
"]"
] |
https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Util.py#L145-L162
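The pattern above (slices return the subclass, not a plain list) is easy to isolate. A self-contained sketch, independent of SCons:

class TypedList:
    """A list wrapper whose slices keep the wrapper type."""
    def __init__(self, data):
        self.data = list(data)

    def __len__(self):
        return len(self.data)

    def __getitem__(self, index):
        if isinstance(index, slice):
            # Clamp the slice against the current length, as SCons does.
            indices = index.indices(len(self.data))
            return self.__class__([self.data[i] for i in range(*indices)])
        return self.data[index]

nl = TypedList([1, 2, 3, 4])
print(type(nl[1:3]).__name__)  # -> 'TypedList', not 'list'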
|
||
apple/swift-lldb
|
d74be846ef3e62de946df343e8c234bde93a8912
|
examples/customization/bin-utils/binutils.py
|
python
|
itob
|
(debugger, command_line, result, dict)
|
Convert the integer to print its two's complement representation.
args[0] (mandatory) is the integer to be converted
args[1] (mandatory) is the bit width of the two's complement representation
args[2] (optional) if specified, turns on verbose printing
|
Convert the integer to print its two's complement representation.
args[0] (mandatory) is the integer to be converted
args[1] (mandatory) is the bit width of the two's complement representation
args[2] (optional) if specified, turns on verbose printing
|
[
"Convert",
"the",
"integer",
"to",
"print",
"its",
"two",
"s",
"complement",
"representation",
".",
"args",
"[",
"0",
"]",
"(",
"mandatory",
")",
"is",
"the",
"integer",
"to",
"be",
"converted",
"args",
"[",
"1",
"]",
"(",
"mandatory",
")",
"is",
"the",
"bit",
"width",
"of",
"the",
"two",
"s",
"complement",
"representation",
"args",
"[",
"2",
"]",
"(",
"optional",
")",
"if",
"specified",
"turns",
"on",
"verbose",
"printing"
] |
def itob(debugger, command_line, result, dict):
"""Convert the integer to print its two's complement representation.
args[0] (mandatory) is the integer to be converted
args[1] (mandatory) is the bit width of the two's complement representation
args[2] (optional) if specified, turns on verbose printing"""
args = command_line.split()
try:
n = int(args[0], 0)
width = int(args[1], 0)
if width < 0:
width = 0
except:
print(itob.__doc__)
return
if len(args) > 2:
verbose = True
else:
verbose = False
bits = twos_complement(n, width)
if not bits:
print("insufficient width value: %d" % width)
return
if verbose and width > 0:
pos = positions(width)
print(' ' + ' '.join(pos))
print(' %s' % str(bits))
|
[
"def",
"itob",
"(",
"debugger",
",",
"command_line",
",",
"result",
",",
"dict",
")",
":",
"args",
"=",
"command_line",
".",
"split",
"(",
")",
"try",
":",
"n",
"=",
"int",
"(",
"args",
"[",
"0",
"]",
",",
"0",
")",
"width",
"=",
"int",
"(",
"args",
"[",
"1",
"]",
",",
"0",
")",
"if",
"width",
"<",
"0",
":",
"width",
"=",
"0",
"except",
":",
"print",
"(",
"itob",
".",
"__doc__",
")",
"return",
"if",
"len",
"(",
"args",
")",
">",
"2",
":",
"verbose",
"=",
"True",
"else",
":",
"verbose",
"=",
"False",
"bits",
"=",
"twos_complement",
"(",
"n",
",",
"width",
")",
"if",
"not",
"bits",
":",
"print",
"(",
"\"insufficient width value: %d\"",
"%",
"width",
")",
"return",
"if",
"verbose",
"and",
"width",
">",
"0",
":",
"pos",
"=",
"positions",
"(",
"width",
")",
"print",
"(",
"' '",
"+",
"' '",
".",
"join",
"(",
"pos",
")",
")",
"print",
"(",
"' %s'",
"%",
"str",
"(",
"bits",
")",
")"
] |
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/examples/customization/bin-utils/binutils.py#L97-L124
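itob leans on twos_complement() and positions(), defined elsewhere in binutils.py. A minimal stand-in for the core conversion, assuming the same contract (a falsy result when the width cannot represent n); the original may return a different structure than a plain bit string:

def twos_complement(n, width):
    """Bit string of n in two's complement, or '' if width is too small."""
    if width <= 0 or n < -(1 << (width - 1)) or n >= (1 << (width - 1)):
        return ''
    return format(n & ((1 << width) - 1), '0{}b'.format(width))

print(twos_complement(-6, 8))  # -> '11111010'
print(twos_complement(5, 3))   # -> '' (3 bits only cover -4..3)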
|
||
google/syzygy
|
8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5
|
third_party/numpy/files/numpy/lib/scimath.py
|
python
|
_fix_real_abs_gt_1
|
(x)
|
return x
|
Convert `x` to complex if it has real components x_i with abs(x_i)>1.
Otherwise, output is just the array version of the input (via asarray).
Parameters
----------
x : array_like
Returns
-------
array
Examples
--------
>>> np.lib.scimath._fix_real_abs_gt_1([0,1])
array([0, 1])
>>> np.lib.scimath._fix_real_abs_gt_1([0,2])
array([ 0.+0.j, 2.+0.j])
|
Convert `x` to complex if it has real components x_i with abs(x_i)>1.
|
[
"Convert",
"x",
"to",
"complex",
"if",
"it",
"has",
"real",
"components",
"x_i",
"with",
"abs",
"(",
"x_i",
")",
">",
"1",
"."
] |
def _fix_real_abs_gt_1(x):
"""Convert `x` to complex if it has real components x_i with abs(x_i)>1.
Otherwise, output is just the array version of the input (via asarray).
Parameters
----------
x : array_like
Returns
-------
array
Examples
--------
>>> np.lib.scimath._fix_real_abs_gt_1([0,1])
array([0, 1])
>>> np.lib.scimath._fix_real_abs_gt_1([0,2])
array([ 0.+0.j, 2.+0.j])
"""
x = asarray(x)
if any(isreal(x) & (abs(x)>1)):
x = _tocomplex(x)
return x
|
[
"def",
"_fix_real_abs_gt_1",
"(",
"x",
")",
":",
"x",
"=",
"asarray",
"(",
"x",
")",
"if",
"any",
"(",
"isreal",
"(",
"x",
")",
"&",
"(",
"abs",
"(",
"x",
")",
">",
"1",
")",
")",
":",
"x",
"=",
"_tocomplex",
"(",
"x",
")",
"return",
"x"
] |
https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/lib/scimath.py#L143-L167
|
|
google/or-tools
|
2cb85b4eead4c38e1c54b48044f92087cf165bce
|
ortools/constraint_solver/samples/vrp_starts_ends.py
|
python
|
print_solution
|
(data, manager, routing, solution)
|
Prints solution on console.
|
Prints solution on console.
|
[
"Prints",
"solution",
"on",
"console",
"."
] |
def print_solution(data, manager, routing, solution):
"""Prints solution on console."""
print(f'Objective: {solution.ObjectiveValue()}')
max_route_distance = 0
for vehicle_id in range(data['num_vehicles']):
index = routing.Start(vehicle_id)
plan_output = 'Route for vehicle {}:\n'.format(vehicle_id)
route_distance = 0
while not routing.IsEnd(index):
plan_output += ' {} -> '.format(manager.IndexToNode(index))
previous_index = index
index = solution.Value(routing.NextVar(index))
route_distance += routing.GetArcCostForVehicle(
previous_index, index, vehicle_id)
plan_output += '{}\n'.format(manager.IndexToNode(index))
plan_output += 'Distance of the route: {}m\n'.format(route_distance)
print(plan_output)
max_route_distance = max(route_distance, max_route_distance)
print('Maximum of the route distances: {}m'.format(max_route_distance))
|
[
"def",
"print_solution",
"(",
"data",
",",
"manager",
",",
"routing",
",",
"solution",
")",
":",
"print",
"(",
"f'Objective: {solution.ObjectiveValue()}'",
")",
"max_route_distance",
"=",
"0",
"for",
"vehicle_id",
"in",
"range",
"(",
"data",
"[",
"'num_vehicles'",
"]",
")",
":",
"index",
"=",
"routing",
".",
"Start",
"(",
"vehicle_id",
")",
"plan_output",
"=",
"'Route for vehicle {}:\\n'",
".",
"format",
"(",
"vehicle_id",
")",
"route_distance",
"=",
"0",
"while",
"not",
"routing",
".",
"IsEnd",
"(",
"index",
")",
":",
"plan_output",
"+=",
"' {} -> '",
".",
"format",
"(",
"manager",
".",
"IndexToNode",
"(",
"index",
")",
")",
"previous_index",
"=",
"index",
"index",
"=",
"solution",
".",
"Value",
"(",
"routing",
".",
"NextVar",
"(",
"index",
")",
")",
"route_distance",
"+=",
"routing",
".",
"GetArcCostForVehicle",
"(",
"previous_index",
",",
"index",
",",
"vehicle_id",
")",
"plan_output",
"+=",
"'{}\\n'",
".",
"format",
"(",
"manager",
".",
"IndexToNode",
"(",
"index",
")",
")",
"plan_output",
"+=",
"'Distance of the route: {}m\\n'",
".",
"format",
"(",
"route_distance",
")",
"print",
"(",
"plan_output",
")",
"max_route_distance",
"=",
"max",
"(",
"route_distance",
",",
"max_route_distance",
")",
"print",
"(",
"'Maximum of the route distances: {}m'",
".",
"format",
"(",
"max_route_distance",
")",
")"
] |
https://github.com/google/or-tools/blob/2cb85b4eead4c38e1c54b48044f92087cf165bce/ortools/constraint_solver/samples/vrp_starts_ends.py#L107-L125
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/third_party/mapreduce/mapreduce/api/map_job/input_reader.py
|
python
|
InputReader.to_json
|
(self)
|
Returns input reader state for the remaining inputs.
Returns:
A json-serializable state for the InputReader.
|
Returns input reader state for the remaining inputs.
|
[
"Returns",
"input",
"reader",
"state",
"for",
"the",
"remaining",
"inputs",
"."
] |
def to_json(self):
"""Returns input reader state for the remaining inputs.
Returns:
A json-serializable state for the InputReader.
"""
raise NotImplementedError("to_json() not implemented in %s" %
self.__class__)
|
[
"def",
"to_json",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"to_json() not implemented in %s\"",
"%",
"self",
".",
"__class__",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/mapreduce/mapreduce/api/map_job/input_reader.py#L63-L70
|
||
goldeneye-source/ges-code
|
2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d
|
thirdparty/protobuf-2.3.0/python/mox.py
|
python
|
UnorderedGroup.IsSatisfied
|
(self)
|
return len(self._methods) == 0
|
Return True if there are no methods in this group.
|
Return True if there are no methods in this group.
|
[
"Return",
"True",
"if",
"there",
"are",
"not",
"any",
"methods",
"in",
"this",
"group",
"."
] |
def IsSatisfied(self):
"""Return True if there are not any methods in this group."""
return len(self._methods) == 0
|
[
"def",
"IsSatisfied",
"(",
"self",
")",
":",
"return",
"len",
"(",
"self",
".",
"_methods",
")",
"==",
"0"
] |
https://github.com/goldeneye-source/ges-code/blob/2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d/thirdparty/protobuf-2.3.0/python/mox.py#L1257-L1260
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scipy/py3/scipy/spatial/_spherical_voronoi.py
|
python
|
project_to_sphere
|
(points, center, radius)
|
return (points - center) / lengths * radius + center
|
Projects the elements of points onto the sphere defined
by center and radius.
Parameters
----------
points : array of floats of shape (npoints, ndim)
consisting of the points in a space of dimension ndim
center : array of floats of shape (ndim,)
the center of the sphere to project on
radius : float
the radius of the sphere to project on
returns: array of floats of shape (npoints, ndim)
the points projected onto the sphere
|
Projects the elements of points onto the sphere defined
by center and radius.
|
[
"Projects",
"the",
"elements",
"of",
"points",
"onto",
"the",
"sphere",
"defined",
"by",
"center",
"and",
"radius",
"."
] |
def project_to_sphere(points, center, radius):
"""
Projects the elements of points onto the sphere defined
by center and radius.
Parameters
----------
points : array of floats of shape (npoints, ndim)
consisting of the points in a space of dimension ndim
center : array of floats of shape (ndim,)
the center of the sphere to project on
radius : float
the radius of the sphere to project on
returns: array of floats of shape (npoints, ndim)
the points projected onto the sphere
"""
lengths = scipy.spatial.distance.cdist(points, np.array([center]))
return (points - center) / lengths * radius + center
|
[
"def",
"project_to_sphere",
"(",
"points",
",",
"center",
",",
"radius",
")",
":",
"lengths",
"=",
"scipy",
".",
"spatial",
".",
"distance",
".",
"cdist",
"(",
"points",
",",
"np",
".",
"array",
"(",
"[",
"center",
"]",
")",
")",
"return",
"(",
"points",
"-",
"center",
")",
"/",
"lengths",
"*",
"radius",
"+",
"center"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/spatial/_spherical_voronoi.py#L71-L90
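A quick sanity check of project_to_sphere: after projection, every point lies at the requested radius from the center. This assumes the function is in scope with its module's imports (numpy as np, scipy.spatial):

import numpy as np
import scipy.spatial

pts = np.array([[1.0, 2.0, 2.0], [0.5, 0.0, 0.0]])
center = np.zeros(3)
projected = project_to_sphere(pts, center, radius=1.0)
print(np.linalg.norm(projected - center, axis=1))  # -> [1. 1.]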
|
|
martinmoene/lest
|
f3e9dfe4a66c3e60dfdac7a3d3e4ddc0dcf06b26
|
script/create-vcpkg.py
|
python
|
portfile_path
|
( args )
|
return tpl_path_vcpkg_portfile.format( vcpkg=args.vcpkg_root, prj=args.project )
|
Create a path like vcpkg/ports/_project_/portfile.cmake
|
Create a path like vcpkg/ports/_project_/portfile.cmake
|
[
"Create",
"path",
"like",
"vcpks",
"/",
"ports",
"/",
"_project_",
"/",
"portfile",
".",
"cmake"
] |
def portfile_path( args ):
"""Create path like vcpks/ports/_project_/portfile.cmake"""
return tpl_path_vcpkg_portfile.format( vcpkg=args.vcpkg_root, prj=args.project )
|
[
"def",
"portfile_path",
"(",
"args",
")",
":",
"return",
"tpl_path_vcpkg_portfile",
".",
"format",
"(",
"vcpkg",
"=",
"args",
".",
"vcpkg_root",
",",
"prj",
"=",
"args",
".",
"project",
")"
] |
https://github.com/martinmoene/lest/blob/f3e9dfe4a66c3e60dfdac7a3d3e4ddc0dcf06b26/script/create-vcpkg.py#L96-L98
|
|
GJDuck/LowFat
|
ecf6a0f0fa1b73a27a626cf493cc39e477b6faea
|
llvm-4.0.0.src/tools/clang/bindings/python/clang/cindex.py
|
python
|
Type.translation_unit
|
(self)
|
return self._tu
|
The TranslationUnit to which this Type is associated.
|
The TranslationUnit to which this Type is associated.
|
[
"The",
"TranslationUnit",
"to",
"which",
"this",
"Type",
"is",
"associated",
"."
] |
def translation_unit(self):
"""The TranslationUnit to which this Type is associated."""
# If this triggers an AttributeError, the instance was not properly
# instantiated.
return self._tu
|
[
"def",
"translation_unit",
"(",
"self",
")",
":",
"# If this triggers an AttributeError, the instance was not properly",
"# instantiated.",
"return",
"self",
".",
"_tu"
] |
https://github.com/GJDuck/LowFat/blob/ecf6a0f0fa1b73a27a626cf493cc39e477b6faea/llvm-4.0.0.src/tools/clang/bindings/python/clang/cindex.py#L2005-L2009
|
|
wyrover/book-code
|
7f4883d9030d553bc6bcfa3da685e34789839900
|
3rdparty/protobuf/python/google/protobuf/internal/containers.py
|
python
|
RepeatedScalarFieldContainer.pop
|
(self, key=-1)
|
return value
|
Removes and returns an item at a given index. Similar to list.pop().
|
Removes and returns an item at a given index. Similar to list.pop().
|
[
"Removes",
"and",
"returns",
"an",
"item",
"at",
"a",
"given",
"index",
".",
"Similar",
"to",
"list",
".",
"pop",
"()",
"."
] |
def pop(self, key=-1):
"""Removes and returns an item at a given index. Similar to list.pop()."""
value = self._values[key]
self.__delitem__(key)
return value
|
[
"def",
"pop",
"(",
"self",
",",
"key",
"=",
"-",
"1",
")",
":",
"value",
"=",
"self",
".",
"_values",
"[",
"key",
"]",
"self",
".",
"__delitem__",
"(",
"key",
")",
"return",
"value"
] |
https://github.com/wyrover/book-code/blob/7f4883d9030d553bc6bcfa3da685e34789839900/3rdparty/protobuf/python/google/protobuf/internal/containers.py#L292-L296
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_core.py
|
python
|
MouseEvent.Aux2DClick
|
(*args, **kwargs)
|
return _core_.MouseEvent_Aux2DClick(*args, **kwargs)
|
Aux2DClick(self) -> bool
Returns true if the event was an AUX2 button double click.
|
Aux2DClick(self) -> bool
|
[
"Aux2DClick",
"(",
"self",
")",
"-",
">",
"bool"
] |
def Aux2DClick(*args, **kwargs):
"""
Aux2DClick(self) -> bool
Returns true if the event was an AUX2 button double click.
"""
return _core_.MouseEvent_Aux2DClick(*args, **kwargs)
|
[
"def",
"Aux2DClick",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"MouseEvent_Aux2DClick",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L5737-L5743
|
|
psi4/psi4
|
be533f7f426b6ccc263904e55122899b16663395
|
psi4/driver/mdi_engine.py
|
python
|
MDIEngine.run_scf
|
(self)
|
Run an energy calculation
|
Run an energy calculation
|
[
"Run",
"an",
"energy",
"calculation"
] |
def run_scf(self):
""" Run an energy calculation
"""
self.energy = psi4.energy(self.scf_method, **self.kwargs)
|
[
"def",
"run_scf",
"(",
"self",
")",
":",
"self",
".",
"energy",
"=",
"psi4",
".",
"energy",
"(",
"self",
".",
"scf_method",
",",
"*",
"*",
"self",
".",
"kwargs",
")"
] |
https://github.com/psi4/psi4/blob/be533f7f426b6ccc263904e55122899b16663395/psi4/driver/mdi_engine.py#L336-L339
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pkg_resources/__init__.py
|
python
|
IResourceProvider.resource_isdir
|
(resource_name)
|
Is the named resource a directory? (like ``os.path.isdir()``)
|
Is the named resource a directory? (like ``os.path.isdir()``)
|
[
"Is",
"the",
"named",
"resource",
"a",
"directory?",
"(",
"like",
"os",
".",
"path",
".",
"isdir",
"()",
")"
] |
def resource_isdir(resource_name):
"""Is the named resource a directory? (like ``os.path.isdir()``)"""
|
[
"def",
"resource_isdir",
"(",
"resource_name",
")",
":"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pkg_resources/__init__.py#L547-L548
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/series.py
|
python
|
Series.between
|
(self, left, right, inclusive=True)
|
return lmask & rmask
|
Return boolean Series equivalent to left <= series <= right.
This function returns a boolean vector containing `True` wherever the
corresponding Series element is between the boundary values `left` and
`right`. NA values are treated as `False`.
Parameters
----------
left : scalar or list-like
Left boundary.
right : scalar or list-like
Right boundary.
inclusive : bool, default True
Include boundaries.
Returns
-------
Series
Series representing whether each element is between left and
right (inclusive).
See Also
--------
Series.gt : Greater than of series and other.
Series.lt : Less than of series and other.
Notes
-----
This function is equivalent to ``(left <= ser) & (ser <= right)``
Examples
--------
>>> s = pd.Series([2, 0, 4, 8, np.nan])
Boundary values are included by default:
>>> s.between(1, 4)
0 True
1 False
2 True
3 False
4 False
dtype: bool
With `inclusive` set to ``False`` boundary values are excluded:
>>> s.between(1, 4, inclusive=False)
0 True
1 False
2 False
3 False
4 False
dtype: bool
`left` and `right` can be any scalar value:
>>> s = pd.Series(['Alice', 'Bob', 'Carol', 'Eve'])
>>> s.between('Anna', 'Daniel')
0 False
1 True
2 True
3 False
dtype: bool
|
Return boolean Series equivalent to left <= series <= right.
|
[
"Return",
"boolean",
"Series",
"equivalent",
"to",
"left",
"<",
"=",
"series",
"<",
"=",
"right",
"."
] |
def between(self, left, right, inclusive=True):
"""
Return boolean Series equivalent to left <= series <= right.
This function returns a boolean vector containing `True` wherever the
corresponding Series element is between the boundary values `left` and
`right`. NA values are treated as `False`.
Parameters
----------
left : scalar or list-like
Left boundary.
right : scalar or list-like
Right boundary.
inclusive : bool, default True
Include boundaries.
Returns
-------
Series
Series representing whether each element is between left and
right (inclusive).
See Also
--------
Series.gt : Greater than of series and other.
Series.lt : Less than of series and other.
Notes
-----
This function is equivalent to ``(left <= ser) & (ser <= right)``
Examples
--------
>>> s = pd.Series([2, 0, 4, 8, np.nan])
Boundary values are included by default:
>>> s.between(1, 4)
0 True
1 False
2 True
3 False
4 False
dtype: bool
With `inclusive` set to ``False`` boundary values are excluded:
>>> s.between(1, 4, inclusive=False)
0 True
1 False
2 False
3 False
4 False
dtype: bool
`left` and `right` can be any scalar value:
>>> s = pd.Series(['Alice', 'Bob', 'Carol', 'Eve'])
>>> s.between('Anna', 'Daniel')
0 False
1 True
2 True
3 False
dtype: bool
"""
if inclusive:
lmask = self >= left
rmask = self <= right
else:
lmask = self > left
rmask = self < right
return lmask & rmask
|
[
"def",
"between",
"(",
"self",
",",
"left",
",",
"right",
",",
"inclusive",
"=",
"True",
")",
":",
"if",
"inclusive",
":",
"lmask",
"=",
"self",
">=",
"left",
"rmask",
"=",
"self",
"<=",
"right",
"else",
":",
"lmask",
"=",
"self",
">",
"left",
"rmask",
"=",
"self",
"<",
"right",
"return",
"lmask",
"&",
"rmask"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/series.py#L4297-L4370
|
|
thalium/icebox
|
99d147d5b9269222225443ce171b4fd46d8985d4
|
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
|
python
|
xmlTextReader.ReadString
|
(self)
|
return ret
|
Reads the contents of an element or a text node as a string.
|
Reads the contents of an element or a text node as a string.
|
[
"Reads",
"the",
"contents",
"of",
"an",
"element",
"or",
"a",
"text",
"node",
"as",
"a",
"string",
"."
] |
def ReadString(self):
"""Reads the contents of an element or a text node as a string. """
ret = libxml2mod.xmlTextReaderReadString(self._o)
return ret
|
[
"def",
"ReadString",
"(",
"self",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlTextReaderReadString",
"(",
"self",
".",
"_o",
")",
"return",
"ret"
] |
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L6067-L6070
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/x86/toolchain/lib/python2.7/decimal.py
|
python
|
Decimal.__long__
|
(self)
|
return long(self.__int__())
|
Converts to a long.
Equivalent to long(int(self))
|
Converts to a long.
|
[
"Converts",
"to",
"a",
"long",
"."
] |
def __long__(self):
"""Converts to a long.
Equivalent to long(int(self))
"""
return long(self.__int__())
|
[
"def",
"__long__",
"(",
"self",
")",
":",
"return",
"long",
"(",
"self",
".",
"__int__",
"(",
")",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/decimal.py#L1621-L1626
|
|
pytorch/pytorch
|
7176c92687d3cc847cc046bf002269c6949a21c2
|
caffe2/python/workspace.py
|
python
|
FetchBlob
|
(name)
|
return result
|
Fetches a blob from the workspace.
Inputs:
name: the name of the blob - a string or a BlobReference
Returns:
Fetched blob (numpy array or string) if successful
|
Fetches a blob from the workspace.
|
[
"Fetches",
"a",
"blob",
"from",
"the",
"workspace",
"."
] |
def FetchBlob(name):
"""Fetches a blob from the workspace.
Inputs:
name: the name of the blob - a string or a BlobReference
Returns:
Fetched blob (numpy array or string) if successful
"""
result = C.fetch_blob(StringifyBlobName(name))
if isinstance(result, tuple):
raise TypeError(
"Use FetchInt8Blob to fetch Int8 Blob {}".format(
StringifyBlobName(name)
)
)
return result
|
[
"def",
"FetchBlob",
"(",
"name",
")",
":",
"result",
"=",
"C",
".",
"fetch_blob",
"(",
"StringifyBlobName",
"(",
"name",
")",
")",
"if",
"isinstance",
"(",
"result",
",",
"tuple",
")",
":",
"raise",
"TypeError",
"(",
"\"Use FetchInt8Blob to fetch Int8 Blob {}\"",
".",
"format",
"(",
"StringifyBlobName",
"(",
"name",
")",
")",
")",
"return",
"result"
] |
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/caffe2/python/workspace.py#L378-L393
|
|
Kitware/ParaView
|
f760af9124ff4634b23ebbeab95a4f56e0261955
|
Wrapping/Python/paraview/servermanager.py
|
python
|
ProxyProperty.GetData
|
(self)
|
return None
|
Returns all elements as either a list or a single value.
|
Returns all elements as either a list or a single value.
|
[
"Returns",
"all",
"elements",
"as",
"either",
"a",
"list",
"or",
"a",
"single",
"value",
"."
] |
def GetData(self):
"Returns all elements as either a list or a single value."
property = self.SMProperty
if property.GetRepeatable() or property.GetNumberOfProxies() > 1:
return self[0:len(self)]
else:
if property.GetNumberOfProxies() > 0:
return _getPyProxy(property.GetProxy(0))
return None
|
[
"def",
"GetData",
"(",
"self",
")",
":",
"property",
"=",
"self",
".",
"SMProperty",
"if",
"property",
".",
"GetRepeatable",
"(",
")",
"or",
"property",
".",
"GetNumberOfProxies",
"(",
")",
">",
"1",
":",
"return",
"self",
"[",
"0",
":",
"len",
"(",
"self",
")",
"]",
"else",
":",
"if",
"property",
".",
"GetNumberOfProxies",
"(",
")",
">",
"0",
":",
"return",
"_getPyProxy",
"(",
"property",
".",
"GetProxy",
"(",
"0",
")",
")",
"return",
"None"
] |
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/Wrapping/Python/paraview/servermanager.py#L1359-L1367
|
|
ChromiumWebApps/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
tools/metrics/actions/extract_actions.py
|
python
|
AddWebUIActions
|
(actions)
|
Add user actions defined in WebUI files.
Arguments:
actions: set of actions to add to.
|
Add user actions defined in WebUI files.
|
[
"Add",
"user",
"actions",
"defined",
"in",
"WebUI",
"files",
"."
] |
def AddWebUIActions(actions):
"""Add user actions defined in WebUI files.
Arguments:
actions: set of actions to add to.
"""
resources_root = os.path.join(REPOSITORY_ROOT, 'chrome', 'browser',
'resources')
WalkDirectory(resources_root, actions, ('.html'), GrepForWebUIActions)
|
[
"def",
"AddWebUIActions",
"(",
"actions",
")",
":",
"resources_root",
"=",
"os",
".",
"path",
".",
"join",
"(",
"REPOSITORY_ROOT",
",",
"'chrome'",
",",
"'browser'",
",",
"'resources'",
")",
"WalkDirectory",
"(",
"resources_root",
",",
"actions",
",",
"(",
"'.html'",
")",
",",
"GrepForWebUIActions",
")"
] |
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/metrics/actions/extract_actions.py#L520-L528
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
build/get_syzygy_binaries.py
|
python
|
_Md5
|
(path)
|
return hashlib.md5(open(path, 'rb').read()).hexdigest()
|
Returns the MD5 hash of the file at |path|, which must exist.
|
Returns the MD5 hash of the file at |path|, which must exist.
|
[
"Returns",
"the",
"MD5",
"hash",
"of",
"the",
"file",
"at",
"|path|",
"which",
"must",
"exist",
"."
] |
def _Md5(path):
"""Returns the MD5 hash of the file at |path|, which must exist."""
return hashlib.md5(open(path, 'rb').read()).hexdigest()
|
[
"def",
"_Md5",
"(",
"path",
")",
":",
"return",
"hashlib",
".",
"md5",
"(",
"open",
"(",
"path",
",",
"'rb'",
")",
".",
"read",
"(",
")",
")",
".",
"hexdigest",
"(",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/build/get_syzygy_binaries.py#L78-L80
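The one-liner above never closes its file handle and reads the whole file into memory. An equivalent sketch that closes the handle deterministically and hashes in chunks (md5_of is a hypothetical name, not part of the script):

import hashlib

def md5_of(path, chunk_size=1 << 20):
    """MD5 hex digest of the file at path, read in 1 MiB chunks."""
    digest = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            digest.update(chunk)
    return digest.hexdigest()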
|
|
BlzFans/wke
|
b0fa21158312e40c5fbd84682d643022b6c34a93
|
cygwin/lib/python2.6/xml/dom/expatbuilder.py
|
python
|
ExpatBuilder.parseFile
|
(self, file)
|
return doc
|
Parse a document from a file object, returning the document
node.
|
Parse a document from a file object, returning the document
node.
|
[
"Parse",
"a",
"document",
"from",
"a",
"file",
"object",
"returning",
"the",
"document",
"node",
"."
] |
def parseFile(self, file):
"""Parse a document from a file object, returning the document
node."""
parser = self.getParser()
first_buffer = True
try:
while 1:
buffer = file.read(16*1024)
if not buffer:
break
parser.Parse(buffer, 0)
if first_buffer and self.document.documentElement:
self._setup_subset(buffer)
first_buffer = False
parser.Parse("", True)
except ParseEscape:
pass
doc = self.document
self.reset()
self._parser = None
return doc
|
[
"def",
"parseFile",
"(",
"self",
",",
"file",
")",
":",
"parser",
"=",
"self",
".",
"getParser",
"(",
")",
"first_buffer",
"=",
"True",
"try",
":",
"while",
"1",
":",
"buffer",
"=",
"file",
".",
"read",
"(",
"16",
"*",
"1024",
")",
"if",
"not",
"buffer",
":",
"break",
"parser",
".",
"Parse",
"(",
"buffer",
",",
"0",
")",
"if",
"first_buffer",
"and",
"self",
".",
"document",
".",
"documentElement",
":",
"self",
".",
"_setup_subset",
"(",
"buffer",
")",
"first_buffer",
"=",
"False",
"parser",
".",
"Parse",
"(",
"\"\"",
",",
"True",
")",
"except",
"ParseEscape",
":",
"pass",
"doc",
"=",
"self",
".",
"document",
"self",
".",
"reset",
"(",
")",
"self",
".",
"_parser",
"=",
"None",
"return",
"doc"
] |
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/xml/dom/expatbuilder.py#L197-L217
|
|
pytorch/pytorch
|
7176c92687d3cc847cc046bf002269c6949a21c2
|
caffe2/python/data_parallel_model.py
|
python
|
_InterleaveOps
|
(model)
|
Data Parallel Model creates a net with ops in one device grouped together.
This will interleave the ops so that each op for each device is next
to each other in the net. Kind of like combining decks of cards. This
ensures that progress is made along the critical path roughly concurrently
for each device, which is important due to the extra intra-node
synchronization required for multi-device batch normalization.
|
Data Parallel Model creates a net with ops in one device grouped together.
This will interleave the ops so that each op for each device is next
to each other in the net. Kind of like combining decks of cards. This
ensures that progress is made along the critical path roughly concurrently
for each device, which is important due to the extra intra-node
synchronization required for multi-device batch normalization.
|
[
"Data",
"Parallel",
"Model",
"creates",
"a",
"net",
"with",
"ops",
"in",
"one",
"device",
"grouped",
"together",
".",
"This",
"will",
"interleave",
"the",
"ops",
"so",
"that",
"each",
"op",
"for",
"each",
"device",
"is",
"next",
"to",
"each",
"other",
"in",
"the",
"net",
".",
"Kind",
"of",
"like",
"combining",
"decks",
"of",
"cards",
".",
"This",
"ensures",
"that",
"progress",
"is",
"made",
"along",
"the",
"critical",
"path",
"roughly",
"concurrently",
"for",
"each",
"device",
"which",
"is",
"important",
"due",
"to",
"the",
"extra",
"intra",
"-",
"node",
"synchronization",
"required",
"for",
"multi",
"-",
"device",
"batch",
"normalization",
"."
] |
def _InterleaveOps(model):
'''
Data Parallel Model creates a net with ops in one device grouped together.
This will interleave the ops so that each op for each device is next
to each other in the net. Kind of like combining decks of cards. This
ensures that progress is made along the critical path roughly concurrently
for each device, which is important due to the extra intra-node
synchronization required for multi-device batch normalization.
'''
orig_ops = list(model.net.Proto().op)
num_devices = len(model._devices)
num_ops_per_dev = len(orig_ops) // num_devices
assert num_devices * num_ops_per_dev == len(orig_ops), \
'Number of ops per device in original net is not uniform'
new_ops = []
ops = {d: [] for d in range(num_devices)}
for op in orig_ops:
ops[op.device_option.device_id].append(op)
for j in range(num_ops_per_dev):
tp = None
for d in model._devices:
if tp is None:
tp = ops[d][j].type
new_ops.append(ops[d][j])
# Sanity
assert ops[d][j].type == tp, \
"Type mismatch {} / {}".format(tp, ops[d][j].type)
del model.net.Proto().op[:]
model.net.Proto().op.extend(new_ops)
|
[
"def",
"_InterleaveOps",
"(",
"model",
")",
":",
"orig_ops",
"=",
"list",
"(",
"model",
".",
"net",
".",
"Proto",
"(",
")",
".",
"op",
")",
"num_devices",
"=",
"len",
"(",
"model",
".",
"_devices",
")",
"num_ops_per_dev",
"=",
"len",
"(",
"orig_ops",
")",
"//",
"num_devices",
"assert",
"num_devices",
"*",
"num_ops_per_dev",
"==",
"len",
"(",
"orig_ops",
")",
",",
"'Number of ops per device in original net is not uniform'",
"new_ops",
"=",
"[",
"]",
"ops",
"=",
"{",
"d",
":",
"[",
"]",
"for",
"d",
"in",
"range",
"(",
"num_devices",
")",
"}",
"for",
"op",
"in",
"orig_ops",
":",
"ops",
"[",
"op",
".",
"device_option",
".",
"device_id",
"]",
".",
"append",
"(",
"op",
")",
"for",
"j",
"in",
"range",
"(",
"num_ops_per_dev",
")",
":",
"tp",
"=",
"None",
"for",
"d",
"in",
"model",
".",
"_devices",
":",
"if",
"tp",
"is",
"None",
":",
"tp",
"=",
"ops",
"[",
"d",
"]",
"[",
"j",
"]",
".",
"type",
"new_ops",
".",
"append",
"(",
"ops",
"[",
"d",
"]",
"[",
"j",
"]",
")",
"# Sanity",
"assert",
"ops",
"[",
"d",
"]",
"[",
"j",
"]",
".",
"type",
"==",
"tp",
",",
"\"Type mismatch {} / {}\"",
".",
"format",
"(",
"tp",
",",
"ops",
"[",
"d",
"]",
"[",
"j",
"]",
".",
"type",
")",
"del",
"model",
".",
"net",
".",
"Proto",
"(",
")",
".",
"op",
"[",
":",
"]",
"model",
".",
"net",
".",
"Proto",
"(",
")",
".",
"op",
".",
"extend",
"(",
"new_ops",
")"
] |
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/caffe2/python/data_parallel_model.py#L1935-L1965
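The "combining decks of cards" step, isolated from the Caffe2 specifics: given per-device op lists of equal length, emit ops round-robin so that step j of every device is adjacent in the result. interleave is a hypothetical helper, not part of data_parallel_model.py:

def interleave(per_device_ops):
    lengths = {len(ops) for ops in per_device_ops}
    assert len(lengths) == 1, 'ops per device must be uniform'
    return [ops[j] for j in range(lengths.pop()) for ops in per_device_ops]

print(interleave([['a0', 'a1'], ['b0', 'b1']]))  # -> ['a0', 'b0', 'a1', 'b1']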
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python/src/Lib/_abcoll.py
|
python
|
MutableSet.discard
|
(self, value)
|
Remove an element. Do not raise an exception if absent.
|
Remove an element. Do not raise an exception if absent.
|
[
"Remove",
"an",
"element",
".",
"Do",
"not",
"raise",
"an",
"exception",
"if",
"absent",
"."
] |
def discard(self, value):
"""Remove an element. Do not raise an exception if absent."""
raise NotImplementedError
|
[
"def",
"discard",
"(",
"self",
",",
"value",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/_abcoll.py#L300-L302
|
||
kevinlin311tw/Caffe-DeepBinaryCode
|
9eaa7662be47d49f475ecbeea2bd51be105270d2
|
scripts/cpp_lint.py
|
python
|
ParseNolintSuppressions
|
(filename, raw_line, linenum, error)
|
Updates the global list of error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
|
Updates the global list of error-suppressions.
|
[
"Updates",
"the",
"global",
"list",
"of",
"error",
"-",
"suppressions",
"."
] |
def ParseNolintSuppressions(filename, raw_line, linenum, error):
"""Updates the global list of error-suppressions.
Parses any NOLINT comments on the current line, updating the global
error_suppressions store. Reports an error if the NOLINT comment
was malformed.
Args:
filename: str, the name of the input file.
raw_line: str, the line of input text, with comments.
linenum: int, the number of the current line.
error: function, an error handler.
"""
# FIXME(adonovan): "NOLINT(" is misparsed as NOLINT(*).
matched = _RE_SUPPRESSION.search(raw_line)
if matched:
if matched.group(1) == '_NEXT_LINE':
linenum += 1
category = matched.group(2)
if category in (None, '(*)'): # => "suppress all"
_error_suppressions.setdefault(None, set()).add(linenum)
else:
if category.startswith('(') and category.endswith(')'):
category = category[1:-1]
if category in _ERROR_CATEGORIES:
_error_suppressions.setdefault(category, set()).add(linenum)
else:
error(filename, linenum, 'readability/nolint', 5,
'Unknown NOLINT error category: %s' % category)
|
[
"def",
"ParseNolintSuppressions",
"(",
"filename",
",",
"raw_line",
",",
"linenum",
",",
"error",
")",
":",
"# FIXME(adonovan): \"NOLINT(\" is misparsed as NOLINT(*).",
"matched",
"=",
"_RE_SUPPRESSION",
".",
"search",
"(",
"raw_line",
")",
"if",
"matched",
":",
"if",
"matched",
".",
"group",
"(",
"1",
")",
"==",
"'_NEXT_LINE'",
":",
"linenum",
"+=",
"1",
"category",
"=",
"matched",
".",
"group",
"(",
"2",
")",
"if",
"category",
"in",
"(",
"None",
",",
"'(*)'",
")",
":",
"# => \"suppress all\"",
"_error_suppressions",
".",
"setdefault",
"(",
"None",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"linenum",
")",
"else",
":",
"if",
"category",
".",
"startswith",
"(",
"'('",
")",
"and",
"category",
".",
"endswith",
"(",
"')'",
")",
":",
"category",
"=",
"category",
"[",
"1",
":",
"-",
"1",
"]",
"if",
"category",
"in",
"_ERROR_CATEGORIES",
":",
"_error_suppressions",
".",
"setdefault",
"(",
"category",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"linenum",
")",
"else",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/nolint'",
",",
"5",
",",
"'Unknown NOLINT error category: %s'",
"%",
"category",
")"
] |
https://github.com/kevinlin311tw/Caffe-DeepBinaryCode/blob/9eaa7662be47d49f475ecbeea2bd51be105270d2/scripts/cpp_lint.py#L464-L492
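_RE_SUPPRESSION itself is defined elsewhere in cpplint.py; a simplified, hypothetical stand-in that produces the two groups the parser above consumes (an optional _NEXT_LINE marker and an optional parenthesized category):

import re

RE_SUPPRESSION = re.compile(r'\bNOLINT(_NEXT_LINE)?\b(\([^)]*\))?')

m = RE_SUPPRESSION.search('int x;  // NOLINT(readability/nolint)')
print(m.group(1), m.group(2))  # -> None (readability/nolint)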
|
||
plumonito/dtslam
|
5994bb9cf7a11981b830370db206bceb654c085d
|
3rdparty/opencv-git/doc/pattern_tools/svgfig.py
|
python
|
SVG.items
|
(self, sub=True, attr=True, text=True)
|
return output
|
Get a recursively-generated list of tree-index, sub-element/attribute pairs.
If sub == False, do not show sub-elements.
If attr == False, do not show attributes.
If text == False, do not show text/Unicode sub-elements.
|
Get a recursively-generated list of tree-index, sub-element/attribute pairs.
|
[
"Get",
"a",
"recursively",
"-",
"generated",
"list",
"of",
"tree",
"-",
"index",
"sub",
"-",
"element",
"/",
"attribute",
"pairs",
"."
] |
def items(self, sub=True, attr=True, text=True):
"""Get a recursively-generated list of tree-index, sub-element/attribute pairs.
If sub == False, do not show sub-elements.
If attr == False, do not show attributes.
If text == False, do not show text/Unicode sub-elements.
"""
output = []
for ti, s in self:
show = False
if isinstance(ti[-1], (int, long)):
if isinstance(s, basestring):
show = text
else:
show = sub
else:
show = attr
if show:
output.append((ti, s))
return output
|
[
"def",
"items",
"(",
"self",
",",
"sub",
"=",
"True",
",",
"attr",
"=",
"True",
",",
"text",
"=",
"True",
")",
":",
"output",
"=",
"[",
"]",
"for",
"ti",
",",
"s",
"in",
"self",
":",
"show",
"=",
"False",
"if",
"isinstance",
"(",
"ti",
"[",
"-",
"1",
"]",
",",
"(",
"int",
",",
"long",
")",
")",
":",
"if",
"isinstance",
"(",
"s",
",",
"basestring",
")",
":",
"show",
"=",
"text",
"else",
":",
"show",
"=",
"sub",
"else",
":",
"show",
"=",
"attr",
"if",
"show",
":",
"output",
".",
"append",
"(",
"(",
"ti",
",",
"s",
")",
")",
"return",
"output"
] |
https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/doc/pattern_tools/svgfig.py#L270-L290
|
|
verilog-to-routing/vtr-verilog-to-routing
|
d9719cf7374821156c3cee31d66991cb85578562
|
vtr_flow/scripts/benchtracker/flask_cors/core.py
|
python
|
re_fix
|
(reg)
|
return r".*" if reg == r"*" else reg
|
Replace the invalid regex r'*' with the valid, wildcard regex r'/.*' to
enable the CORS app extension to have a more user friendly api.
|
Replace the invalid regex r'*' with the valid, wildcard regex r'/.*' to
enable the CORS app extension to have a more user friendly api.
|
[
"Replace",
"the",
"invalid",
"regex",
"r",
"*",
"with",
"the",
"valid",
"wildcard",
"regex",
"r",
"/",
".",
"*",
"to",
"enable",
"the",
"CORS",
"app",
"extension",
"to",
"have",
"a",
"more",
"user",
"friendly",
"api",
"."
] |
def re_fix(reg):
"""
Replace the invalid regex r'*' with the valid, wildcard regex r'/.*' to
enable the CORS app extension to have a more user friendly api.
"""
return r".*" if reg == r"*" else reg
|
[
"def",
"re_fix",
"(",
"reg",
")",
":",
"return",
"r\".*\"",
"if",
"reg",
"==",
"r\"*\"",
"else",
"reg"
] |
https://github.com/verilog-to-routing/vtr-verilog-to-routing/blob/d9719cf7374821156c3cee31d66991cb85578562/vtr_flow/scripts/benchtracker/flask_cors/core.py#L251-L256
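Usage is a one-liner; only the bare wildcard is rewritten, every other pattern passes through untouched:

def re_fix(reg):
    return r".*" if reg == r"*" else reg

print(re_fix(r"*"))        # '.*'      -- bare wildcard made a valid regex
print(re_fix(r"/api/.*"))  # '/api/.*' -- unchanged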
|
|
apache/incubator-mxnet
|
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
|
python/mxnet/symbol/numpy/_symbol.py
|
python
|
blackman
|
(M, dtype=None, ctx=None)
|
return _npi.blackman(M, dtype=dtype, ctx=ctx)
|
r"""Return the Blackman window.
The Blackman window is a taper formed by using the first three
terms of a summation of cosines. It was designed to have close to the
minimal leakage possible. It is close to optimal, only slightly worse
than a Kaiser window.
Parameters
----------
M : int
Number of points in the output window. If zero or less, an
empty array is returned.
ctx : Context, optional
An optional device context (default is the current default context).
Returns
-------
out : _Symbol
The window, with the maximum value normalized to one (the value one
appears only if the number of samples is odd).
When npx.is_np_default_dtype() returns False, default dtype is float32;
When npx.is_np_default_dtype() returns True, default dtype is float64.
Note that you need to select numpy.float32 or float64 in this operator.
See Also
--------
hamming, hanning
Notes
-----
The Blackman window is defined as
.. math:: w(n) = 0.42 - 0.5 \cos(2\pi n/{M-1}) + 0.08 \cos(4\pi n/{M-1})
Most references to the Blackman window come from the signal processing
literature, where it is used as one of many windowing functions for
smoothing values. It is also known as an apodization (which means
"removing the foot", i.e. smoothing discontinuities at the beginning
and end of the sampled signal) or tapering function. It is known as a
"near optimal" tapering function, almost as good (by some measures)
as the kaiser window.
References
----------
Blackman, R.B. and Tukey, J.W., (1958) The measurement of power spectra,
Dover Publications, New York.
Oppenheim, A.V., and R.W. Schafer. Discrete-Time Signal Processing.
Upper Saddle River, NJ: Prentice-Hall, 1999, pp. 468-471.
Examples
--------
>>> np.blackman(12)
array([-1.4901161e-08, 3.2606423e-02, 1.5990365e-01, 4.1439798e-01,
7.3604530e-01, 9.6704686e-01, 9.6704674e-01, 7.3604506e-01,
4.1439781e-01, 1.5990359e-01, 3.2606363e-02, -1.4901161e-08])
Plot the window and its frequency response:
>>> import matplotlib.pyplot as plt
>>> window = np.blackman(51)
>>> plt.plot(window.asnumpy())
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.title("blackman window")
Text(0.5, 1.0, 'blackman window')
>>> plt.ylabel("Amplitude")
Text(0, 0.5, 'Amplitude')
>>> plt.xlabel("Sample")
Text(0.5, 0, 'Sample')
>>> plt.show()
|
r"""Return the Blackman window.
|
[
"r",
"Return",
"the",
"Blackman",
"window",
"."
] |
def blackman(M, dtype=None, ctx=None):
r"""Return the Blackman window.
The Blackman window is a taper formed by using the first three
terms of a summation of cosines. It was designed to have close to the
minimal leakage possible. It is close to optimal, only slightly worse
than a Kaiser window.
Parameters
----------
M : int
Number of points in the output window. If zero or less, an
empty array is returned.
ctx : Context, optional
An optional device context (default is the current default context).
Returns
-------
out : _Symbol
The window, with the maximum value normalized to one (the value one
appears only if the number of samples is odd).
When npx.is_np_default_dtype() returns False, default dtype is float32;
When npx.is_np_default_dtype() returns True, default dtype is float64.
Note that you need to select numpy.float32 or float64 in this operator.
See Also
--------
hamming, hanning
Notes
-----
The Blackman window is defined as
.. math:: w(n) = 0.42 - 0.5 \cos(2\pi n/{M-1}) + 0.08 \cos(4\pi n/{M-1})
Most references to the Blackman window come from the signal processing
literature, where it is used as one of many windowing functions for
smoothing values. It is also known as an apodization (which means
"removing the foot", i.e. smoothing discontinuities at the beginning
and end of the sampled signal) or tapering function. It is known as a
"near optimal" tapering function, almost as good (by some measures)
as the kaiser window.
References
----------
Blackman, R.B. and Tukey, J.W., (1958) The measurement of power spectra,
Dover Publications, New York.
Oppenheim, A.V., and R.W. Schafer. Discrete-Time Signal Processing.
Upper Saddle River, NJ: Prentice-Hall, 1999, pp. 468-471.
Examples
--------
>>> np.blackman(12)
array([-1.4901161e-08, 3.2606423e-02, 1.5990365e-01, 4.1439798e-01,
7.3604530e-01, 9.6704686e-01, 9.6704674e-01, 7.3604506e-01,
4.1439781e-01, 1.5990359e-01, 3.2606363e-02, -1.4901161e-08])
Plot the window and its frequency response:
>>> import matplotlib.pyplot as plt
>>> window = np.blackman(51)
>>> plt.plot(window.asnumpy())
[<matplotlib.lines.Line2D object at 0x...>]
>>> plt.title("blackman window")
Text(0.5, 1.0, 'blackman window')
>>> plt.ylabel("Amplitude")
Text(0, 0.5, 'Amplitude')
>>> plt.xlabel("Sample")
Text(0.5, 0, 'Sample')
>>> plt.show()
"""
if ctx is None:
ctx = current_context()
return _npi.blackman(M, dtype=dtype, ctx=ctx)
|
[
"def",
"blackman",
"(",
"M",
",",
"dtype",
"=",
"None",
",",
"ctx",
"=",
"None",
")",
":",
"if",
"ctx",
"is",
"None",
":",
"ctx",
"=",
"current_context",
"(",
")",
"return",
"_npi",
".",
"blackman",
"(",
"M",
",",
"dtype",
"=",
"dtype",
",",
"ctx",
"=",
"ctx",
")"
] |
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/symbol/numpy/_symbol.py#L5593-L5667
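The formula in the Notes section can be checked against plain NumPy, used here instead of mxnet so the sketch stays self-contained; mxnet's _npi.blackman is assumed to evaluate the same taper on the requested device:

import numpy as np

M = 12
n = np.arange(M)
# w(n) = 0.42 - 0.5*cos(2*pi*n/(M-1)) + 0.08*cos(4*pi*n/(M-1))
w = 0.42 - 0.5 * np.cos(2 * np.pi * n / (M - 1)) \
         + 0.08 * np.cos(4 * np.pi * n / (M - 1))
print(np.allclose(w, np.blackman(M)))  # True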
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/llvmlite/ir/builder.py
|
python
|
IRBuilder.cbranch
|
(self, cond, truebr, falsebr)
|
return br
|
Conditional branch to *truebr* if *cond* is true, else to *falsebr*.
|
Conditional branch to *truebr* if *cond* is true, else to *falsebr*.
|
[
"Conditional",
"branch",
"to",
"*",
"truebr",
"*",
"if",
"*",
"cond",
"*",
"is",
"true",
"else",
"to",
"*",
"falsebr",
"*",
"."
] |
def cbranch(self, cond, truebr, falsebr):
"""
Conditional branch to *truebr* if *cond* is true, else to *falsebr*.
"""
br = instructions.ConditionalBranch(self.block, "br",
[cond, truebr, falsebr])
self._set_terminator(br)
return br
|
[
"def",
"cbranch",
"(",
"self",
",",
"cond",
",",
"truebr",
",",
"falsebr",
")",
":",
"br",
"=",
"instructions",
".",
"ConditionalBranch",
"(",
"self",
".",
"block",
",",
"\"br\"",
",",
"[",
"cond",
",",
"truebr",
",",
"falsebr",
"]",
")",
"self",
".",
"_set_terminator",
"(",
"br",
")",
"return",
"br"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/llvmlite/ir/builder.py#L794-L801
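A usage sketch against the public llvmlite API: cbranch emits the terminator for the current block, so the builder is repositioned before filling in the two targets.

from llvmlite import ir

mod = ir.Module()
fnty = ir.FunctionType(ir.IntType(32), [ir.IntType(1)])
fn = ir.Function(mod, fnty, name="pick")
entry = fn.append_basic_block("entry")
then_bb = fn.append_basic_block("then")
else_bb = fn.append_basic_block("else")

b = ir.IRBuilder(entry)
b.cbranch(fn.args[0], then_bb, else_bb)  # terminates the entry block

b.position_at_end(then_bb)
b.ret(ir.Constant(ir.IntType(32), 1))
b.position_at_end(else_bb)
b.ret(ir.Constant(ir.IntType(32), 0))
print(mod)  # textual LLVM IR containing the 'br i1 ...' instruction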
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python/src/Lib/Queue.py
|
python
|
Queue.put
|
(self, item, block=True, timeout=None)
|
Put an item into the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until a free slot is available. If 'timeout' is
a non-negative number, it blocks at most 'timeout' seconds and raises
the Full exception if no free slot was available within that time.
Otherwise ('block' is false), put an item on the queue if a free slot
is immediately available, else raise the Full exception ('timeout'
is ignored in that case).
|
Put an item into the queue.
|
[
"Put",
"an",
"item",
"into",
"the",
"queue",
"."
] |
def put(self, item, block=True, timeout=None):
"""Put an item into the queue.
If optional args 'block' is true and 'timeout' is None (the default),
block if necessary until a free slot is available. If 'timeout' is
a non-negative number, it blocks at most 'timeout' seconds and raises
the Full exception if no free slot was available within that time.
Otherwise ('block' is false), put an item on the queue if a free slot
is immediately available, else raise the Full exception ('timeout'
is ignored in that case).
"""
self.not_full.acquire()
try:
if self.maxsize > 0:
if not block:
if self._qsize() == self.maxsize:
raise Full
elif timeout is None:
while self._qsize() == self.maxsize:
self.not_full.wait()
elif timeout < 0:
raise ValueError("'timeout' must be a non-negative number")
else:
endtime = _time() + timeout
while self._qsize() == self.maxsize:
remaining = endtime - _time()
if remaining <= 0.0:
raise Full
self.not_full.wait(remaining)
self._put(item)
self.unfinished_tasks += 1
self.not_empty.notify()
finally:
self.not_full.release()
|
[
"def",
"put",
"(",
"self",
",",
"item",
",",
"block",
"=",
"True",
",",
"timeout",
"=",
"None",
")",
":",
"self",
".",
"not_full",
".",
"acquire",
"(",
")",
"try",
":",
"if",
"self",
".",
"maxsize",
">",
"0",
":",
"if",
"not",
"block",
":",
"if",
"self",
".",
"_qsize",
"(",
")",
"==",
"self",
".",
"maxsize",
":",
"raise",
"Full",
"elif",
"timeout",
"is",
"None",
":",
"while",
"self",
".",
"_qsize",
"(",
")",
"==",
"self",
".",
"maxsize",
":",
"self",
".",
"not_full",
".",
"wait",
"(",
")",
"elif",
"timeout",
"<",
"0",
":",
"raise",
"ValueError",
"(",
"\"'timeout' must be a non-negative number\"",
")",
"else",
":",
"endtime",
"=",
"_time",
"(",
")",
"+",
"timeout",
"while",
"self",
".",
"_qsize",
"(",
")",
"==",
"self",
".",
"maxsize",
":",
"remaining",
"=",
"endtime",
"-",
"_time",
"(",
")",
"if",
"remaining",
"<=",
"0.0",
":",
"raise",
"Full",
"self",
".",
"not_full",
".",
"wait",
"(",
"remaining",
")",
"self",
".",
"_put",
"(",
"item",
")",
"self",
".",
"unfinished_tasks",
"+=",
"1",
"self",
".",
"not_empty",
".",
"notify",
"(",
")",
"finally",
":",
"self",
".",
"not_full",
".",
"release",
"(",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/Queue.py#L107-L140
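The three modes in the docstring (non-blocking, block forever, block with timeout) in a short Python 2 sketch; on Python 3 the module is spelled queue:

from Queue import Queue, Full  # Python 3: from queue import Queue, Full

q = Queue(maxsize=1)
q.put("a")                    # slot free: returns immediately
try:
    q.put("b", block=False)   # full + non-blocking -> Full at once
except Full:
    print("full, no waiting")
try:
    q.put("b", timeout=0.05)  # full + timeout -> Full after ~50 ms
except Full:
    print("full, timed out")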
|
||
clementine-player/Clementine
|
111379dfd027802b59125829fcf87e3e1d0ad73b
|
dist/cpplint.py
|
python
|
Match
|
(pattern, s)
|
return _regexp_compile_cache[pattern].match(s)
|
Matches the string with the pattern, caching the compiled regexp.
|
Matches the string with the pattern, caching the compiled regexp.
|
[
"Matches",
"the",
"string",
"with",
"the",
"pattern",
"caching",
"the",
"compiled",
"regexp",
"."
] |
def Match(pattern, s):
"""Matches the string with the pattern, caching the compiled regexp."""
# The regexp compilation caching is inlined in both Match and Search for
# performance reasons; factoring it out into a separate function turns out
# to be noticeably expensive.
if pattern not in _regexp_compile_cache:
_regexp_compile_cache[pattern] = sre_compile.compile(pattern)
return _regexp_compile_cache[pattern].match(s)
|
[
"def",
"Match",
"(",
"pattern",
",",
"s",
")",
":",
"# The regexp compilation caching is inlined in both Match and Search for",
"# performance reasons; factoring it out into a separate function turns out",
"# to be noticeably expensive.",
"if",
"pattern",
"not",
"in",
"_regexp_compile_cache",
":",
"_regexp_compile_cache",
"[",
"pattern",
"]",
"=",
"sre_compile",
".",
"compile",
"(",
"pattern",
")",
"return",
"_regexp_compile_cache",
"[",
"pattern",
"]",
".",
"match",
"(",
"s",
")"
] |
https://github.com/clementine-player/Clementine/blob/111379dfd027802b59125829fcf87e3e1d0ad73b/dist/cpplint.py#L551-L558
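The cache makes repeated calls with the same pattern skip recompilation; re.compile would work equally well where the internal sre_compile module is unavailable:

import sre_compile  # cpplint's choice; re.compile is the public equivalent

_regexp_compile_cache = {}

def Match(pattern, s):
    if pattern not in _regexp_compile_cache:
        _regexp_compile_cache[pattern] = sre_compile.compile(pattern)
    return _regexp_compile_cache[pattern].match(s)

print(bool(Match(r'\s*#\s*include', '#include <vector>')))  # True
print(bool(Match(r'\s*#\s*include', '#include <string>')))  # True, cached
print(len(_regexp_compile_cache))                           # 1: compiled once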
|
|
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/keras/distribute/distribute_coordinator_utils.py
|
python
|
_WorkerContext.num_workers
|
(self)
|
return self._num_workers
|
Returns number of workers in the cluster, including chief.
|
Returns number of workers in the cluster, including chief.
|
[
"Returns",
"number",
"of",
"workers",
"in",
"the",
"cluster",
"including",
"chief",
"."
] |
def num_workers(self):
"""Returns number of workers in the cluster, including chief."""
return self._num_workers
|
[
"def",
"num_workers",
"(",
"self",
")",
":",
"return",
"self",
".",
"_num_workers"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/distribute/distribute_coordinator_utils.py#L267-L269
|
|
FreeCAD/FreeCAD
|
ba42231b9c6889b89e064d6d563448ed81e376ec
|
src/Mod/Path/PathScripts/PathProfile.py
|
python
|
ObjectProfile.initAreaOp
|
(self, obj)
|
initAreaOp(obj) ... creates all profile specific properties.
|
initAreaOp(obj) ... creates all profile specific properties.
|
[
"initAreaOp",
"(",
"obj",
")",
"...",
"creates",
"all",
"profile",
"specific",
"properties",
"."
] |
def initAreaOp(self, obj):
"""initAreaOp(obj) ... creates all profile specific properties."""
self.propertiesReady = False
self.initAreaOpProperties(obj)
obj.setEditorMode("MiterLimit", 2)
obj.setEditorMode("JoinType", 2)
|
[
"def",
"initAreaOp",
"(",
"self",
",",
"obj",
")",
":",
"self",
".",
"propertiesReady",
"=",
"False",
"self",
".",
"initAreaOpProperties",
"(",
"obj",
")",
"obj",
".",
"setEditorMode",
"(",
"\"MiterLimit\"",
",",
"2",
")",
"obj",
".",
"setEditorMode",
"(",
"\"JoinType\"",
",",
"2",
")"
] |
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Path/PathScripts/PathProfile.py#L66-L72
|
||
aosp-mirror/platform_system_core
|
eb710bfa72ad6461ab147f77d8873c561efa1010
|
storaged/tools/ranker.py
|
python
|
display_uids
|
(uid_rank, uids, args)
|
Display ranked uid io, along with task io if specified.
|
Display ranked uid io, along with task io if specified.
|
[
"Display",
"ranked",
"uid",
"io",
"along",
"with",
"task",
"io",
"if",
"specified",
"."
] |
def display_uids(uid_rank, uids, args):
"""Display ranked uid io, along with task io if specified."""
fout = sys.stdout
if args.output != "stdout":
fout = open(args.output, "w")
for i in range(8):
fout.write("RANKING BY " + IO_NAMES[i] + "\n")
for j in range(min(args.uidcnt, len(uid_rank[0]))):
uid = uid_rank[i][j][1]
uid_stat = " ".join([str(uid_io) for uid_io in uids[uid][0]])
fout.write(uid + " " + uid_stat + "\n")
if args.task:
for task in uids[uid][1]:
task_stat = " ".join([str(task_io) for task_io in uids[uid][1][task]])
fout.write("-> " + task + " " + task_stat + "\n")
fout.write("\n")
|
[
"def",
"display_uids",
"(",
"uid_rank",
",",
"uids",
",",
"args",
")",
":",
"fout",
"=",
"sys",
".",
"stdout",
"if",
"args",
".",
"output",
"!=",
"\"stdout\"",
":",
"fout",
"=",
"open",
"(",
"args",
".",
"output",
",",
"\"w\"",
")",
"for",
"i",
"in",
"range",
"(",
"8",
")",
":",
"fout",
".",
"write",
"(",
"\"RANKING BY \"",
"+",
"IO_NAMES",
"[",
"i",
"]",
"+",
"\"\\n\"",
")",
"for",
"j",
"in",
"range",
"(",
"min",
"(",
"args",
".",
"uidcnt",
",",
"len",
"(",
"uid_rank",
"[",
"0",
"]",
")",
")",
")",
":",
"uid",
"=",
"uid_rank",
"[",
"i",
"]",
"[",
"j",
"]",
"[",
"1",
"]",
"uid_stat",
"=",
"\" \"",
".",
"join",
"(",
"[",
"str",
"(",
"uid_io",
")",
"for",
"uid_io",
"in",
"uids",
"[",
"uid",
"]",
"[",
"0",
"]",
"]",
")",
"fout",
".",
"write",
"(",
"uid",
"+",
"\" \"",
"+",
"uid_stat",
"+",
"\"\\n\"",
")",
"if",
"args",
".",
"task",
":",
"for",
"task",
"in",
"uids",
"[",
"uid",
"]",
"[",
"1",
"]",
":",
"task_stat",
"=",
"\" \"",
".",
"join",
"(",
"[",
"str",
"(",
"task_io",
")",
"for",
"task_io",
"in",
"uids",
"[",
"uid",
"]",
"[",
"1",
"]",
"[",
"task",
"]",
"]",
")",
"fout",
".",
"write",
"(",
"\"-> \"",
"+",
"task",
"+",
"\" \"",
"+",
"task_stat",
"+",
"\"\\n\"",
")",
"fout",
".",
"write",
"(",
"\"\\n\"",
")"
] |
https://github.com/aosp-mirror/platform_system_core/blob/eb710bfa72ad6461ab147f77d8873c561efa1010/storaged/tools/ranker.py#L155-L171
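The function assumes uid_rank holds one ranked (value, uid) list per I/O metric and uids maps each uid to a (stats, task-dict) pair. The names and shapes below are hypothetical, chosen only to make the sketch run with the display_uids definition above in scope:

import sys
from types import SimpleNamespace

IO_NAMES = ["io_metric_%d" % i for i in range(8)]  # hypothetical labels
uid_rank = [[(42, "1000")] for _ in range(8)]      # (value, uid) per metric
uids = {"1000": ([1, 2, 3], {"mytask": [4, 5]})}   # uid -> (stats, tasks)
args = SimpleNamespace(output="stdout", uidcnt=5, task=True)

display_uids(uid_rank, uids, args)  # prints eight "RANKING BY ..." sections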
|
||
thalium/icebox
|
99d147d5b9269222225443ce171b4fd46d8985d4
|
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml.py
|
python
|
SAXCallback.attributeDecl
|
(self, elem, name, type, defi, defaultValue, nameList)
|
called when an ATTRIBUTE definition has been found
|
called when an ATTRIBUTE definition has been found
|
[
"called",
"when",
"an",
"ATTRIBUTE",
"definition",
"has",
"been",
"found"
] |
def attributeDecl(self, elem, name, type, defi, defaultValue, nameList):
"""called when an ATTRIBUTE definition has been found"""
pass
|
[
"def",
"attributeDecl",
"(",
"self",
",",
"elem",
",",
"name",
",",
"type",
",",
"defi",
",",
"defaultValue",
",",
"nameList",
")",
":",
"pass"
] |
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml.py#L236-L238
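The stub exists to be overridden; a hedged sketch of a subclass that logs ATTLIST declarations seen during DTD parsing, assuming the surrounding libxml module (and its SAXCallback class) is importable:

class LoggingHandler(SAXCallback):
    def attributeDecl(self, elem, name, type, defi, defaultValue, nameList):
        # Called once per <!ATTLIST ...> declaration in the DTD.
        print("ATTLIST %s/%s default=%r" % (elem, name, defaultValue))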
|