Dataset schema (column, type, value/length range):

column              type            range
nwo                 stringlengths   5–86
sha                 stringlengths   40–40
path                stringlengths   4–189
language            stringclasses   1 value
identifier          stringlengths   1–94
parameters          stringlengths   2–4.03k
argument_list       stringclasses   1 value
return_statement    stringlengths   0–11.5k
docstring           stringlengths   1–33.2k
docstring_summary   stringlengths   0–5.15k
docstring_tokens    sequence        (token list)
function            stringlengths   34–151k
function_tokens     sequence        (token list)
url                 stringlengths   90–278
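Each record below carries these fields in order, with empty fields omitted. As a minimal, hypothetical sketch of programmatic access (the dataset id is a placeholder, since this dump does not name its source), the Hugging Face `datasets` library would expose the same columns:

```python
# Hypothetical loading sketch: the dataset id below is a placeholder for
# wherever this dump actually lives; column names match the schema above.
from datasets import load_dataset

ds = load_dataset("user/code-docstring-corpus", split="train")  # placeholder id
row = ds[0]
print(row["nwo"], row["path"], row["identifier"])
print(row["docstring_summary"])
print(row["url"])
```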
myriadrf/LoRa-SDR
c545c51e5e37284363a971ec298f72255646a6fa
RN2483.py
python
RN2483.enableCW
(self)
Enable CW, remember to reset after to use LoRa again.
Enable CW, remember to reset after to use LoRa again.
[ "Enable", "CW", "remember", "to", "reset", "after", "to", "use", "LoRa", "again", "." ]
def enableCW(self):
    """ Enable CW, remember to reset after to use LoRa again. """
    self.command('radio cw on')
[ "def", "enableCW", "(", "self", ")", ":", "self", ".", "command", "(", "'radio cw on'", ")" ]
https://github.com/myriadrf/LoRa-SDR/blob/c545c51e5e37284363a971ec298f72255646a6fa/RN2483.py#L58-L62
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
tools/idl_parser/idl_parser.py
python
IDLParser.p_AttributeNameKeyword
(self, p)
AttributeNameKeyword : REQUIRED
AttributeNameKeyword : REQUIRED
[ "AttributeNameKeyword", ":", "REQUIRED" ]
def p_AttributeNameKeyword(self, p):
    """AttributeNameKeyword : REQUIRED"""
    p[0] = p[1]
[ "def", "p_AttributeNameKeyword", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/idl_parser/idl_parser.py#L617-L619
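Aside: the single-line rule above follows PLY's convention that a `p_*` function's docstring is the grammar production it parses. A self-contained sketch of that convention, with an illustrative expression grammar (token names and rules here are ours, not idl_parser's):

```python
import ply.lex as lex
import ply.yacc as yacc

# Illustrative token set and grammar -- not idl_parser's.
tokens = ('NUMBER', 'PLUS')

t_PLUS = r'\+'
t_ignore = ' '

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)

def p_expr_plus(p):
    """expr : expr PLUS NUMBER"""
    # The docstring above is the production; p[i] are the matched symbols.
    p[0] = p[1] + p[3]

def p_expr_number(p):
    """expr : NUMBER"""
    p[0] = p[1]

def p_error(p):
    pass

lexer = lex.lex()
parser = yacc.yacc()
print(parser.parse("1 + 2 + 3", lexer=lexer))  # 6
```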
facebook/openr
ed38bdfd6bf290084bfab4821b59f83e7b59315d
openr/py/openr/cli/clis/fib.py
python
FibSnoopCli.snoop
(
    cli_opts: Bunch,  # noqa: B902
    duration: int,
    initial_dump: bool,
    prefixes: List[str],
)
Snoop on fib streaming updates.
Snoop on fib streaming updates.
[ "Snoop", "on", "fib", "streaming", "updates", "." ]
def snoop(
    cli_opts: Bunch,  # noqa: B902
    duration: int,
    initial_dump: bool,
    prefixes: List[str],
):
    """Snoop on fib streaming updates."""
    fib.FibSnoopCmd(cli_opts).run(duration, initial_dump, prefixes)
[ "def", "snoop", "(", "cli_opts", ":", "Bunch", ",", "# noqa: B902", "duration", ":", "int", ",", "initial_dump", ":", "bool", ",", "prefixes", ":", "List", "[", "str", "]", ",", ")", ":", "fib", ".", "FibSnoopCmd", "(", "cli_opts", ")", ".", "run", "(", "duration", ",", "initial_dump", ",", "prefixes", ")" ]
https://github.com/facebook/openr/blob/ed38bdfd6bf290084bfab4821b59f83e7b59315d/openr/py/openr/cli/clis/fib.py#L176-L184
rapidsai/cudf
d5b2448fc69f17509304d594f029d0df56984962
python/dask_cudf/dask_cudf/accessors.py
python
ListMethods.len
(self)
return self.d_series.map_partitions(
    lambda s: s.list.len(), meta=self.d_series._meta
)
Computes the length of each element in the Series/Index.

Returns
-------
Series or Index

Examples
--------
>>> s = cudf.Series([[1, 2, 3], None, [4, 5]])
>>> ds = dask_cudf.from_cudf(s, 2)
>>> ds
0    [1, 2, 3]
1         None
2       [4, 5]
dtype: list
>>> ds.list.len().compute()
0       3
1    <NA>
2       2
dtype: int32
Computes the length of each element in the Series/Index.
[ "Computes", "the", "length", "of", "each", "element", "in", "the", "Series", "/", "Index", "." ]
def len(self):
    """
    Computes the length of each element in the Series/Index.

    Returns
    -------
    Series or Index

    Examples
    --------
    >>> s = cudf.Series([[1, 2, 3], None, [4, 5]])
    >>> ds = dask_cudf.from_cudf(s, 2)
    >>> ds
    0    [1, 2, 3]
    1         None
    2       [4, 5]
    dtype: list
    >>> ds.list.len().compute()
    0       3
    1    <NA>
    2       2
    dtype: int32
    """
    return self.d_series.map_partitions(
        lambda s: s.list.len(), meta=self.d_series._meta
    )
[ "def", "len", "(", "self", ")", ":", "return", "self", ".", "d_series", ".", "map_partitions", "(", "lambda", "s", ":", "s", ".", "list", ".", "len", "(", ")", ",", "meta", "=", "self", ".", "d_series", ".", "_meta", ")" ]
https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/dask_cudf/dask_cudf/accessors.py#L71-L96
baidu/AnyQ
d94d450d2aaa5f7ed73424b10aa4539835b97527
tools/simnet/train/tf/layers/tf_layers.py
python
AttentionLayer.ops
(self, input_x)
return output
operation
operation
[ "operation" ]
def ops(self, input_x):
    """
    operation
    """
    input_shape = input_x.shape
    sequence_length = input_shape[1].value  # suppose input_x is not time major
    v = tf.tanh(tf.matmul(tf.reshape(input_x, [-1, self.hidden_size]),
                          self.W) + tf.reshape(self.b, [1, -1]))
    vu = tf.matmul(v, tf.reshape(self.u, [-1, 1]))
    exps = tf.reshape(tf.exp(vu), [-1, sequence_length])
    alphas = exps / tf.reshape(tf.reduce_sum(exps, 1), [-1, 1])
    # Output of Bi-RNN is reduced with attention vector
    output = tf.reduce_sum(
        input_x * tf.reshape(alphas, [-1, sequence_length, 1]), 1)
    return output
[ "def", "ops", "(", "self", ",", "input_x", ")", ":", "input_shape", "=", "input_x", ".", "shape", "sequence_length", "=", "input_shape", "[", "1", "]", ".", "value", "# suppose input_x is not time major", "v", "=", "tf", ".", "tanh", "(", "tf", ".", "matmul", "(", "tf", ".", "reshape", "(", "input_x", ",", "[", "-", "1", ",", "self", ".", "hidden_size", "]", ")", ",", "self", ".", "W", ")", "+", "tf", ".", "reshape", "(", "self", ".", "b", ",", "[", "1", ",", "-", "1", "]", ")", ")", "vu", "=", "tf", ".", "matmul", "(", "v", ",", "tf", ".", "reshape", "(", "self", ".", "u", ",", "[", "-", "1", ",", "1", "]", ")", ")", "exps", "=", "tf", ".", "reshape", "(", "tf", ".", "exp", "(", "vu", ")", ",", "[", "-", "1", ",", "sequence_length", "]", ")", "alphas", "=", "exps", "/", "tf", ".", "reshape", "(", "tf", ".", "reduce_sum", "(", "exps", ",", "1", ")", ",", "[", "-", "1", ",", "1", "]", ")", "# Output of Bi-RNN is reduced with attention vector", "output", "=", "tf", ".", "reduce_sum", "(", "input_x", "*", "tf", ".", "reshape", "(", "alphas", ",", "[", "-", "1", ",", "sequence_length", ",", "1", "]", ")", ",", "1", ")", "return", "output" ]
https://github.com/baidu/AnyQ/blob/d94d450d2aaa5f7ed73424b10aa4539835b97527/tools/simnet/train/tf/layers/tf_layers.py#L96-L111
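The `ops` method above implements additive attention pooling by hand: score each timestep, softmax over time, then weighted-sum the inputs. A NumPy sketch of the same arithmetic, with illustrative shapes and random weights (none of these values come from the source):

```python
import numpy as np

batch, seq_len, hidden = 2, 5, 8        # illustrative shapes
rng = np.random.default_rng(0)
x = rng.normal(size=(batch, seq_len, hidden))
W = rng.normal(size=(hidden, hidden))
b = np.zeros(hidden)
u = rng.normal(size=(hidden,))

v = np.tanh(x.reshape(-1, hidden) @ W + b)            # (batch*seq, hidden)
vu = (v @ u).reshape(batch, seq_len)                  # per-timestep scores
alphas = np.exp(vu) / np.exp(vu).sum(axis=1, keepdims=True)  # softmax over time
output = (x * alphas[:, :, None]).sum(axis=1)         # (batch, hidden)
print(output.shape)  # (2, 8)
```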
hpi-xnor/BMXNet-v2
af2b1859eafc5c721b1397cef02f946aaf2ce20d
python/mxnet/ndarray/ndarray.py
python
NDArray.context
(self)
return Context(Context.devtype2str[dev_typeid.value], dev_id.value)
Device context of the array.

Examples
--------
>>> x = mx.nd.array([1, 2, 3, 4])
>>> x.context
cpu(0)
>>> type(x.context)
<class 'mxnet.context.Context'>
>>> y = mx.nd.zeros((2,3), mx.gpu(0))
>>> y.context
gpu(0)
Device context of the array.
[ "Device", "context", "of", "the", "array", "." ]
def context(self):
    """Device context of the array.

    Examples
    --------
    >>> x = mx.nd.array([1, 2, 3, 4])
    >>> x.context
    cpu(0)
    >>> type(x.context)
    <class 'mxnet.context.Context'>
    >>> y = mx.nd.zeros((2,3), mx.gpu(0))
    >>> y.context
    gpu(0)
    """
    dev_typeid = ctypes.c_int()
    dev_id = ctypes.c_int()
    check_call(_LIB.MXNDArrayGetContext(
        self.handle, ctypes.byref(dev_typeid), ctypes.byref(dev_id)))
    return Context(Context.devtype2str[dev_typeid.value], dev_id.value)
[ "def", "context", "(", "self", ")", ":", "dev_typeid", "=", "ctypes", ".", "c_int", "(", ")", "dev_id", "=", "ctypes", ".", "c_int", "(", ")", "check_call", "(", "_LIB", ".", "MXNDArrayGetContext", "(", "self", ".", "handle", ",", "ctypes", ".", "byref", "(", "dev_typeid", ")", ",", "ctypes", ".", "byref", "(", "dev_id", ")", ")", ")", "return", "Context", "(", "Context", ".", "devtype2str", "[", "dev_typeid", ".", "value", "]", ",", "dev_id", ".", "value", ")" ]
https://github.com/hpi-xnor/BMXNet-v2/blob/af2b1859eafc5c721b1397cef02f946aaf2ce20d/python/mxnet/ndarray/ndarray.py#L1904-L1922
bundy-dns/bundy
3d41934996b82b0cd2fe22dd74d2abc1daba835d
src/lib/python/bundy/server_common/tsig_keyring.py
python
Updater.__update
(self, value=None, module_cfg=None)
Update the key ring by the configuration. Note that this function is used as a callback, but can raise on bad data. The bad data is expected to be handled by the configuration plugin and not be allowed as far as here. The parameters are there just to match the signature which the callback should have (i.e. they are ignored).
Update the key ring by the configuration.
[ "Update", "the", "key", "ring", "by", "the", "configuration", "." ]
def __update(self, value=None, module_cfg=None):
    """
    Update the key ring by the configuration.

    Note that this function is used as a callback, but can raise
    on bad data. The bad data is expected to be handled by the
    configuration plugin and not be allowed as far as here.

    The parameters are there just to match the signature which
    the callback should have (i.e. they are ignored).
    """
    logger.debug(logger.DBGLVL_TRACE_BASIC,
                 PYSERVER_COMMON_TSIG_KEYRING_UPDATE)
    (data, _) = self.__session.get_remote_config_value('tsig_keys', 'keys')
    if data is not None:  # There's an update
        keyring = bundy.dns.TSIGKeyRing()
        for key_data in data:
            key = bundy.dns.TSIGKey(key_data)
            if keyring.add(key) != bundy.dns.TSIGKeyRing.SUCCESS:
                raise AddError("Can't add key " + str(key))
        self.__keyring = keyring
[ "def", "__update", "(", "self", ",", "value", "=", "None", ",", "module_cfg", "=", "None", ")", ":", "logger", ".", "debug", "(", "logger", ".", "DBGLVL_TRACE_BASIC", ",", "PYSERVER_COMMON_TSIG_KEYRING_UPDATE", ")", "(", "data", ",", "_", ")", "=", "self", ".", "__session", ".", "get_remote_config_value", "(", "'tsig_keys'", ",", "'keys'", ")", "if", "data", "is", "not", "None", ":", "# There's an update", "keyring", "=", "bundy", ".", "dns", ".", "TSIGKeyRing", "(", ")", "for", "key_data", "in", "data", ":", "key", "=", "bundy", ".", "dns", ".", "TSIGKey", "(", "key_data", ")", "if", "keyring", ".", "add", "(", "key", ")", "!=", "bundy", ".", "dns", ".", "TSIGKeyRing", ".", "SUCCESS", ":", "raise", "AddError", "(", "\"Can't add key \"", "+", "str", "(", "key", ")", ")", "self", ".", "__keyring", "=", "keyring" ]
https://github.com/bundy-dns/bundy/blob/3d41934996b82b0cd2fe22dd74d2abc1daba835d/src/lib/python/bundy/server_common/tsig_keyring.py#L58-L78
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/draftfunctions/extrude.py
python
extrude
(obj, vector, solid=False)
return newobj
extrude(object, vector, [solid])

Create a Part::Extrusion object from a given object.

Parameters
----------
obj :

vector : Base.Vector
    The extrusion direction and module.

solid : bool
    TODO: describe.
extrude(object, vector, [solid]) Create a Part::Extrusion object from a given object.
[ "extrude", "(", "object", "vector", "[", "solid", "]", ")", "Create", "a", "Part", "::", "Extrusion", "object", "from", "a", "given", "object", "." ]
def extrude(obj, vector, solid=False):
    """extrude(object, vector, [solid])

    Create a Part::Extrusion object from a given object.

    Parameters
    ----------
    obj :

    vector : Base.Vector
        The extrusion direction and module.

    solid : bool
        TODO: describe.
    """
    if not App.ActiveDocument:
        App.Console.PrintError("No active document. Aborting\n")
        return
    newobj = App.ActiveDocument.addObject("Part::Extrusion", "Extrusion")
    newobj.Base = obj
    newobj.Dir = vector
    newobj.Solid = solid
    if App.GuiUp:
        obj.ViewObject.Visibility = False
        gui_utils.format_object(newobj, obj)
        gui_utils.select(newobj)
    return newobj
[ "def", "extrude", "(", "obj", ",", "vector", ",", "solid", "=", "False", ")", ":", "if", "not", "App", ".", "ActiveDocument", ":", "App", ".", "Console", ".", "PrintError", "(", "\"No active document. Aborting\\n\"", ")", "return", "newobj", "=", "App", ".", "ActiveDocument", ".", "addObject", "(", "\"Part::Extrusion\"", ",", "\"Extrusion\"", ")", "newobj", ".", "Base", "=", "obj", "newobj", ".", "Dir", "=", "vector", "newobj", ".", "Solid", "=", "solid", "if", "App", ".", "GuiUp", ":", "obj", ".", "ViewObject", ".", "Visibility", "=", "False", "gui_utils", ".", "format_object", "(", "newobj", ",", "obj", ")", "gui_utils", ".", "select", "(", "newobj", ")", "return", "newobj" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftfunctions/extrude.py#L34-L61
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2.py
python
buildQName
(ncname, prefix, memory, len)
return ret
Builds the QName @prefix:@ncname in @memory if there is enough space and prefix is not None nor empty, otherwise allocate a new string. If prefix is None or empty it returns ncname.
Builds the QName
[ "Builds", "the", "QName" ]
def buildQName(ncname, prefix, memory, len):
    """Builds the QName @prefix:@ncname in @memory if there is enough space
    and prefix is not None nor empty, otherwise allocate a new string.
    If prefix is None or empty it returns ncname. """
    ret = libxml2mod.xmlBuildQName(ncname, prefix, memory, len)
    return ret
[ "def", "buildQName", "(", "ncname", ",", "prefix", ",", "memory", ",", "len", ")", ":", "ret", "=", "libxml2mod", ".", "xmlBuildQName", "(", "ncname", ",", "prefix", ",", "memory", ",", "len", ")", "return", "ret" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L1659-L1665
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/AWSPythonSDK/1.5.8/botocore/hooks.py
python
BaseEventHooks.register
(self, event_name, handler, unique_id=None, unique_id_uses_count=False)
Register an event handler for a given event. If a ``unique_id`` is given, the handler will not be registered if a handler with the ``unique_id`` has already been registered. Handlers are called in the order they have been registered. Note handlers can also be registered with ``register_first()`` and ``register_last()``. All handlers registered with ``register_first()`` are called before handlers registered with ``register()`` which are called before handlers registered with ``register_last()``.
Register an event handler for a given event.
[ "Register", "an", "event", "handler", "for", "a", "given", "event", "." ]
def register(self, event_name, handler, unique_id=None,
             unique_id_uses_count=False):
    """Register an event handler for a given event.

    If a ``unique_id`` is given, the handler will not be registered
    if a handler with the ``unique_id`` has already been registered.

    Handlers are called in the order they have been registered.
    Note handlers can also be registered with ``register_first()``
    and ``register_last()``.  All handlers registered with
    ``register_first()`` are called before handlers registered
    with ``register()`` which are called before handlers registered
    with ``register_last()``.
    """
    self._verify_and_register(event_name, handler, unique_id,
                              register_method=self._register,
                              unique_id_uses_count=unique_id_uses_count)
[ "def", "register", "(", "self", ",", "event_name", ",", "handler", ",", "unique_id", "=", "None", ",", "unique_id_uses_count", "=", "False", ")", ":", "self", ".", "_verify_and_register", "(", "event_name", ",", "handler", ",", "unique_id", ",", "register_method", "=", "self", ".", "_register", ",", "unique_id_uses_count", "=", "unique_id_uses_count", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/botocore/hooks.py#L83-L100
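A short sketch of the register/emit flow this docstring describes, using `HierarchicalEmitter`, a concrete `BaseEventHooks` subclass in `botocore.hooks`; the event name and handler here are illustrative:

```python
from botocore.hooks import HierarchicalEmitter

def on_event(**kwargs):
    # Handlers receive the emit() kwargs (plus the event name).
    print("handled:", kwargs.get("value"))

emitter = HierarchicalEmitter()
emitter.register("my-service.operation", on_event, unique_id="once")
# Re-registering with the same unique_id is a no-op, per the docstring.
emitter.register("my-service.operation", on_event, unique_id="once")
emitter.emit("my-service.operation", value=42)  # handler runs once
```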
apache/incubator-mxnet
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
python/mxnet/ndarray/utils.py
python
array
(source_array, ctx=None, dtype=None)
Creates an array from any object exposing the array interface.

Parameters
----------
source_array : array_like
    An object exposing the array interface, an object whose `__array__`
    method returns an array, or any (nested) sequence.
ctx : Context, optional
    Device context (default is the current default context).
dtype : str or numpy.dtype, optional
    The data type of the output array. The default dtype is
    ``source_array.dtype`` if `source_array` is an `NDArray`,
    `float32` otherwise.

Returns
-------
NDArray, RowSparseNDArray or CSRNDArray
    An array with the same contents as the `source_array`.

Examples
--------
>>> import numpy as np
>>> mx.nd.array([1, 2, 3])
<NDArray 3 @cpu(0)>
>>> mx.nd.array([[1, 2], [3, 4]])
<NDArray 2x2 @cpu(0)>
>>> mx.nd.array(np.zeros((3, 2)))
<NDArray 3x2 @cpu(0)>
>>> mx.nd.array(np.zeros((3, 2)), mx.gpu(0))
<NDArray 3x2 @gpu(0)>
>>> mx.nd.array(mx.nd.zeros((3, 2), stype='row_sparse'))
<RowSparseNDArray 3x2 @cpu(0)>
Creates an array from any object exposing the array interface.
[ "Creates", "an", "array", "from", "any", "object", "exposing", "the", "array", "interface", "." ]
def array(source_array, ctx=None, dtype=None):
    """Creates an array from any object exposing the array interface.

    Parameters
    ----------
    source_array : array_like
        An object exposing the array interface, an object whose `__array__`
        method returns an array, or any (nested) sequence.
    ctx : Context, optional
        Device context (default is the current default context).
    dtype : str or numpy.dtype, optional
        The data type of the output array. The default dtype is
        ``source_array.dtype`` if `source_array` is an `NDArray`,
        `float32` otherwise.

    Returns
    -------
    NDArray, RowSparseNDArray or CSRNDArray
        An array with the same contents as the `source_array`.

    Examples
    --------
    >>> import numpy as np
    >>> mx.nd.array([1, 2, 3])
    <NDArray 3 @cpu(0)>
    >>> mx.nd.array([[1, 2], [3, 4]])
    <NDArray 2x2 @cpu(0)>
    >>> mx.nd.array(np.zeros((3, 2)))
    <NDArray 3x2 @cpu(0)>
    >>> mx.nd.array(np.zeros((3, 2)), mx.gpu(0))
    <NDArray 3x2 @gpu(0)>
    >>> mx.nd.array(mx.nd.zeros((3, 2), stype='row_sparse'))
    <RowSparseNDArray 3x2 @cpu(0)>
    """
    if spsp is not None and isinstance(source_array, spsp.csr.csr_matrix):
        return _sparse_array(source_array, ctx=ctx, dtype=dtype)
    elif isinstance(source_array, NDArray) and source_array.stype != 'default':
        return _sparse_array(source_array, ctx=ctx, dtype=dtype)
    else:
        return _array(source_array, ctx=ctx, dtype=dtype)
[ "def", "array", "(", "source_array", ",", "ctx", "=", "None", ",", "dtype", "=", "None", ")", ":", "if", "spsp", "is", "not", "None", "and", "isinstance", "(", "source_array", ",", "spsp", ".", "csr", ".", "csr_matrix", ")", ":", "return", "_sparse_array", "(", "source_array", ",", "ctx", "=", "ctx", ",", "dtype", "=", "dtype", ")", "elif", "isinstance", "(", "source_array", ",", "NDArray", ")", "and", "source_array", ".", "stype", "!=", "'default'", ":", "return", "_sparse_array", "(", "source_array", ",", "ctx", "=", "ctx", ",", "dtype", "=", "dtype", ")", "else", ":", "return", "_array", "(", "source_array", ",", "ctx", "=", "ctx", ",", "dtype", "=", "dtype", ")" ]
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/ndarray/utils.py#L108-L146
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/io/idl.py
python
_read_array
(f, typecode, array_desc)
return array
Read an array of type `typecode`, with the array descriptor given as `array_desc`.
Read an array of type `typecode`, with the array descriptor given as `array_desc`.
[ "Read", "an", "array", "of", "type", "typecode", "with", "the", "array", "descriptor", "given", "as", "array_desc", "." ]
def _read_array(f, typecode, array_desc):
    '''
    Read an array of type `typecode`, with the array descriptor given as
    `array_desc`.
    '''
    if typecode in [1, 3, 4, 5, 6, 9, 13, 14, 15]:
        if typecode == 1:
            nbytes = _read_int32(f)
            if nbytes != array_desc['nbytes']:
                warnings.warn("Not able to verify number of bytes from header")

        # Read bytes as numpy array
        array = np.fromstring(f.read(array_desc['nbytes']),
                              dtype=DTYPE_DICT[typecode])

    elif typecode in [2, 12]:
        # These are 2 byte types, need to skip every two as they are not packed
        array = np.fromstring(f.read(array_desc['nbytes']*2),
                              dtype=DTYPE_DICT[typecode])[1::2]

    else:
        # Read bytes into list
        array = []
        for i in range(array_desc['nelements']):
            dtype = typecode
            data = _read_data(f, dtype)
            array.append(data)
        array = np.array(array, dtype=np.object_)

    # Reshape array if needed
    if array_desc['ndims'] > 1:
        dims = array_desc['dims'][:int(array_desc['ndims'])]
        dims.reverse()
        array = array.reshape(dims)

    # Go to next alignment position
    _align_32(f)

    return array
[ "def", "_read_array", "(", "f", ",", "typecode", ",", "array_desc", ")", ":", "if", "typecode", "in", "[", "1", ",", "3", ",", "4", ",", "5", ",", "6", ",", "9", ",", "13", ",", "14", ",", "15", "]", ":", "if", "typecode", "==", "1", ":", "nbytes", "=", "_read_int32", "(", "f", ")", "if", "nbytes", "!=", "array_desc", "[", "'nbytes'", "]", ":", "warnings", ".", "warn", "(", "\"Not able to verify number of bytes from header\"", ")", "# Read bytes as numpy array", "array", "=", "np", ".", "fromstring", "(", "f", ".", "read", "(", "array_desc", "[", "'nbytes'", "]", ")", ",", "dtype", "=", "DTYPE_DICT", "[", "typecode", "]", ")", "elif", "typecode", "in", "[", "2", ",", "12", "]", ":", "# These are 2 byte types, need to skip every two as they are not packed", "array", "=", "np", ".", "fromstring", "(", "f", ".", "read", "(", "array_desc", "[", "'nbytes'", "]", "*", "2", ")", ",", "dtype", "=", "DTYPE_DICT", "[", "typecode", "]", ")", "[", "1", ":", ":", "2", "]", "else", ":", "# Read bytes into list", "array", "=", "[", "]", "for", "i", "in", "range", "(", "array_desc", "[", "'nelements'", "]", ")", ":", "dtype", "=", "typecode", "data", "=", "_read_data", "(", "f", ",", "dtype", ")", "array", ".", "append", "(", "data", ")", "array", "=", "np", ".", "array", "(", "array", ",", "dtype", "=", "np", ".", "object_", ")", "# Reshape array if needed", "if", "array_desc", "[", "'ndims'", "]", ">", "1", ":", "dims", "=", "array_desc", "[", "'dims'", "]", "[", ":", "int", "(", "array_desc", "[", "'ndims'", "]", ")", "]", "dims", ".", "reverse", "(", ")", "array", "=", "array", ".", "reshape", "(", "dims", ")", "# Go to next alignment position", "_align_32", "(", "f", ")", "return", "array" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/io/idl.py#L270-L314
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/curses_ui.py
python
CursesUI._screen_color_init
(self)
Initialization of screen colors.
Initialization of screen colors.
[ "Initialization", "of", "screen", "colors", "." ]
def _screen_color_init(self):
    """Initialization of screen colors."""
    curses.start_color()
    curses.use_default_colors()
    self._color_pairs = {}
    color_index = 0

    # Prepare color pairs.
    for fg_color in self._FOREGROUND_COLORS:
        for bg_color in self._BACKGROUND_COLORS:
            color_index += 1
            curses.init_pair(color_index,
                             self._FOREGROUND_COLORS[fg_color],
                             self._BACKGROUND_COLORS[bg_color])

            color_name = fg_color
            if bg_color != "transparent":
                color_name += "_on_" + bg_color

            self._color_pairs[color_name] = curses.color_pair(color_index)

    # Try getting color(s) available only under 256-color support.
    try:
        color_index += 1
        curses.init_pair(color_index, 245, -1)
        self._color_pairs[cli_shared.COLOR_GRAY] = curses.color_pair(color_index)
    except curses.error:
        # Use fall-back color(s):
        self._color_pairs[cli_shared.COLOR_GRAY] = (
            self._color_pairs[cli_shared.COLOR_GREEN])

    # A_BOLD or A_BLINK is not really a "color". But place it here for
    # convenience.
    self._color_pairs["bold"] = curses.A_BOLD
    self._color_pairs["blink"] = curses.A_BLINK
    self._color_pairs["underline"] = curses.A_UNDERLINE

    # Default color pair to use when a specified color pair does not exist.
    self._default_color_pair = self._color_pairs[cli_shared.COLOR_WHITE]
[ "def", "_screen_color_init", "(", "self", ")", ":", "curses", ".", "start_color", "(", ")", "curses", ".", "use_default_colors", "(", ")", "self", ".", "_color_pairs", "=", "{", "}", "color_index", "=", "0", "# Prepare color pairs.", "for", "fg_color", "in", "self", ".", "_FOREGROUND_COLORS", ":", "for", "bg_color", "in", "self", ".", "_BACKGROUND_COLORS", ":", "color_index", "+=", "1", "curses", ".", "init_pair", "(", "color_index", ",", "self", ".", "_FOREGROUND_COLORS", "[", "fg_color", "]", ",", "self", ".", "_BACKGROUND_COLORS", "[", "bg_color", "]", ")", "color_name", "=", "fg_color", "if", "bg_color", "!=", "\"transparent\"", ":", "color_name", "+=", "\"_on_\"", "+", "bg_color", "self", ".", "_color_pairs", "[", "color_name", "]", "=", "curses", ".", "color_pair", "(", "color_index", ")", "# Try getting color(s) available only under 256-color support.", "try", ":", "color_index", "+=", "1", "curses", ".", "init_pair", "(", "color_index", ",", "245", ",", "-", "1", ")", "self", ".", "_color_pairs", "[", "cli_shared", ".", "COLOR_GRAY", "]", "=", "curses", ".", "color_pair", "(", "color_index", ")", "except", "curses", ".", "error", ":", "# Use fall-back color(s):", "self", ".", "_color_pairs", "[", "cli_shared", ".", "COLOR_GRAY", "]", "=", "(", "self", ".", "_color_pairs", "[", "cli_shared", ".", "COLOR_GREEN", "]", ")", "# A_BOLD or A_BLINK is not really a \"color\". But place it here for", "# convenience.", "self", ".", "_color_pairs", "[", "\"bold\"", "]", "=", "curses", ".", "A_BOLD", "self", ".", "_color_pairs", "[", "\"blink\"", "]", "=", "curses", ".", "A_BLINK", "self", ".", "_color_pairs", "[", "\"underline\"", "]", "=", "curses", ".", "A_UNDERLINE", "# Default color pair to use when a specified color pair does not exist.", "self", ".", "_default_color_pair", "=", "self", ".", "_color_pairs", "[", "cli_shared", ".", "COLOR_WHITE", "]" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/curses_ui.py#L406-L443
mamedev/mame
02cd26d37ee11191f3e311e19e805d872cb1e3a4
scripts/build/png.py
python
color_triple
(color)
Convert a command line colour value to a RGB triple of integers. FIXME: Somewhere we need support for greyscale backgrounds etc.
Convert a command line colour value to a RGB triple of integers. FIXME: Somewhere we need support for greyscale backgrounds etc.
[ "Convert", "a", "command", "line", "colour", "value", "to", "a", "RGB", "triple", "of", "integers", ".", "FIXME", ":", "Somewhere", "we", "need", "support", "for", "greyscale", "backgrounds", "etc", "." ]
def color_triple(color):
    """
    Convert a command line colour value to a RGB triple of integers.
    FIXME: Somewhere we need support for greyscale backgrounds etc.
    """
    if color.startswith('#') and len(color) == 4:
        return (int(color[1], 16),
                int(color[2], 16),
                int(color[3], 16))
    if color.startswith('#') and len(color) == 7:
        return (int(color[1:3], 16),
                int(color[3:5], 16),
                int(color[5:7], 16))
    elif color.startswith('#') and len(color) == 13:
        return (int(color[1:5], 16),
                int(color[5:9], 16),
                int(color[9:13], 16))
[ "def", "color_triple", "(", "color", ")", ":", "if", "color", ".", "startswith", "(", "'#'", ")", "and", "len", "(", "color", ")", "==", "4", ":", "return", "(", "int", "(", "color", "[", "1", "]", ",", "16", ")", ",", "int", "(", "color", "[", "2", "]", ",", "16", ")", ",", "int", "(", "color", "[", "3", "]", ",", "16", ")", ")", "if", "color", ".", "startswith", "(", "'#'", ")", "and", "len", "(", "color", ")", "==", "7", ":", "return", "(", "int", "(", "color", "[", "1", ":", "3", "]", ",", "16", ")", ",", "int", "(", "color", "[", "3", ":", "5", "]", ",", "16", ")", ",", "int", "(", "color", "[", "5", ":", "7", "]", ",", "16", ")", ")", "elif", "color", ".", "startswith", "(", "'#'", ")", "and", "len", "(", "color", ")", "==", "13", ":", "return", "(", "int", "(", "color", "[", "1", ":", "5", "]", ",", "16", ")", ",", "int", "(", "color", "[", "5", ":", "9", "]", ",", "16", ")", ",", "int", "(", "color", "[", "9", ":", "13", "]", ",", "16", ")", ")" ]
https://github.com/mamedev/mame/blob/02cd26d37ee11191f3e311e19e805d872cb1e3a4/scripts/build/png.py#L2646-L2662
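Usage sketch for `color_triple` as defined above (assuming it is in scope): 4-, 7-, and 13-character `#` strings parse to 1-, 2-, and 4-hex-digit channels respectively; any other input falls through every branch and implicitly returns None:

```python
print(color_triple('#fff'))            # (15, 15, 15)
print(color_triple('#ff8000'))         # (255, 128, 0)
print(color_triple('#ffff00000000'))   # (65535, 0, 0)
print(color_triple('red'))             # None (no branch matches)
```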
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_gdi.py
python
ImageList.AddIcon
(*args, **kwargs)
return _gdi_.ImageList_AddIcon(*args, **kwargs)
AddIcon(self, Icon icon) -> int
AddIcon(self, Icon icon) -> int
[ "AddIcon", "(", "self", "Icon", "icon", ")", "-", ">", "int" ]
def AddIcon(*args, **kwargs):
    """AddIcon(self, Icon icon) -> int"""
    return _gdi_.ImageList_AddIcon(*args, **kwargs)
[ "def", "AddIcon", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "ImageList_AddIcon", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_gdi.py#L6764-L6766
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/python/client/timeline.py
python
_TensorTracker.__init__
(self, name, object_id, timestamp, pid, allocator, num_bytes)
Creates an object to track tensor references.

This class is not thread safe and is intended only for internal use by
the 'Timeline' class in this file.

Args:
  name: The name of the Tensor as a string.
  object_id: Chrome Trace object identifier assigned for this Tensor.
  timestamp: The creation timestamp of this event as a long integer.
  pid: Process identifier of the associated device, as an integer.
  allocator: Name of the allocator used to create the Tensor.
  num_bytes: Number of bytes allocated (long integer).

Returns:
  A 'TensorTracker' object.
Creates an object to track tensor references.
[ "Creates", "an", "object", "to", "track", "tensor", "references", "." ]
def __init__(self, name, object_id, timestamp, pid, allocator, num_bytes):
    """Creates an object to track tensor references.

    This class is not thread safe and is intended only for internal use by
    the 'Timeline' class in this file.

    Args:
      name: The name of the Tensor as a string.
      object_id: Chrome Trace object identifier assigned for this Tensor.
      timestamp: The creation timestamp of this event as a long integer.
      pid: Process identifier of the associated device, as an integer.
      allocator: Name of the allocator used to create the Tensor.
      num_bytes: Number of bytes allocated (long integer).

    Returns:
      A 'TensorTracker' object.
    """
    self._name = name
    self._pid = pid
    self._object_id = object_id
    self._create_time = timestamp
    self._allocator = allocator
    self._num_bytes = num_bytes
    self._ref_times = []
    self._unref_times = []
[ "def", "__init__", "(", "self", ",", "name", ",", "object_id", ",", "timestamp", ",", "pid", ",", "allocator", ",", "num_bytes", ")", ":", "self", ".", "_name", "=", "name", "self", ".", "_pid", "=", "pid", "self", ".", "_object_id", "=", "object_id", "self", ".", "_create_time", "=", "timestamp", "self", ".", "_allocator", "=", "allocator", "self", ".", "_num_bytes", "=", "num_bytes", "self", ".", "_ref_times", "=", "[", "]", "self", ".", "_unref_times", "=", "[", "]" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/client/timeline.py#L269-L293
cvxpy/cvxpy
5165b4fb750dfd237de8659383ef24b4b2e33aaf
cvxpy/lin_ops/lin_utils.py
python
is_const
(operator)
return operator.type in [lo.SCALAR_CONST, lo.SPARSE_CONST, lo.DENSE_CONST, lo.PARAM]
Returns whether a LinOp is constant.

Parameters
----------
operator : LinOp
    The LinOp to test.

Returns
-------
True if the LinOp is a constant, False otherwise.
Returns whether a LinOp is constant.
[ "Returns", "whether", "a", "LinOp", "is", "constant", "." ]
def is_const(operator) -> bool:
    """Returns whether a LinOp is constant.

    Parameters
    ----------
    operator : LinOp
        The LinOp to test.

    Returns
    -------
    True if the LinOp is a constant, False otherwise.
    """
    return operator.type in [lo.SCALAR_CONST,
                             lo.SPARSE_CONST,
                             lo.DENSE_CONST,
                             lo.PARAM]
[ "def", "is_const", "(", "operator", ")", "->", "bool", ":", "return", "operator", ".", "type", "in", "[", "lo", ".", "SCALAR_CONST", ",", "lo", ".", "SPARSE_CONST", ",", "lo", ".", "DENSE_CONST", ",", "lo", ".", "PARAM", "]" ]
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/lin_ops/lin_utils.py#L144-L159
okex/V3-Open-API-SDK
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/idna/core.py
python
uts46_remap
(domain, std3_rules=True, transitional=False)
Re-map the characters in the string according to UTS46 processing.
Re-map the characters in the string according to UTS46 processing.
[ "Re", "-", "map", "the", "characters", "in", "the", "string", "according", "to", "UTS46", "processing", "." ]
def uts46_remap(domain, std3_rules=True, transitional=False):
    """Re-map the characters in the string according to UTS46 processing."""
    from .uts46data import uts46data
    output = u""
    try:
        for pos, char in enumerate(domain):
            code_point = ord(char)
            uts46row = uts46data[code_point if code_point < 256 else
                bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
            status = uts46row[1]
            replacement = uts46row[2] if len(uts46row) == 3 else None
            if (status == "V" or
                    (status == "D" and not transitional) or
                    (status == "3" and not std3_rules and replacement is None)):
                output += char
            elif replacement is not None and (status == "M" or
                    (status == "3" and not std3_rules) or
                    (status == "D" and transitional)):
                output += replacement
            elif status != "I":
                raise IndexError()
        return unicodedata.normalize("NFC", output)
    except IndexError:
        raise InvalidCodepoint(
            "Codepoint {0} not allowed at position {1} in {2}".format(
            _unot(code_point), pos + 1, repr(domain)))
[ "def", "uts46_remap", "(", "domain", ",", "std3_rules", "=", "True", ",", "transitional", "=", "False", ")", ":", "from", ".", "uts46data", "import", "uts46data", "output", "=", "u\"\"", "try", ":", "for", "pos", ",", "char", "in", "enumerate", "(", "domain", ")", ":", "code_point", "=", "ord", "(", "char", ")", "uts46row", "=", "uts46data", "[", "code_point", "if", "code_point", "<", "256", "else", "bisect", ".", "bisect_left", "(", "uts46data", ",", "(", "code_point", ",", "\"Z\"", ")", ")", "-", "1", "]", "status", "=", "uts46row", "[", "1", "]", "replacement", "=", "uts46row", "[", "2", "]", "if", "len", "(", "uts46row", ")", "==", "3", "else", "None", "if", "(", "status", "==", "\"V\"", "or", "(", "status", "==", "\"D\"", "and", "not", "transitional", ")", "or", "(", "status", "==", "\"3\"", "and", "not", "std3_rules", "and", "replacement", "is", "None", ")", ")", ":", "output", "+=", "char", "elif", "replacement", "is", "not", "None", "and", "(", "status", "==", "\"M\"", "or", "(", "status", "==", "\"3\"", "and", "not", "std3_rules", ")", "or", "(", "status", "==", "\"D\"", "and", "transitional", ")", ")", ":", "output", "+=", "replacement", "elif", "status", "!=", "\"I\"", ":", "raise", "IndexError", "(", ")", "return", "unicodedata", ".", "normalize", "(", "\"NFC\"", ",", "output", ")", "except", "IndexError", ":", "raise", "InvalidCodepoint", "(", "\"Codepoint {0} not allowed at position {1} in {2}\"", ".", "format", "(", "_unot", "(", "code_point", ")", ",", "pos", "+", "1", ",", "repr", "(", "domain", ")", ")", ")" ]
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/idna/core.py#L312-L337
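The internal `uts46_remap` above backs the package's public entry point; a tiny sketch through that public API (the example domain is illustrative):

```python
# uts46=True routes encoding through the UTS46 remapping shown above,
# which case-folds 'B' before punycode-encoding the label.
import idna

print(idna.encode("Bücher.example", uts46=True))  # b'xn--bcher-kva.example'
```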
openmm/openmm
cb293447c4fc8b03976dfe11399f107bab70f3d9
wrappers/python/openmm/app/internal/pdbx/reader/PdbxReader.py
python
PdbxReader.__getContainerName
(self,inWord)
return str(inWord[5:]).strip()
Returns the name of the data_ or save_ container
Returns the name of the data_ or save_ container
[ "Returns", "the", "name", "of", "the", "data_", "or", "save_", "container" ]
def __getContainerName(self, inWord):
    """ Returns the name of the data_ or save_ container
    """
    return str(inWord[5:]).strip()
[ "def", "__getContainerName", "(", "self", ",", "inWord", ")", ":", "return", "str", "(", "inWord", "[", "5", ":", "]", ")", ".", "strip", "(", ")" ]
https://github.com/openmm/openmm/blob/cb293447c4fc8b03976dfe11399f107bab70f3d9/wrappers/python/openmm/app/internal/pdbx/reader/PdbxReader.py#L83-L86
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/index/package_finder.py
python
LinkEvaluator.evaluate_link
(self, link)
return (True, version)
Determine whether a link is a candidate for installation. :return: A tuple (is_candidate, result), where `result` is (1) a version string if `is_candidate` is True, and (2) if `is_candidate` is False, an optional string to log the reason the link fails to qualify.
[]
def evaluate_link(self, link):
    # type: (Link) -> Tuple[bool, Optional[str]]
    """
    Determine whether a link is a candidate for installation.

    :return: A tuple (is_candidate, result), where `result` is (1) a
        version string if `is_candidate` is True, and (2) if
        `is_candidate` is False, an optional string to log the reason
        the link fails to qualify.
    """
    version = None
    if link.is_yanked and not self._allow_yanked:
        reason = link.yanked_reason or '<none given>'
        return (False, f'yanked for reason: {reason}')

    if link.egg_fragment:
        egg_info = link.egg_fragment
        ext = link.ext
    else:
        egg_info, ext = link.splitext()
        if not ext:
            return (False, 'not a file')
        if ext not in SUPPORTED_EXTENSIONS:
            return (False, f'unsupported archive format: {ext}')
        if "binary" not in self._formats and ext == WHEEL_EXTENSION:
            reason = 'No binaries permitted for {}'.format(
                self.project_name)
            return (False, reason)
        if "macosx10" in link.path and ext == '.zip':
            return (False, 'macosx10 one')
        if ext == WHEEL_EXTENSION:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                return (False, 'invalid wheel filename')
            if canonicalize_name(wheel.name) != self._canonical_name:
                reason = 'wrong project name (not {})'.format(
                    self.project_name)
                return (False, reason)

            supported_tags = self._target_python.get_tags()
            if not wheel.supported(supported_tags):
                # Include the wheel's tags in the reason string to
                # simplify troubleshooting compatibility issues.
                file_tags = wheel.get_formatted_file_tags()
                reason = (
                    "none of the wheel's tags match: {}".format(
                        ', '.join(file_tags)
                    )
                )
                return (False, reason)

            version = wheel.version

    # This should be up by the self.ok_binary check, but see issue 2700.
    if "source" not in self._formats and ext != WHEEL_EXTENSION:
        reason = f'No sources permitted for {self.project_name}'
        return (False, reason)

    if not version:
        version = _extract_version_from_fragment(
            egg_info, self._canonical_name,
        )
    if not version:
        reason = f'Missing project version for {self.project_name}'
        return (False, reason)

    match = self._py_version_re.search(version)
    if match:
        version = version[:match.start()]
        py_version = match.group(1)
        if py_version != self._target_python.py_version:
            return (False, 'Python version is incorrect')

    supports_python = _check_link_requires_python(
        link, version_info=self._target_python.py_version_info,
        ignore_requires_python=self._ignore_requires_python,
    )
    if not supports_python:
        # Return None for the reason text to suppress calling
        # _log_skipped_link().
        return (False, None)

    logger.debug('Found link %s, version: %s', link, version)

    return (True, version)
[ "def", "evaluate_link", "(", "self", ",", "link", ")", ":", "# type: (Link) -> Tuple[bool, Optional[str]]", "version", "=", "None", "if", "link", ".", "is_yanked", "and", "not", "self", ".", "_allow_yanked", ":", "reason", "=", "link", ".", "yanked_reason", "or", "'<none given>'", "return", "(", "False", ",", "f'yanked for reason: {reason}'", ")", "if", "link", ".", "egg_fragment", ":", "egg_info", "=", "link", ".", "egg_fragment", "ext", "=", "link", ".", "ext", "else", ":", "egg_info", ",", "ext", "=", "link", ".", "splitext", "(", ")", "if", "not", "ext", ":", "return", "(", "False", ",", "'not a file'", ")", "if", "ext", "not", "in", "SUPPORTED_EXTENSIONS", ":", "return", "(", "False", ",", "f'unsupported archive format: {ext}'", ")", "if", "\"binary\"", "not", "in", "self", ".", "_formats", "and", "ext", "==", "WHEEL_EXTENSION", ":", "reason", "=", "'No binaries permitted for {}'", ".", "format", "(", "self", ".", "project_name", ")", "return", "(", "False", ",", "reason", ")", "if", "\"macosx10\"", "in", "link", ".", "path", "and", "ext", "==", "'.zip'", ":", "return", "(", "False", ",", "'macosx10 one'", ")", "if", "ext", "==", "WHEEL_EXTENSION", ":", "try", ":", "wheel", "=", "Wheel", "(", "link", ".", "filename", ")", "except", "InvalidWheelFilename", ":", "return", "(", "False", ",", "'invalid wheel filename'", ")", "if", "canonicalize_name", "(", "wheel", ".", "name", ")", "!=", "self", ".", "_canonical_name", ":", "reason", "=", "'wrong project name (not {})'", ".", "format", "(", "self", ".", "project_name", ")", "return", "(", "False", ",", "reason", ")", "supported_tags", "=", "self", ".", "_target_python", ".", "get_tags", "(", ")", "if", "not", "wheel", ".", "supported", "(", "supported_tags", ")", ":", "# Include the wheel's tags in the reason string to", "# simplify troubleshooting compatibility issues.", "file_tags", "=", "wheel", ".", "get_formatted_file_tags", "(", ")", "reason", "=", "(", "\"none of the wheel's tags match: {}\"", ".", "format", "(", "', '", ".", "join", "(", "file_tags", ")", ")", ")", "return", "(", "False", ",", "reason", ")", "version", "=", "wheel", ".", "version", "# This should be up by the self.ok_binary check, but see issue 2700.", "if", "\"source\"", "not", "in", "self", ".", "_formats", "and", "ext", "!=", "WHEEL_EXTENSION", ":", "reason", "=", "f'No sources permitted for {self.project_name}'", "return", "(", "False", ",", "reason", ")", "if", "not", "version", ":", "version", "=", "_extract_version_from_fragment", "(", "egg_info", ",", "self", ".", "_canonical_name", ",", ")", "if", "not", "version", ":", "reason", "=", "f'Missing project version for {self.project_name}'", "return", "(", "False", ",", "reason", ")", "match", "=", "self", ".", "_py_version_re", ".", "search", "(", "version", ")", "if", "match", ":", "version", "=", "version", "[", ":", "match", ".", "start", "(", ")", "]", "py_version", "=", "match", ".", "group", "(", "1", ")", "if", "py_version", "!=", "self", ".", "_target_python", ".", "py_version", ":", "return", "(", "False", ",", "'Python version is incorrect'", ")", "supports_python", "=", "_check_link_requires_python", "(", "link", ",", "version_info", "=", "self", ".", "_target_python", ".", "py_version_info", ",", "ignore_requires_python", "=", "self", ".", "_ignore_requires_python", ",", ")", "if", "not", "supports_python", ":", "# Return None for the reason text to suppress calling", "# _log_skipped_link().", "return", "(", "False", ",", "None", ")", "logger", ".", "debug", "(", "'Found link %s, version: 
%s'", ",", "link", ",", "version", ")", "return", "(", "True", ",", "version", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_internal/index/package_finder.py#L301-L471
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/distribute/coordinator/values.py
python
PerWorkerDistributedIterator.get_next
(self, name=None)
Returns the next input from the iterator for all replicas.
Returns the next input from the iterator for all replicas.
[ "Returns", "the", "next", "input", "from", "the", "iterator", "for", "all", "replicas", "." ]
def get_next(self, name=None):
    """Returns the next input from the iterator for all replicas."""
    raise NotImplementedError("Iterating over an `AsyncDistributedIterator` "
                              "is not supported right now.")
[ "def", "get_next", "(", "self", ",", "name", "=", "None", ")", ":", "raise", "NotImplementedError", "(", "\"Iterating over an `AsyncDistributedIterator` \"", "\"is not supported right now.\"", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/distribute/coordinator/values.py#L462-L465
fengbingchun/NN_Test
d6305825d5273e4569ccd1eda9ffa2a9c72e18d2
src/tiny-dnn/third_party/cpplint.py
python
_SetFilters
(filters)
Sets the module's error-message filters.

These filters are applied when deciding whether to emit a given error
message.

Args:
  filters: A string of comma-separated filters (eg "whitespace/indent").
           Each filter should start with + or -; else we die.
Sets the module's error-message filters.
[ "Sets", "the", "module", "s", "error", "-", "message", "filters", "." ]
def _SetFilters(filters):
    """Sets the module's error-message filters.

    These filters are applied when deciding whether to emit a given error
    message.

    Args:
      filters: A string of comma-separated filters (eg "whitespace/indent").
               Each filter should start with + or -; else we die.
    """
    _cpplint_state.SetFilters(filters)
[ "def", "_SetFilters", "(", "filters", ")", ":", "_cpplint_state", ".", "SetFilters", "(", "filters", ")" ]
https://github.com/fengbingchun/NN_Test/blob/d6305825d5273e4569ccd1eda9ffa2a9c72e18d2/src/tiny-dnn/third_party/cpplint.py#L1154-L1164
cvmfs/cvmfs
4637bdb5153178eadf885c1acf37bdc5c685bf8a
cpplint.py
python
CheckRedundantOverrideOrFinal
(filename, clean_lines, linenum, error)
Check if line contains a redundant "override" or "final" virt-specifier.

Args:
  filename: The name of the current file.
  clean_lines: A CleansedLines instance containing the file.
  linenum: The number of the line to check.
  error: The function to call with any errors found.
Check if line contains a redundant "override" or "final" virt-specifier.
[ "Check", "if", "line", "contains", "a", "redundant", "override", "or", "final", "virt", "-", "specifier", "." ]
def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error):
    """Check if line contains a redundant "override" or "final" virt-specifier.

    Args:
      filename: The name of the current file.
      clean_lines: A CleansedLines instance containing the file.
      linenum: The number of the line to check.
      error: The function to call with any errors found.
    """
    # Look for closing parenthesis nearby. We need one to confirm where
    # the declarator ends and where the virt-specifier starts to avoid
    # false positives.
    line = clean_lines.elided[linenum]
    declarator_end = line.rfind(')')
    if declarator_end >= 0:
        fragment = line[declarator_end:]
    else:
        if linenum > 1 and clean_lines.elided[linenum - 1].rfind(')') >= 0:
            fragment = line
        else:
            return

    # Check that at most one of "override" or "final" is present, not both
    if Search(r'\boverride\b', fragment) and Search(r'\bfinal\b', fragment):
        error(filename, linenum, 'readability/inheritance', 4,
              ('"override" is redundant since function is '
               'already declared as "final"'))
[ "def", "CheckRedundantOverrideOrFinal", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "# Look for closing parenthesis nearby. We need one to confirm where", "# the declarator ends and where the virt-specifier starts to avoid", "# false positives.", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "declarator_end", "=", "line", ".", "rfind", "(", "')'", ")", "if", "declarator_end", ">=", "0", ":", "fragment", "=", "line", "[", "declarator_end", ":", "]", "else", ":", "if", "linenum", ">", "1", "and", "clean_lines", ".", "elided", "[", "linenum", "-", "1", "]", ".", "rfind", "(", "')'", ")", ">=", "0", ":", "fragment", "=", "line", "else", ":", "return", "# Check that at most one of \"override\" or \"final\" is present, not both", "if", "Search", "(", "r'\\boverride\\b'", ",", "fragment", ")", "and", "Search", "(", "r'\\bfinal\\b'", ",", "fragment", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'readability/inheritance'", ",", "4", ",", "(", "'\"override\" is redundant since function is '", "'already declared as \"final\"'", ")", ")" ]
https://github.com/cvmfs/cvmfs/blob/4637bdb5153178eadf885c1acf37bdc5c685bf8a/cpplint.py#L5818-L5844
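The heart of the check above is two word-boundary regex searches on the text after the last closing parenthesis; a minimal standalone sketch with an illustrative declaration fragment:

```python
import re

# Text after the last ')' of a member-function declarator:
fragment = ') const override final {'
redundant = bool(re.search(r'\boverride\b', fragment) and
                 re.search(r'\bfinal\b', fragment))
print(redundant)  # True: "override" is redundant next to "final"
```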
emscripten-core/emscripten
0d413d3c5af8b28349682496edc14656f5700c2f
third_party/ply/example/GardenSnake/GardenSnake.py
python
p_funcdef
(p)
funcdef : DEF NAME parameters COLON suite
funcdef : DEF NAME parameters COLON suite
[ "funcdef", ":", "DEF", "NAME", "parameters", "COLON", "suite" ]
def p_funcdef(p):
    "funcdef : DEF NAME parameters COLON suite"
    p[0] = ast.Function(None, p[2], tuple(p[3]), (), 0, None, p[5])
[ "def", "p_funcdef", "(", "p", ")", ":", "p", "[", "0", "]", "=", "ast", ".", "Function", "(", "None", ",", "p", "[", "2", "]", ",", "tuple", "(", "p", "[", "3", "]", ")", ",", "(", ")", ",", "0", ",", "None", ",", "p", "[", "5", "]", ")" ]
https://github.com/emscripten-core/emscripten/blob/0d413d3c5af8b28349682496edc14656f5700c2f/third_party/ply/example/GardenSnake/GardenSnake.py#L383-L385
Kitware/ParaView
f760af9124ff4634b23ebbeab95a4f56e0261955
Wrapping/Python/paraview/servermanager.py
python
ProxyManager.__init__
(self, session=None)
Constructor. Assigns self.SMProxyManager to vtkSMProxyManager.GetProxyManager().
Constructor. Assigns self.SMProxyManager to vtkSMProxyManager.GetProxyManager().
[ "Constructor", ".", "Assigns", "self", ".", "SMProxyManager", "to", "vtkSMProxyManager", ".", "GetProxyManager", "()", "." ]
def __init__(self, session=None):
    """Constructor. Assigns self.SMProxyManager to
    vtkSMProxyManager.GetProxyManager()."""
    global ActiveConnection
    if not session:
        session = ActiveConnection.Session
    self.SMProxyManager = session.GetSessionProxyManager()
[ "def", "__init__", "(", "self", ",", "session", "=", "None", ")", ":", "global", "ActiveConnection", "if", "not", "session", ":", "session", "=", "ActiveConnection", ".", "Session", "self", ".", "SMProxyManager", "=", "session", ".", "GetSessionProxyManager", "(", ")" ]
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/Wrapping/Python/paraview/servermanager.py#L1761-L1767
baoboa/pyqt5
11d5f43bc6f213d9d60272f3954a0048569cfc7c
configure.py
python
check_5_4_modules
(target_config, disabled_modules, verbose)
Check which modules introduced in Qt v5.4 can be built and update the target configuration accordingly. target_config is the target configuration. disabled_modules is the list of modules that have been explicitly disabled. verbose is set if the output is to be displayed.
Check which modules introduced in Qt v5.4 can be built and update the target configuration accordingly. target_config is the target configuration. disabled_modules is the list of modules that have been explicitly disabled. verbose is set if the output is to be displayed.
[ "Check", "which", "modules", "introduced", "in", "Qt", "v5", ".", "4", "can", "be", "built", "and", "update", "the", "target", "configuration", "accordingly", ".", "target_config", "is", "the", "target", "configuration", ".", "disabled_modules", "is", "the", "list", "of", "modules", "that", "have", "been", "explicitly", "disabled", ".", "verbose", "is", "set", "if", "the", "output", "is", "to", "be", "displayed", "." ]
def check_5_4_modules(target_config, disabled_modules, verbose):
    """ Check which modules introduced in Qt v5.4 can be built and update the
    target configuration accordingly.

    target_config is the target configuration.
    disabled_modules is the list of modules that have been explicitly
    disabled.
    verbose is set if the output is to be displayed.
    """
    check_module(target_config, disabled_modules, verbose, 'QtWebChannel',
            'qwebchannel.h', 'new QWebChannel()')
    check_module(target_config, disabled_modules, verbose,
            'QtWebEngineWidgets', 'qwebengineview.h', 'new QWebEngineView()')
[ "def", "check_5_4_modules", "(", "target_config", ",", "disabled_modules", ",", "verbose", ")", ":", "check_module", "(", "target_config", ",", "disabled_modules", ",", "verbose", ",", "'QtWebChannel'", ",", "'qwebchannel.h'", ",", "'new QWebChannel()'", ")", "check_module", "(", "target_config", ",", "disabled_modules", ",", "verbose", ",", "'QtWebEngineWidgets'", ",", "'qwebengineview.h'", ",", "'new QWebEngineView()'", ")" ]
https://github.com/baoboa/pyqt5/blob/11d5f43bc6f213d9d60272f3954a0048569cfc7c/configure.py#L1421-L1431
takemaru/graphillion
51879f92bb96b53ef8f914ef37a05252ce383617
graphillion/graphset.py
python
GraphSet.cycles
(is_hamilton=False, graphset=None)
return GraphSet.graphs(vertex_groups=[[]], degree_constraints=dc, graphset=graphset)
Returns a GraphSet of cycles.

This method can be parallelized with OpenMP by specifying the
environmental variable `OMP_NUM_THREADS`:

  `$ OMP_NUM_THREADS=4 python your_graphillion_script.py`

Examples:
  >>> GraphSet.cycles(is_hamilton=True)
  GraphSet([[(1, 2), (1, 4), (2, 3), (3, 6), (4, 5), (5, 6)]])

Args:
  is_hamilton: Optional. True or False. If true, cycles must be
    composed of all vertices.

  graphset: Optional. A GraphSet object. Cycles to be stored are
    selected from this object.

Returns:
  A new GraphSet object.

See Also:
  graphs()
Returns a GraphSet of cycles.
[ "Returns", "a", "GraphSet", "of", "cycles", "." ]
def cycles(is_hamilton=False, graphset=None):
    """Returns a GraphSet of cycles.

    This method can be parallelized with OpenMP by specifying the
    environmental variable `OMP_NUM_THREADS`:

      `$ OMP_NUM_THREADS=4 python your_graphillion_script.py`

    Examples:
      >>> GraphSet.cycles(is_hamilton=True)
      GraphSet([[(1, 2), (1, 4), (2, 3), (3, 6), (4, 5), (5, 6)]])

    Args:
      is_hamilton: Optional. True or False. If true, cycles must be
        composed of all vertices.

      graphset: Optional. A GraphSet object. Cycles to be stored are
        selected from this object.

    Returns:
      A new GraphSet object.

    See Also:
      graphs()
    """
    dc = {}
    for v in GraphSet._vertices:
        dc[v] = 2 if is_hamilton else range(0, 3, 2)
    return GraphSet.graphs(vertex_groups=[[]], degree_constraints=dc,
                           graphset=graphset)
[ "def", "cycles", "(", "is_hamilton", "=", "False", ",", "graphset", "=", "None", ")", ":", "dc", "=", "{", "}", "for", "v", "in", "GraphSet", ".", "_vertices", ":", "dc", "[", "v", "]", "=", "2", "if", "is_hamilton", "else", "range", "(", "0", ",", "3", ",", "2", ")", "return", "GraphSet", ".", "graphs", "(", "vertex_groups", "=", "[", "[", "]", "]", ",", "degree_constraints", "=", "dc", ",", "graphset", "=", "graphset", ")" ]
https://github.com/takemaru/graphillion/blob/51879f92bb96b53ef8f914ef37a05252ce383617/graphillion/graphset.py#L1909-L1938
openvinotoolkit/openvino
dedcbeafa8b84cccdc55ca64b8da516682b381c7
tools/pot/openvino/tools/pot/statistics/functions/weights.py
python
calculate_per_filter_stats
(weights, fn, transpose=False)
return fn(t, axis=1)
Calculates per-filter statistics for weights using a specific function
:param weights: model layer weights
:param fn: function to calculate per-filter statistics
:param transpose: transpose weights data from IOHW to OIHW to collect stats
:return statistics generated by fn
Calculates per-filter statistics for weights using a specific function :param weights: model layer weights :param fn: function to calculate per-filter statistics :param transpose: transpose weights data from IOHW to OIHW to collect stats :return statistics generated by fn
[ "Calculates", "per", "-", "filter", "statistics", "for", "weights", "using", "a", "specific", "function", ":", "param", "weights", ":", "model", "layer", "weights", ":", "param", "fn", ":", "function", "to", "calculate", "per", "-", "filter", "statistics", ":", "param", "transpose", ":", "transpose", "weights", "data", "from", "IOHW", "to", "OIHW", "to", "collect", "stats", ":", "return", "statistics", "generated", "by", "fn" ]
def calculate_per_filter_stats(weights, fn, transpose=False):
    """ Calculates per-filter statistics for weights using a specific function
    :param weights: model layer weights
    :param fn: function to calculate per-filter statistics
    :param transpose: transpose weights data from IOHW to OIHW to collect stats
    :return statistics generated by fn
    """
    if transpose:
        weights_shape = [1, 0]
        original_axes = np.array(range(len(weights.shape)))
        weights_shape.extend(original_axes[2:])
        weights = np.transpose(weights, weights_shape)
    if not weights.shape:
        return fn(weights)
    t = np.reshape(weights, (weights.shape[0], -1))
    return fn(t, axis=1)
[ "def", "calculate_per_filter_stats", "(", "weights", ",", "fn", ",", "transpose", "=", "False", ")", ":", "if", "transpose", ":", "weights_shape", "=", "[", "1", ",", "0", "]", "original_axes", "=", "np", ".", "array", "(", "range", "(", "len", "(", "weights", ".", "shape", ")", ")", ")", "weights_shape", ".", "extend", "(", "original_axes", "[", "2", ":", "]", ")", "weights", "=", "np", ".", "transpose", "(", "weights", ",", "weights_shape", ")", "if", "not", "weights", ".", "shape", ":", "return", "fn", "(", "weights", ")", "t", "=", "np", ".", "reshape", "(", "weights", ",", "(", "weights", ".", "shape", "[", "0", "]", ",", "-", "1", ")", ")", "return", "fn", "(", "t", ",", "axis", "=", "1", ")" ]
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/pot/openvino/tools/pot/statistics/functions/weights.py#L14-L29
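A worked example of the per-filter reduction above: with OIHW weights the leading axis indexes output filters, so each filter flattens to one row before the reduction (the values here are illustrative):

```python
import numpy as np

w = np.arange(24, dtype=float).reshape(2, 3, 2, 2)  # 2 filters of shape 3x2x2
t = w.reshape(w.shape[0], -1)                       # shape (2, 12)
print(np.max(t, axis=1))                            # [11. 23.], one max per filter
```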
visionworkbench/visionworkbench
eff1ee8f0efd70565292031d12c4b960db80f48f
graveyard/Plate/plate2kml.py
python
TileRegion.project_to_level
(self, level)
return TileRegion( level, proj_bbox )
Return a TileRegion representing the extent of this TileRegion, projected onto a different level of the tile pyramid.
Return a TileRegion representing the extent of this TileRegion, projected onto a different level of the tile pyramid.
[ "Return", "a", "TileRegion", "representing", "the", "extent", "of", "this", "TileRegion", "projected", "onto", "a", "different", "level", "of", "the", "tile", "pyramid", "." ]
def project_to_level(self, level):
    """
    Return a TileRegion representing the extent of this TileRegion,
    projected onto a different level of the tile pyramid.
    """
    level_delta = level - self.level
    if level_delta == 0:
        return self
    scale_factor = 2 ** level_delta
    proj_bbox = BBox(
        self.minx * scale_factor,
        self.miny * scale_factor,
        self.width * scale_factor,
        self.height * scale_factor
    )
    if level_delta < 0:
        # Ensure that region bounds are still integers
        for prop in (proj_bbox.minx, proj_bbox.miny,
                     proj_bbox.width, proj_bbox.height):
            assert prop % 1 == 0
    return TileRegion(level, proj_bbox)
[ "def", "project_to_level", "(", "self", ",", "level", ")", ":", "level_delta", "=", "level", "-", "self", ".", "level", "if", "level_delta", "==", "0", ":", "return", "self", "scale_factor", "=", "2", "**", "level_delta", "proj_bbox", "=", "BBox", "(", "self", ".", "minx", "*", "scale_factor", ",", "self", ".", "miny", "*", "scale_factor", ",", "self", ".", "width", "*", "scale_factor", ",", "self", ".", "height", "*", "scale_factor", ")", "if", "level_delta", "<", "0", ":", "# Ensure that region bounds are still integers", "for", "prop", "in", "(", "proj_bbox", ".", "minx", ",", "proj_bbox", ".", "miny", ",", "proj_bbox", ".", "width", ",", "proj_bbox", ".", "height", ")", ":", "assert", "prop", "%", "1", "==", "0", "return", "TileRegion", "(", "level", ",", "proj_bbox", ")" ]
https://github.com/visionworkbench/visionworkbench/blob/eff1ee8f0efd70565292031d12c4b960db80f48f/graveyard/Plate/plate2kml.py#L266-L287
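The level arithmetic is easy to sanity-check with a standalone sketch of the same scaling rule (the TileRegion/BBox plumbing from the record is elided, so the function below is hypothetical):

def project_extent(minx, miny, width, height, level_delta):
    # Going down the pyramid (level_delta > 0) doubles coordinates per level;
    # going up halves them and must land back on integer tile bounds.
    scale = 2 ** level_delta
    projected = (minx * scale, miny * scale, width * scale, height * scale)
    if level_delta < 0:
        assert all(v % 1 == 0 for v in projected)
    return projected

# A 2x2 region at (4, 6), projected one level up (coarser):
print(project_extent(4, 6, 2, 2, -1))  # (2.0, 3.0, 1.0, 1.0)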
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py2/pandas/core/generic.py
python
NDFrame._add_series_or_dataframe_operations
(cls)
Add the series or dataframe only operations to the cls; evaluate the doc strings again.
Add the series or dataframe only operations to the cls; evaluate the doc strings again.
[ "Add", "the", "series", "or", "dataframe", "only", "operations", "to", "the", "cls", ";", "evaluate", "the", "doc", "strings", "again", "." ]
def _add_series_or_dataframe_operations(cls): """ Add the series or dataframe only operations to the cls; evaluate the doc strings again. """ from pandas.core import window as rwindow @Appender(rwindow.rolling.__doc__) def rolling(self, window, min_periods=None, center=False, win_type=None, on=None, axis=0, closed=None): axis = self._get_axis_number(axis) return rwindow.rolling(self, window=window, min_periods=min_periods, center=center, win_type=win_type, on=on, axis=axis, closed=closed) cls.rolling = rolling @Appender(rwindow.expanding.__doc__) def expanding(self, min_periods=1, center=False, axis=0): axis = self._get_axis_number(axis) return rwindow.expanding(self, min_periods=min_periods, center=center, axis=axis) cls.expanding = expanding @Appender(rwindow.ewm.__doc__) def ewm(self, com=None, span=None, halflife=None, alpha=None, min_periods=0, adjust=True, ignore_na=False, axis=0): axis = self._get_axis_number(axis) return rwindow.ewm(self, com=com, span=span, halflife=halflife, alpha=alpha, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na, axis=axis) cls.ewm = ewm
[ "def", "_add_series_or_dataframe_operations", "(", "cls", ")", ":", "from", "pandas", ".", "core", "import", "window", "as", "rwindow", "@", "Appender", "(", "rwindow", ".", "rolling", ".", "__doc__", ")", "def", "rolling", "(", "self", ",", "window", ",", "min_periods", "=", "None", ",", "center", "=", "False", ",", "win_type", "=", "None", ",", "on", "=", "None", ",", "axis", "=", "0", ",", "closed", "=", "None", ")", ":", "axis", "=", "self", ".", "_get_axis_number", "(", "axis", ")", "return", "rwindow", ".", "rolling", "(", "self", ",", "window", "=", "window", ",", "min_periods", "=", "min_periods", ",", "center", "=", "center", ",", "win_type", "=", "win_type", ",", "on", "=", "on", ",", "axis", "=", "axis", ",", "closed", "=", "closed", ")", "cls", ".", "rolling", "=", "rolling", "@", "Appender", "(", "rwindow", ".", "expanding", ".", "__doc__", ")", "def", "expanding", "(", "self", ",", "min_periods", "=", "1", ",", "center", "=", "False", ",", "axis", "=", "0", ")", ":", "axis", "=", "self", ".", "_get_axis_number", "(", "axis", ")", "return", "rwindow", ".", "expanding", "(", "self", ",", "min_periods", "=", "min_periods", ",", "center", "=", "center", ",", "axis", "=", "axis", ")", "cls", ".", "expanding", "=", "expanding", "@", "Appender", "(", "rwindow", ".", "ewm", ".", "__doc__", ")", "def", "ewm", "(", "self", ",", "com", "=", "None", ",", "span", "=", "None", ",", "halflife", "=", "None", ",", "alpha", "=", "None", ",", "min_periods", "=", "0", ",", "adjust", "=", "True", ",", "ignore_na", "=", "False", ",", "axis", "=", "0", ")", ":", "axis", "=", "self", ".", "_get_axis_number", "(", "axis", ")", "return", "rwindow", ".", "ewm", "(", "self", ",", "com", "=", "com", ",", "span", "=", "span", ",", "halflife", "=", "halflife", ",", "alpha", "=", "alpha", ",", "min_periods", "=", "min_periods", ",", "adjust", "=", "adjust", ",", "ignore_na", "=", "ignore_na", ",", "axis", "=", "axis", ")", "cls", ".", "ewm", "=", "ewm" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/generic.py#L10132-L10168
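What the three attached methods look like from the public pandas API (standard calls, nothing assumed beyond pandas itself):

import pandas as pd

s = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0])
print(s.rolling(window=3).mean())        # trailing 3-element means
print(s.expanding(min_periods=1).sum())  # cumulative sums
print(s.ewm(span=3).mean())              # exponentially weighted means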
epiqc/ScaffCC
66a79944ee4cd116b27bc1a69137276885461db8
llvm/bindings/python/llvm/object.py
python
Symbol.expire
(self)
Mark the object as expired to prevent future API accesses. This is called internally by this module and it is unlikely that external callers have a legitimate reason for using it.
Mark the object as expired to prevent future API accesses.
[ "Mark", "the", "object", "as", "expired", "to", "prevent", "future", "API", "accesses", "." ]
def expire(self): """Mark the object as expired to prevent future API accesses. This is called internally by this module and it is unlikely that external callers have a legitimate reason for using it. """ self.expired = True
[ "def", "expire", "(", "self", ")", ":", "self", ".", "expired", "=", "True" ]
https://github.com/epiqc/ScaffCC/blob/66a79944ee4cd116b27bc1a69137276885461db8/llvm/bindings/python/llvm/object.py#L349-L355
ProgerXP/Notepad2e
71585758099ec07d61dd14ba806068c0d937efd3
scintilla/scripts/Dependencies.py
python
ExtractDependencies
(input)
return deps
Create a list of dependencies from input list of lines Each element contains the name of the object and a list of files that it depends on. Dependencies that contain "/usr/" are removed as they are system headers.
Create a list of dependencies from input list of lines Each element contains the name of the object and a list of files that it depends on. Dependencies that contain "/usr/" are removed as they are system headers.
[ "Create", "a", "list", "of", "dependencies", "from", "input", "list", "of", "lines", "Each", "element", "contains", "the", "name", "of", "the", "object", "and", "a", "list", "of", "files", "that", "it", "depends", "on", ".", "Dependencies", "that", "contain", "/", "usr", "/", "are", "removed", "as", "they", "are", "system", "headers", "." ]
def ExtractDependencies(input): """ Create a list of dependencies from input list of lines Each element contains the name of the object and a list of files that it depends on. Dependencies that contain "/usr/" are removed as they are system headers. """ deps = [] for line in input: headersLine = line.startswith(" ") or line.startswith("\t") line = line.strip() isContinued = line.endswith("\\") line = line.rstrip("\\ ") fileNames = line.strip().split(" ") if not headersLine: # its a source file line, there may be headers too sourceLine = fileNames[0].rstrip(":") fileNames = fileNames[1:] deps.append([sourceLine, []]) deps[-1][1].extend(header for header in fileNames if "/usr/" not in header) return deps
[ "def", "ExtractDependencies", "(", "input", ")", ":", "deps", "=", "[", "]", "for", "line", "in", "input", ":", "headersLine", "=", "line", ".", "startswith", "(", "\" \"", ")", "or", "line", ".", "startswith", "(", "\"\\t\"", ")", "line", "=", "line", ".", "strip", "(", ")", "isContinued", "=", "line", ".", "endswith", "(", "\"\\\\\"", ")", "line", "=", "line", ".", "rstrip", "(", "\"\\\\ \"", ")", "fileNames", "=", "line", ".", "strip", "(", ")", ".", "split", "(", "\" \"", ")", "if", "not", "headersLine", ":", "# its a source file line, there may be headers too", "sourceLine", "=", "fileNames", "[", "0", "]", ".", "rstrip", "(", "\":\"", ")", "fileNames", "=", "fileNames", "[", "1", ":", "]", "deps", ".", "append", "(", "[", "sourceLine", ",", "[", "]", "]", ")", "deps", "[", "-", "1", "]", "[", "1", "]", ".", "extend", "(", "header", "for", "header", "in", "fileNames", "if", "\"/usr/\"", "not", "in", "header", ")", "return", "deps" ]
https://github.com/ProgerXP/Notepad2e/blob/71585758099ec07d61dd14ba806068c0d937efd3/scintilla/scripts/Dependencies.py#L101-L120
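A small worked example (hypothetical make-style input) showing the continuation handling and the /usr/ filter:

make_output = [
    "main.o: main.cxx scintilla.h /usr/include/stdio.h \\",
    "    platform.h",
]
print(ExtractDependencies(make_output))
# -> [['main.o', ['main.cxx', 'scintilla.h', 'platform.h']]]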
acbull/Unbiased_LambdaMart
7c39abe5caa18ca07df2d23c2db392916d92956c
Unbias_LightGBM/python-package/lightgbm/basic.py
python
cint32_array_to_numpy
(cptr, length)
Convert a ctypes int32 pointer array to a numpy array.
Convert a ctypes int32 pointer array to a numpy array.
[ "Convert", "a", "ctypes", "int32", "pointer", "array", "to", "a", "numpy", "array", "." ]
def cint32_array_to_numpy(cptr, length): """Convert a ctypes int32 pointer array to a numpy array. """ if isinstance(cptr, ctypes.POINTER(ctypes.c_int32)): return np.fromiter(cptr, dtype=np.int32, count=length) else: raise RuntimeError('Expected int pointer')
[ "def", "cint32_array_to_numpy", "(", "cptr", ",", "length", ")", ":", "if", "isinstance", "(", "cptr", ",", "ctypes", ".", "POINTER", "(", "ctypes", ".", "c_int32", ")", ")", ":", "return", "np", ".", "fromiter", "(", "cptr", ",", "dtype", "=", "np", ".", "int32", ",", "count", "=", "length", ")", "else", ":", "raise", "RuntimeError", "(", "'Expected int pointer'", ")" ]
https://github.com/acbull/Unbiased_LambdaMart/blob/7c39abe5caa18ca07df2d23c2db392916d92956c/Unbias_LightGBM/python-package/lightgbm/basic.py#L107-L113
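A usage sketch: build a C buffer and view it through the pointer type the helper expects (np.fromiter stops after count items, which is why iterating a bare pointer is safe here):

import ctypes
import numpy as np

buf = (ctypes.c_int32 * 4)(10, 20, 30, 40)
ptr = ctypes.cast(buf, ctypes.POINTER(ctypes.c_int32))
arr = cint32_array_to_numpy(ptr, 4)
print(arr, arr.dtype)  # [10 20 30 40] int32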
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py2/sklearn/datasets/lfw.py
python
load_lfw_pairs
(download_if_missing=False, **kwargs)
return fetch_lfw_pairs(download_if_missing=download_if_missing, **kwargs)
Alias for fetch_lfw_pairs(download_if_missing=False) .. deprecated:: 0.17 This function will be removed in 0.19. Use :func:`sklearn.datasets.fetch_lfw_pairs` with parameter ``download_if_missing=False`` instead. Check fetch_lfw_pairs.__doc__ for the documentation and parameter list.
Alias for fetch_lfw_pairs(download_if_missing=False)
[ "Alias", "for", "fetch_lfw_pairs", "(", "download_if_missing", "=", "False", ")" ]
def load_lfw_pairs(download_if_missing=False, **kwargs): """ Alias for fetch_lfw_pairs(download_if_missing=False) .. deprecated:: 0.17 This function will be removed in 0.19. Use :func:`sklearn.datasets.fetch_lfw_pairs` with parameter ``download_if_missing=False`` instead. Check fetch_lfw_pairs.__doc__ for the documentation and parameter list. """ return fetch_lfw_pairs(download_if_missing=download_if_missing, **kwargs)
[ "def", "load_lfw_pairs", "(", "download_if_missing", "=", "False", ",", "*", "*", "kwargs", ")", ":", "return", "fetch_lfw_pairs", "(", "download_if_missing", "=", "download_if_missing", ",", "*", "*", "kwargs", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/datasets/lfw.py#L517-L528
cvxpy/cvxpy
5165b4fb750dfd237de8659383ef24b4b2e33aaf
cvxpy/interface/matrix_utilities.py
python
is_sparse_symmetric
(m, complex: bool = False)
return check
Check if a sparse matrix is symmetric Parameters ---------- m : array or sparse matrix A square matrix. Returns ------- check : bool The check result.
Check if a sparse matrix is symmetric
[ "Check", "if", "a", "sparse", "matrix", "is", "symmetric" ]
def is_sparse_symmetric(m, complex: bool = False) -> bool: """Check if a sparse matrix is symmetric Parameters ---------- m : array or sparse matrix A square matrix. Returns ------- check : bool The check result. """ # https://mail.scipy.org/pipermail/scipy-dev/2014-October/020101.html if m.shape[0] != m.shape[1]: raise ValueError('m must be a square matrix') if not isinstance(m, sp.coo_matrix): m = sp.coo_matrix(m) r, c, v = m.row, m.col, m.data tril_no_diag = r > c triu_no_diag = c > r if triu_no_diag.sum() != tril_no_diag.sum(): return False rl = r[tril_no_diag] cl = c[tril_no_diag] vl = v[tril_no_diag] ru = r[triu_no_diag] cu = c[triu_no_diag] vu = v[triu_no_diag] sortl = np.lexsort((cl, rl)) sortu = np.lexsort((ru, cu)) vl = vl[sortl] vu = vu[sortu] if complex: check = np.allclose(vl, np.conj(vu)) else: check = np.allclose(vl, vu) return check
[ "def", "is_sparse_symmetric", "(", "m", ",", "complex", ":", "bool", "=", "False", ")", "->", "bool", ":", "# https://mail.scipy.org/pipermail/scipy-dev/2014-October/020101.html", "if", "m", ".", "shape", "[", "0", "]", "!=", "m", ".", "shape", "[", "1", "]", ":", "raise", "ValueError", "(", "'m must be a square matrix'", ")", "if", "not", "isinstance", "(", "m", ",", "sp", ".", "coo_matrix", ")", ":", "m", "=", "sp", ".", "coo_matrix", "(", "m", ")", "r", ",", "c", ",", "v", "=", "m", ".", "row", ",", "m", ".", "col", ",", "m", ".", "data", "tril_no_diag", "=", "r", ">", "c", "triu_no_diag", "=", "c", ">", "r", "if", "triu_no_diag", ".", "sum", "(", ")", "!=", "tril_no_diag", ".", "sum", "(", ")", ":", "return", "False", "rl", "=", "r", "[", "tril_no_diag", "]", "cl", "=", "c", "[", "tril_no_diag", "]", "vl", "=", "v", "[", "tril_no_diag", "]", "ru", "=", "r", "[", "triu_no_diag", "]", "cu", "=", "c", "[", "triu_no_diag", "]", "vu", "=", "v", "[", "triu_no_diag", "]", "sortl", "=", "np", ".", "lexsort", "(", "(", "cl", ",", "rl", ")", ")", "sortu", "=", "np", ".", "lexsort", "(", "(", "ru", ",", "cu", ")", ")", "vl", "=", "vl", "[", "sortl", "]", "vu", "=", "vu", "[", "sortu", "]", "if", "complex", ":", "check", "=", "np", ".", "allclose", "(", "vl", ",", "np", ".", "conj", "(", "vu", ")", ")", "else", ":", "check", "=", "np", ".", "allclose", "(", "vl", ",", "vu", ")", "return", "check" ]
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/interface/matrix_utilities.py#L292-L337
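A quick check against small scipy.sparse matrices (assumes the function above is importable):

import numpy as np
import scipy.sparse as sp

sym = sp.coo_matrix(np.array([[1, 2], [2, 3]]))
asym = sp.coo_matrix(np.array([[1, 2], [0, 3]]))
print(is_sparse_symmetric(sym))   # True
print(is_sparse_symmetric(asym))  # False

# With complex=True the comparison is Hermitian: A[i, j] == conj(A[j, i]).
herm = sp.coo_matrix(np.array([[1, 1j], [-1j, 2]]))
print(is_sparse_symmetric(herm, complex=True))  # True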
plumonito/dtslam
5994bb9cf7a11981b830370db206bceb654c085d
3rdparty/opencv-git/samples/python2/video.py
python
create_capture
(source = 0, fallback = presets['chess'])
return cap
source: <int> or '<int>|<filename>|synth [:<param_name>=<value> [:...]]'
source: <int> or '<int>|<filename>|synth [:<param_name>=<value> [:...]]'
[ "source", ":", "<int", ">", "or", "<int", ">", "|<filename", ">", "|synth", "[", ":", "<param_name", ">", "=", "<value", ">", "[", ":", "...", "]]" ]
def create_capture(source = 0, fallback = presets['chess']): '''source: <int> or '<int>|<filename>|synth [:<param_name>=<value> [:...]]' ''' source = str(source).strip() chunks = source.split(':') # handle drive letter ('c:', ...) if len(chunks) > 1 and len(chunks[0]) == 1 and chunks[0].isalpha(): chunks[1] = chunks[0] + ':' + chunks[1] del chunks[0] source = chunks[0] try: source = int(source) except ValueError: pass params = dict( s.split('=') for s in chunks[1:] ) cap = None if source == 'synth': Class = classes.get(params.get('class', None), VideoSynthBase) try: cap = Class(**params) except: pass else: cap = cv2.VideoCapture(source) if 'size' in params: w, h = map(int, params['size'].split('x')) cap.set(cv2.CAP_PROP_FRAME_WIDTH, w) cap.set(cv2.CAP_PROP_FRAME_HEIGHT, h) if cap is None or not cap.isOpened(): print 'Warning: unable to open video source: ', source if fallback is not None: return create_capture(fallback, None) return cap
[ "def", "create_capture", "(", "source", "=", "0", ",", "fallback", "=", "presets", "[", "'chess'", "]", ")", ":", "source", "=", "str", "(", "source", ")", ".", "strip", "(", ")", "chunks", "=", "source", ".", "split", "(", "':'", ")", "# handle drive letter ('c:', ...)", "if", "len", "(", "chunks", ")", ">", "1", "and", "len", "(", "chunks", "[", "0", "]", ")", "==", "1", "and", "chunks", "[", "0", "]", ".", "isalpha", "(", ")", ":", "chunks", "[", "1", "]", "=", "chunks", "[", "0", "]", "+", "':'", "+", "chunks", "[", "1", "]", "del", "chunks", "[", "0", "]", "source", "=", "chunks", "[", "0", "]", "try", ":", "source", "=", "int", "(", "source", ")", "except", "ValueError", ":", "pass", "params", "=", "dict", "(", "s", ".", "split", "(", "'='", ")", "for", "s", "in", "chunks", "[", "1", ":", "]", ")", "cap", "=", "None", "if", "source", "==", "'synth'", ":", "Class", "=", "classes", ".", "get", "(", "params", ".", "get", "(", "'class'", ",", "None", ")", ",", "VideoSynthBase", ")", "try", ":", "cap", "=", "Class", "(", "*", "*", "params", ")", "except", ":", "pass", "else", ":", "cap", "=", "cv2", ".", "VideoCapture", "(", "source", ")", "if", "'size'", "in", "params", ":", "w", ",", "h", "=", "map", "(", "int", ",", "params", "[", "'size'", "]", ".", "split", "(", "'x'", ")", ")", "cap", ".", "set", "(", "cv2", ".", "CAP_PROP_FRAME_WIDTH", ",", "w", ")", "cap", ".", "set", "(", "cv2", ".", "CAP_PROP_FRAME_HEIGHT", ",", "h", ")", "if", "cap", "is", "None", "or", "not", "cap", ".", "isOpened", "(", ")", ":", "print", "'Warning: unable to open video source: '", ",", "source", "if", "fallback", "is", "not", "None", ":", "return", "create_capture", "(", "fallback", ",", "None", ")", "return", "cap" ]
https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/samples/python2/video.py#L138-L168
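Typical call shapes, inferred from the parsing above (the 'chess', 'class', and 'size' parameter names appear in this record; the file path is illustrative):

cap = create_capture('0:size=640x480')                  # camera 0, forced frame size
cap = create_capture('video.avi')                       # plain file path
cap = create_capture('synth:class=chess:size=320x240')  # synthetic chessboard source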
francinexue/xuefu
b6ff79747a42e020588c0c0a921048e08fe4680c
ctpx/ctp3/ctptd.py
python
CtpTd.onFrontDisconnected
(self, reasonCode)
0x2003 received an erroneous packet
0x2003 received an erroneous packet
[ "0x2003", "received", "an", "erroneous", "packet" ]
def onFrontDisconnected(self, reasonCode): """0x2003 received an erroneous packet""" pass
[ "def", "onFrontDisconnected", "(", "self", ",", "reasonCode", ")", ":", "pass" ]
https://github.com/francinexue/xuefu/blob/b6ff79747a42e020588c0c0a921048e08fe4680c/ctpx/ctp3/ctptd.py#L43-L45
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/aui.py
python
AuiToolBar.ClearTools
(*args, **kwargs)
return _aui.AuiToolBar_ClearTools(*args, **kwargs)
ClearTools(self)
ClearTools(self)
[ "ClearTools", "(", "self", ")" ]
def ClearTools(*args, **kwargs): """ClearTools(self)""" return _aui.AuiToolBar_ClearTools(*args, **kwargs)
[ "def", "ClearTools", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_aui", ".", "AuiToolBar_ClearTools", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/aui.py#L2070-L2072
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/linear.py
python
LinearRegressor._get_predict_ops
(self, features)
return super(LinearRegressor, self)._get_predict_ops(features)
See base class.
See base class.
[ "See", "base", "class", "." ]
def _get_predict_ops(self, features): """See base class.""" self._validate_linear_feature_columns(features) return super(LinearRegressor, self)._get_predict_ops(features)
[ "def", "_get_predict_ops", "(", "self", ",", "features", ")", ":", "self", ".", "_validate_linear_feature_columns", "(", "features", ")", "return", "super", "(", "LinearRegressor", ",", "self", ")", ".", "_get_predict_ops", "(", "features", ")" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/learn/python/learn/estimators/linear.py#L343-L346
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/python/turicreate/toolkits/_tf_utils.py
python
convert_conv1d_coreml_to_tf
(conv_weights)
return np.squeeze(conv_weights, axis=3)
The Convolutional weights in CoreML specification converted to the TensorFlow format for training in TensorFlow. Parameters ---------- conv_weights: 4d numpy array of shape [outputChannels, kernelChannels, kernelHeight, kernelWidth] Returns ------- return: 3d numpy array of shape [kernelWidth, kernelChannels, outputChannels] since kernelHeight = 1 for conv1d
The Convolutional weights in CoreML specification converted to the TensorFlow format for training in TensorFlow.
[ "The", "Convolutional", "weights", "in", "CoreML", "specification", "converted", "to", "the", "TensorFlow", "format", "for", "training", "in", "TensorFlow", "." ]
def convert_conv1d_coreml_to_tf(conv_weights): """ The Convolutional weights in CoreML specification converted to the TensorFlow format for training in TensorFlow. Parameters ---------- conv_weights: 4d numpy array of shape [outputChannels, kernelChannels, kernelHeight, kernelWidth] Returns ------- return: 3d numpy array of shape [kernelWidth, kernelChannels, outputChannels] since kernelHeight = 1 for conv1d """ conv_weights = np.transpose(conv_weights, (3, 1, 0, 2)) return np.squeeze(conv_weights, axis=3)
[ "def", "convert_conv1d_coreml_to_tf", "(", "conv_weights", ")", ":", "conv_weights", "=", "np", ".", "transpose", "(", "conv_weights", ",", "(", "3", ",", "1", ",", "0", ",", "2", ")", ")", "return", "np", ".", "squeeze", "(", "conv_weights", ",", "axis", "=", "3", ")" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/toolkits/_tf_utils.py#L98-L117
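A shape-only sketch of the conversion (the 64/32/5 dimensions are made up):

import numpy as np

coreml_w = np.random.randn(64, 32, 1, 5)  # [out, kernelChannels, 1, kernelWidth]
tf_w = convert_conv1d_coreml_to_tf(coreml_w)
print(tf_w.shape)                         # (5, 32, 64)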
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/sparse/linalg/eigen/arpack/arpack.py
python
svds
(A, k=6, ncv=None, tol=0, which='LM', v0=None, maxiter=None, return_singular_vectors=True)
return u, s, vh
Compute the largest k singular values/vectors for a sparse matrix. Parameters ---------- A : {sparse matrix, LinearOperator} Array to compute the SVD on, of shape (M, N) k : int, optional Number of singular values and vectors to compute. Must be 1 <= k < min(A.shape). ncv : int, optional The number of Lanczos vectors generated ncv must be greater than k+1 and smaller than n; it is recommended that ncv > 2*k Default: ``min(n, max(2*k + 1, 20))`` tol : float, optional Tolerance for singular values. Zero (default) means machine precision. which : str, ['LM' | 'SM'], optional Which `k` singular values to find: - 'LM' : largest singular values - 'SM' : smallest singular values .. versionadded:: 0.12.0 v0 : ndarray, optional Starting vector for iteration, of length min(A.shape). Should be an (approximate) left singular vector if N > M and a right singular vector otherwise. Default: random .. versionadded:: 0.12.0 maxiter : int, optional Maximum number of iterations. .. versionadded:: 0.12.0 return_singular_vectors : bool or str, optional - True: return singular vectors (True) in addition to singular values. .. versionadded:: 0.12.0 - "u": only return the u matrix, without computing vh (if N > M). - "vh": only return the vh matrix, without computing u (if N <= M). .. versionadded:: 0.16.0 Returns ------- u : ndarray, shape=(M, k) Unitary matrix having left singular vectors as columns. If `return_singular_vectors` is "vh", this variable is not computed, and None is returned instead. s : ndarray, shape=(k,) The singular values. vt : ndarray, shape=(k, N) Unitary matrix having right singular vectors as rows. If `return_singular_vectors` is "u", this variable is not computed, and None is returned instead. Notes ----- This is a naive implementation using ARPACK as an eigensolver on A.H * A or A * A.H, depending on which one is more efficient.
Compute the largest k singular values/vectors for a sparse matrix.
[ "Compute", "the", "largest", "k", "singular", "values", "/", "vectors", "for", "a", "sparse", "matrix", "." ]
def svds(A, k=6, ncv=None, tol=0, which='LM', v0=None, maxiter=None, return_singular_vectors=True): """Compute the largest k singular values/vectors for a sparse matrix. Parameters ---------- A : {sparse matrix, LinearOperator} Array to compute the SVD on, of shape (M, N) k : int, optional Number of singular values and vectors to compute. Must be 1 <= k < min(A.shape). ncv : int, optional The number of Lanczos vectors generated ncv must be greater than k+1 and smaller than n; it is recommended that ncv > 2*k Default: ``min(n, max(2*k + 1, 20))`` tol : float, optional Tolerance for singular values. Zero (default) means machine precision. which : str, ['LM' | 'SM'], optional Which `k` singular values to find: - 'LM' : largest singular values - 'SM' : smallest singular values .. versionadded:: 0.12.0 v0 : ndarray, optional Starting vector for iteration, of length min(A.shape). Should be an (approximate) left singular vector if N > M and a right singular vector otherwise. Default: random .. versionadded:: 0.12.0 maxiter : int, optional Maximum number of iterations. .. versionadded:: 0.12.0 return_singular_vectors : bool or str, optional - True: return singular vectors (True) in addition to singular values. .. versionadded:: 0.12.0 - "u": only return the u matrix, without computing vh (if N > M). - "vh": only return the vh matrix, without computing u (if N <= M). .. versionadded:: 0.16.0 Returns ------- u : ndarray, shape=(M, k) Unitary matrix having left singular vectors as columns. If `return_singular_vectors` is "vh", this variable is not computed, and None is returned instead. s : ndarray, shape=(k,) The singular values. vt : ndarray, shape=(k, N) Unitary matrix having right singular vectors as rows. If `return_singular_vectors` is "u", this variable is not computed, and None is returned instead. Notes ----- This is a naive implementation using ARPACK as an eigensolver on A.H * A or A * A.H, depending on which one is more efficient. """ if not (isinstance(A, LinearOperator) or isspmatrix(A)): A = np.asarray(A) n, m = A.shape if k <= 0 or k >= min(n, m): raise ValueError("k must be between 1 and min(A.shape), k=%d" % k) if isinstance(A, LinearOperator): if n > m: X_dot = A.matvec X_matmat = A.matmat XH_dot = A.rmatvec else: X_dot = A.rmatvec XH_dot = A.matvec dtype = getattr(A, 'dtype', None) if dtype is None: dtype = A.dot(np.zeros([m,1])).dtype # A^H * V; works around lack of LinearOperator.adjoint. # XXX This can be slow! def X_matmat(V): out = np.empty((V.shape[1], m), dtype=dtype) for i, col in enumerate(V.T): out[i, :] = A.rmatvec(col.reshape(-1, 1)).T return out.T else: if n > m: X_dot = X_matmat = A.dot XH_dot = _herm(A).dot else: XH_dot = A.dot X_dot = X_matmat = _herm(A).dot def matvec_XH_X(x): return XH_dot(X_dot(x)) XH_X = LinearOperator(matvec=matvec_XH_X, dtype=A.dtype, shape=(min(A.shape), min(A.shape))) # Get a low rank approximation of the implicitly defined gramian matrix. # This is not a stable way to approach the problem. eigvals, eigvec = eigsh(XH_X, k=k, tol=tol ** 2, maxiter=maxiter, ncv=ncv, which=which, v0=v0) # In 'LM' mode try to be clever about small eigenvalues. # Otherwise in 'SM' mode do not try to be clever. if which == 'LM': # Gramian matrices have real non-negative eigenvalues. eigvals = np.maximum(eigvals.real, 0) # Use the sophisticated detection of small eigenvalues from pinvh. t = eigvec.dtype.char.lower() factor = {'f': 1E3, 'd': 1E6} cond = factor[t] * np.finfo(t).eps cutoff = cond * np.max(eigvals) # Get a mask indicating which eigenpairs are not degenerately tiny, # and create the re-ordered array of thresholded singular values. above_cutoff = (eigvals > cutoff) nlarge = above_cutoff.sum() nsmall = k - nlarge slarge = np.sqrt(eigvals[above_cutoff]) s = np.zeros_like(eigvals) s[:nlarge] = slarge if not return_singular_vectors: return s if n > m: vlarge = eigvec[:, above_cutoff] ularge = X_matmat(vlarge) / slarge if return_singular_vectors != 'vh' else None vhlarge = _herm(vlarge) else: ularge = eigvec[:, above_cutoff] vhlarge = _herm(X_matmat(ularge) / slarge) if return_singular_vectors != 'u' else None u = _augmented_orthonormal_cols(ularge, nsmall) if ularge is not None else None vh = _augmented_orthonormal_rows(vhlarge, nsmall) if vhlarge is not None else None elif which == 'SM': s = np.sqrt(eigvals) if not return_singular_vectors: return s if n > m: v = eigvec u = X_matmat(v) / s if return_singular_vectors != 'vh' else None vh = _herm(v) else: u = eigvec vh = _herm(X_matmat(u) / s) if return_singular_vectors != 'u' else None else: raise ValueError("which must be either 'LM' or 'SM'.") return u, s, vh
[ "def", "svds", "(", "A", ",", "k", "=", "6", ",", "ncv", "=", "None", ",", "tol", "=", "0", ",", "which", "=", "'LM'", ",", "v0", "=", "None", ",", "maxiter", "=", "None", ",", "return_singular_vectors", "=", "True", ")", ":", "if", "not", "(", "isinstance", "(", "A", ",", "LinearOperator", ")", "or", "isspmatrix", "(", "A", ")", ")", ":", "A", "=", "np", ".", "asarray", "(", "A", ")", "n", ",", "m", "=", "A", ".", "shape", "if", "k", "<=", "0", "or", "k", ">=", "min", "(", "n", ",", "m", ")", ":", "raise", "ValueError", "(", "\"k must be between 1 and min(A.shape), k=%d\"", "%", "k", ")", "if", "isinstance", "(", "A", ",", "LinearOperator", ")", ":", "if", "n", ">", "m", ":", "X_dot", "=", "A", ".", "matvec", "X_matmat", "=", "A", ".", "matmat", "XH_dot", "=", "A", ".", "rmatvec", "else", ":", "X_dot", "=", "A", ".", "rmatvec", "XH_dot", "=", "A", ".", "matvec", "dtype", "=", "getattr", "(", "A", ",", "'dtype'", ",", "None", ")", "if", "dtype", "is", "None", ":", "dtype", "=", "A", ".", "dot", "(", "np", ".", "zeros", "(", "[", "m", ",", "1", "]", ")", ")", ".", "dtype", "# A^H * V; works around lack of LinearOperator.adjoint.", "# XXX This can be slow!", "def", "X_matmat", "(", "V", ")", ":", "out", "=", "np", ".", "empty", "(", "(", "V", ".", "shape", "[", "1", "]", ",", "m", ")", ",", "dtype", "=", "dtype", ")", "for", "i", ",", "col", "in", "enumerate", "(", "V", ".", "T", ")", ":", "out", "[", "i", ",", ":", "]", "=", "A", ".", "rmatvec", "(", "col", ".", "reshape", "(", "-", "1", ",", "1", ")", ")", ".", "T", "return", "out", ".", "T", "else", ":", "if", "n", ">", "m", ":", "X_dot", "=", "X_matmat", "=", "A", ".", "dot", "XH_dot", "=", "_herm", "(", "A", ")", ".", "dot", "else", ":", "XH_dot", "=", "A", ".", "dot", "X_dot", "=", "X_matmat", "=", "_herm", "(", "A", ")", ".", "dot", "def", "matvec_XH_X", "(", "x", ")", ":", "return", "XH_dot", "(", "X_dot", "(", "x", ")", ")", "XH_X", "=", "LinearOperator", "(", "matvec", "=", "matvec_XH_X", ",", "dtype", "=", "A", ".", "dtype", ",", "shape", "=", "(", "min", "(", "A", ".", "shape", ")", ",", "min", "(", "A", ".", "shape", ")", ")", ")", "# Get a low rank approximation of the implicitly defined gramian matrix.", "# This is not a stable way to approach the problem.", "eigvals", ",", "eigvec", "=", "eigsh", "(", "XH_X", ",", "k", "=", "k", ",", "tol", "=", "tol", "**", "2", ",", "maxiter", "=", "maxiter", ",", "ncv", "=", "ncv", ",", "which", "=", "which", ",", "v0", "=", "v0", ")", "# In 'LM' mode try to be clever about small eigenvalues.", "# Otherwise in 'SM' mode do not try to be clever.", "if", "which", "==", "'LM'", ":", "# Gramian matrices have real non-negative eigenvalues.", "eigvals", "=", "np", ".", "maximum", "(", "eigvals", ".", "real", ",", "0", ")", "# Use the sophisticated detection of small eigenvalues from pinvh.", "t", "=", "eigvec", ".", "dtype", ".", "char", ".", "lower", "(", ")", "factor", "=", "{", "'f'", ":", "1E3", ",", "'d'", ":", "1E6", "}", "cond", "=", "factor", "[", "t", "]", "*", "np", ".", "finfo", "(", "t", ")", ".", "eps", "cutoff", "=", "cond", "*", "np", ".", "max", "(", "eigvals", ")", "# Get a mask indicating which eigenpairs are not degenerately tiny,", "# and create the re-ordered array of thresholded singular values.", "above_cutoff", "=", "(", "eigvals", ">", "cutoff", ")", "nlarge", "=", "above_cutoff", ".", "sum", "(", ")", "nsmall", "=", "k", "-", "nlarge", "slarge", "=", "np", ".", "sqrt", "(", "eigvals", "[", "above_cutoff", "]", ")", "s", "=", "np", ".", "zeros_like", "(", "eigvals", ")", "s", "[", ":", "nlarge", "]", "=", "slarge", "if", "not", "return_singular_vectors", ":", "return", "s", "if", "n", ">", "m", ":", "vlarge", "=", "eigvec", "[", ":", ",", "above_cutoff", "]", "ularge", "=", "X_matmat", "(", "vlarge", ")", "/", "slarge", "if", "return_singular_vectors", "!=", "'vh'", "else", "None", "vhlarge", "=", "_herm", "(", "vlarge", ")", "else", ":", "ularge", "=", "eigvec", "[", ":", ",", "above_cutoff", "]", "vhlarge", "=", "_herm", "(", "X_matmat", "(", "ularge", ")", "/", "slarge", ")", "if", "return_singular_vectors", "!=", "'u'", "else", "None", "u", "=", "_augmented_orthonormal_cols", "(", "ularge", ",", "nsmall", ")", "if", "ularge", "is", "not", "None", "else", "None", "vh", "=", "_augmented_orthonormal_rows", "(", "vhlarge", ",", "nsmall", ")", "if", "vhlarge", "is", "not", "None", "else", "None", "elif", "which", "==", "'SM'", ":", "s", "=", "np", ".", "sqrt", "(", "eigvals", ")", "if", "not", "return_singular_vectors", ":", "return", "s", "if", "n", ">", "m", ":", "v", "=", "eigvec", "u", "=", "X_matmat", "(", "v", ")", "/", "s", "if", "return_singular_vectors", "!=", "'vh'", "else", "None", "vh", "=", "_herm", "(", "v", ")", "else", ":", "u", "=", "eigvec", "vh", "=", "_herm", "(", "X_matmat", "(", "u", ")", "/", "s", ")", "if", "return_singular_vectors", "!=", "'u'", "else", "None", "else", ":", "raise", "ValueError", "(", "\"which must be either 'LM' or 'SM'.\"", ")", "return", "u", ",", "s", ",", "vh" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/sparse/linalg/eigen/arpack/arpack.py#L1642-L1809
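A usage sketch against the public scipy.sparse.linalg entry point, cross-checked with dense SVD (fine at this size):

import numpy as np
import scipy.sparse as sp
from scipy.sparse.linalg import svds

A = sp.random(50, 30, density=0.1, random_state=np.random.RandomState(0))
u, s, vt = svds(A, k=3)  # default which='LM': three largest singular triplets
print(np.sort(s))
print(np.sort(np.linalg.svd(A.toarray(), compute_uv=False))[-3:])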
apache/incubator-mxnet
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
python/mxnet/symbol/numpy/_symbol.py
python
bitwise_not
(x, out=None, **kwargs)
return _unary_func_helper(x, _npi.bitwise_not, _np.bitwise_not, out=out, **kwargs)
r""" Compute bit-wise inversion, or bit-wise NOT, element-wise. Computes the bit-wise NOT of the underlying binary representation of the integers in the input arrays. This ufunc implements the C/Python operator ``~``. Parameters ---------- x : array_like Only integer and boolean types are handled. out : ndarray, None, or tuple of ndarray and None, optional A location into which the result is stored. If provided, it must have a shape that the inputs broadcast to. If not provided or `None`, a freshly-allocated array is returned. A tuple (possible only as a keyword argument) must have length equal to the number of outputs. Returns ------- out : ndarray or scalar Result. This is a scalar if `x` is a scalar. See Also -------- bitwise_and, bitwise_or, bitwise_xor logical_not binary_repr : Return the binary representation of the input number as a string. Examples -------- We've seen that 13 is represented by ``00001101``. The invert or bit-wise NOT of 13 is then: >>> x = np.invert(np.array(13, dtype=np.uint8)) >>> x 242 >>> np.binary_repr(x, width=8) '11110010' Notes ----- `bitwise_not` is an alias for `invert`: >>> np.bitwise_not is np.invert True
r""" Compute bit-wise inversion, or bit-wise NOT, element-wise. Computes the bit-wise NOT of the underlying binary representation of the integers in the input arrays. This ufunc implements the C/Python operator ``~``. Parameters ---------- x : array_like Only integer and boolean types are handled. out : ndarray, None, or tuple of ndarray and None, optional A location into which the result is stored. If provided, it must have a shape that the inputs broadcast to. If not provided or `None`, a freshly-allocated array is returned. A tuple (possible only as a keyword argument) must have length equal to the number of outputs. Returns ------- out : ndarray or scalar Result. This is a scalar if `x` is a scalar. See Also -------- bitwise_and, bitwise_or, bitwise_xor logical_not binary_repr : Return the binary representation of the input number as a string. Examples -------- We've seen that 13 is represented by ``00001101``. The invert or bit-wise NOT of 13 is then: >>> x = np.invert(np.array(13, dtype=np.uint8)) >>> x 242 >>> np.binary_repr(x, width=8) '11110010' Notes ----- `bitwise_not` is an alias for `invert`: >>> np.bitwise_not is np.invert True
[ "r", "Compute", "bit", "-", "wise", "inversion", "or", "bit", "-", "wise", "NOT", "element", "-", "wise", ".", "Computes", "the", "bit", "-", "wise", "NOT", "of", "the", "underlying", "binary", "representation", "of", "the", "integers", "in", "the", "input", "arrays", ".", "This", "ufunc", "implements", "the", "C", "/", "Python", "operator", "~", ".", "Parameters", "----------", "x", ":", "array_like", "Only", "integer", "and", "boolean", "types", "are", "handled", ".", "out", ":", "ndarray", "None", "or", "tuple", "of", "ndarray", "and", "None", "optional", "A", "location", "into", "which", "the", "result", "is", "stored", ".", "If", "provided", "it", "must", "have", "a", "shape", "that", "the", "inputs", "broadcast", "to", ".", "If", "not", "provided", "or", "None", "a", "freshly", "-", "allocated", "array", "is", "returned", ".", "A", "tuple", "(", "possible", "only", "as", "a", "keyword", "argument", ")", "must", "have", "length", "equal", "to", "the", "number", "of", "outputs", ".", "Returns", "-------", "out", ":", "ndarray", "or", "scalar", "Result", ".", "This", "is", "a", "scalar", "if", "x", "is", "a", "scalar", ".", "See", "Also", "--------", "bitwise_and", "bitwise_or", "bitwise_xor", "logical_not", "binary_repr", ":", "Return", "the", "binary", "representation", "of", "the", "input", "number", "as", "a", "string", ".", "Examples", "--------", "We", "ve", "seen", "that", "13", "is", "represented", "by", "00001101", ".", "The", "invert", "or", "bit", "-", "wise", "NOT", "of", "13", "is", "then", ":", ">>>", "x", "=", "np", ".", "invert", "(", "np", ".", "array", "(", "13", "dtype", "=", "np", ".", "uint8", "))", ">>>", "x", "242", ">>>", "np", ".", "binary_repr", "(", "x", "width", "=", "8", ")", "11110010", "Notes", "-----", "bitwise_not", "is", "an", "alias", "for", "invert", ":", ">>>", "np", ".", "bitwise_not", "is", "np", ".", "invert", "True" ]
def bitwise_not(x, out=None, **kwargs): r""" Compute bit-wise inversion, or bit-wise NOT, element-wise. Computes the bit-wise NOT of the underlying binary representation of the integers in the input arrays. This ufunc implements the C/Python operator ``~``. Parameters ---------- x : array_like Only integer and boolean types are handled. out : ndarray, None, or tuple of ndarray and None, optional A location into which the result is stored. If provided, it must have a shape that the inputs broadcast to. If not provided or `None`, a freshly-allocated array is returned. A tuple (possible only as a keyword argument) must have length equal to the number of outputs. Returns ------- out : ndarray or scalar Result. This is a scalar if `x` is a scalar. See Also -------- bitwise_and, bitwise_or, bitwise_xor logical_not binary_repr : Return the binary representation of the input number as a string. Examples -------- We've seen that 13 is represented by ``00001101``. The invert or bit-wise NOT of 13 is then: >>> x = np.invert(np.array(13, dtype=np.uint8)) >>> x 242 >>> np.binary_repr(x, width=8) '11110010' Notes ----- `bitwise_not` is an alias for `invert`: >>> np.bitwise_not is np.invert True """ return _unary_func_helper(x, _npi.bitwise_not, _np.bitwise_not, out=out, **kwargs)
[ "def", "bitwise_not", "(", "x", ",", "out", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "_unary_func_helper", "(", "x", ",", "_npi", ".", "bitwise_not", ",", "_np", ".", "bitwise_not", ",", "out", "=", "out", ",", "*", "*", "kwargs", ")" ]
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/symbol/numpy/_symbol.py#L1202-L1243
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_gdi.py
python
GraphicsPath.Transform
(*args, **kwargs)
return _gdi_.GraphicsPath_Transform(*args, **kwargs)
Transform(self, GraphicsMatrix matrix) Transforms each point of this path by the matrix
Transform(self, GraphicsMatrix matrix)
[ "Transform", "(", "self", "GraphicsMatrix", "matrix", ")" ]
def Transform(*args, **kwargs): """ Transform(self, GraphicsMatrix matrix) Transforms each point of this path by the matrix """ return _gdi_.GraphicsPath_Transform(*args, **kwargs)
[ "def", "Transform", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "GraphicsPath_Transform", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_gdi.py#L6010-L6016
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py3/pandas/core/arrays/interval.py
python
_maybe_convert_platform_interval
(values)
return values
Try to do platform conversion, with special casing for IntervalArray. Wrapper around maybe_convert_platform that alters the default return dtype in certain cases to be compatible with IntervalArray. For example, empty lists return with integer dtype instead of object dtype, which is prohibited for IntervalArray. Parameters ---------- values : array-like Returns ------- array
Try to do platform conversion, with special casing for IntervalArray. Wrapper around maybe_convert_platform that alters the default return dtype in certain cases to be compatible with IntervalArray. For example, empty lists return with integer dtype instead of object dtype, which is prohibited for IntervalArray.
[ "Try", "to", "do", "platform", "conversion", "with", "special", "casing", "for", "IntervalArray", ".", "Wrapper", "around", "maybe_convert_platform", "that", "alters", "the", "default", "return", "dtype", "in", "certain", "cases", "to", "be", "compatible", "with", "IntervalArray", ".", "For", "example", "empty", "lists", "return", "with", "integer", "dtype", "instead", "of", "object", "dtype", "which", "is", "prohibited", "for", "IntervalArray", "." ]
def _maybe_convert_platform_interval(values) -> ArrayLike: """ Try to do platform conversion, with special casing for IntervalArray. Wrapper around maybe_convert_platform that alters the default return dtype in certain cases to be compatible with IntervalArray. For example, empty lists return with integer dtype instead of object dtype, which is prohibited for IntervalArray. Parameters ---------- values : array-like Returns ------- array """ if isinstance(values, (list, tuple)) and len(values) == 0: # GH 19016 # empty lists/tuples get object dtype by default, but this is # prohibited for IntervalArray, so coerce to integer instead return np.array([], dtype=np.int64) elif not is_list_like(values) or isinstance(values, ABCDataFrame): # This will raise later, but we avoid passing to maybe_convert_platform return values elif is_categorical_dtype(values): values = np.asarray(values) elif not hasattr(values, "dtype") and not isinstance(values, (list, tuple, range)): # TODO: should we just cast these to list? return values else: values = extract_array(values, extract_numpy=True) if not hasattr(values, "dtype"): return np.asarray(values) return values
[ "def", "_maybe_convert_platform_interval", "(", "values", ")", "->", "ArrayLike", ":", "if", "isinstance", "(", "values", ",", "(", "list", ",", "tuple", ")", ")", "and", "len", "(", "values", ")", "==", "0", ":", "# GH 19016", "# empty lists/tuples get object dtype by default, but this is", "# prohibited for IntervalArray, so coerce to integer instead", "return", "np", ".", "array", "(", "[", "]", ",", "dtype", "=", "np", ".", "int64", ")", "elif", "not", "is_list_like", "(", "values", ")", "or", "isinstance", "(", "values", ",", "ABCDataFrame", ")", ":", "# This will raise later, but we avoid passing to maybe_convert_platform", "return", "values", "elif", "is_categorical_dtype", "(", "values", ")", ":", "values", "=", "np", ".", "asarray", "(", "values", ")", "elif", "not", "hasattr", "(", "values", ",", "\"dtype\"", ")", "and", "not", "isinstance", "(", "values", ",", "(", "list", ",", "tuple", ",", "range", ")", ")", ":", "# TODO: should we just cast these to list?", "return", "values", "else", ":", "values", "=", "extract_array", "(", "values", ",", "extract_numpy", "=", "True", ")", "if", "not", "hasattr", "(", "values", ",", "\"dtype\"", ")", ":", "return", "np", ".", "asarray", "(", "values", ")", "return", "values" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/arrays/interval.py#L1638-L1672
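The empty-list special case is visible from the public side (a one-liner, assuming only pandas; exact dtype repr varies by version):

import pandas as pd

# maybe_convert_platform would give an empty list object dtype, which
# IntervalArray prohibits; the int64 coercion above keeps this legal:
ia = pd.arrays.IntervalArray.from_breaks([])
print(len(ia))  # 0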
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemFramework/v1/ResourceManager/lib/Crypto/Cipher/_mode_ccm.py
python
CcmMode.hexdigest
(self)
return "".join(["%02x" % bord(x) for x in self.digest()])
Compute the *printable* MAC tag. This method is like `digest`. :Return: the MAC, as a hexadecimal string.
Compute the *printable* MAC tag.
[ "Compute", "the", "*", "printable", "*", "MAC", "tag", "." ]
def hexdigest(self): """Compute the *printable* MAC tag. This method is like `digest`. :Return: the MAC, as a hexadecimal string. """ return "".join(["%02x" % bord(x) for x in self.digest()])
[ "def", "hexdigest", "(", "self", ")", ":", "return", "\"\"", ".", "join", "(", "[", "\"%02x\"", "%", "bord", "(", "x", ")", "for", "x", "in", "self", ".", "digest", "(", ")", "]", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/ResourceManager/lib/Crypto/Cipher/_mode_ccm.py#L500-L507
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/contrib/tensor_forest/python/tensor_forest.py
python
RandomTreeGraphs.inference_graph
(self, input_data, data_spec, sparse_features=None)
return model_ops.tree_predictions_v4( self.variables.tree, input_data, sparse_indices, sparse_values, sparse_shape, input_spec=data_spec.SerializeToString(), params=self.params.serialized_params_proto)
Constructs a TF graph for evaluating a random tree. Args: input_data: A tensor or placeholder for input data. data_spec: A TensorForestDataSpec proto specifying the original input columns. sparse_features: A tf.SparseTensor for sparse input data. Returns: A tuple of (probabilities, tree_paths).
Constructs a TF graph for evaluating a random tree.
[ "Constructs", "a", "TF", "graph", "for", "evaluating", "a", "random", "tree", "." ]
def inference_graph(self, input_data, data_spec, sparse_features=None): """Constructs a TF graph for evaluating a random tree. Args: input_data: A tensor or placeholder for input data. data_spec: A TensorForestDataSpec proto specifying the original input columns. sparse_features: A tf.SparseTensor for sparse input data. Returns: A tuple of (probabilities, tree_paths). """ sparse_indices = [] sparse_values = [] sparse_shape = [] if sparse_features is not None: sparse_indices = sparse_features.indices sparse_values = sparse_features.values sparse_shape = sparse_features.dense_shape if input_data is None: input_data = [] return model_ops.tree_predictions_v4( self.variables.tree, input_data, sparse_indices, sparse_values, sparse_shape, input_spec=data_spec.SerializeToString(), params=self.params.serialized_params_proto)
[ "def", "inference_graph", "(", "self", ",", "input_data", ",", "data_spec", ",", "sparse_features", "=", "None", ")", ":", "sparse_indices", "=", "[", "]", "sparse_values", "=", "[", "]", "sparse_shape", "=", "[", "]", "if", "sparse_features", "is", "not", "None", ":", "sparse_indices", "=", "sparse_features", ".", "indices", "sparse_values", "=", "sparse_features", ".", "values", "sparse_shape", "=", "sparse_features", ".", "dense_shape", "if", "input_data", "is", "None", ":", "input_data", "=", "[", "]", "return", "model_ops", ".", "tree_predictions_v4", "(", "self", ".", "variables", ".", "tree", ",", "input_data", ",", "sparse_indices", ",", "sparse_values", ",", "sparse_shape", ",", "input_spec", "=", "data_spec", ".", "SerializeToString", "(", ")", ",", "params", "=", "self", ".", "params", ".", "serialized_params_proto", ")" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/tensor_forest/python/tensor_forest.py#L651-L680
google/iree
1224bbdbe65b0d1fdf40e7324f60f68beeaf7c76
llvm-external-projects/iree-dialects/python/iree/compiler/dialects/iree_pydm/importer/util.py
python
ImportContext.box
(self, value: ir.Value, to_typed: Optional[bool] = True)
Boxes a value if necessary.
Boxes a value if necessary.
[ "Boxes", "a", "value", "if", "necessary", "." ]
def box(self, value: ir.Value, to_typed: Optional[bool] = True) -> ir.Value: """Boxes a value if necessary.""" with self.ip, self.loc: t = value.type if d.ObjectType.isinstance(t): # Already boxed. return value boxed_type = d.ObjectType.get_typed(t) if to_typed else d.ObjectType.get() return d.BoxOp(boxed_type, value).result
[ "def", "box", "(", "self", ",", "value", ":", "ir", ".", "Value", ",", "to_typed", ":", "Optional", "[", "bool", "]", "=", "True", ")", "->", "ir", ".", "Value", ":", "with", "self", ".", "ip", ",", "self", ".", "loc", ":", "t", "=", "value", ".", "type", "if", "d", ".", "ObjectType", ".", "isinstance", "(", "t", ")", ":", "# Already boxed.", "return", "value", "boxed_type", "=", "d", ".", "ObjectType", ".", "get_typed", "(", "t", ")", "if", "to_typed", "else", "d", ".", "ObjectType", ".", "get", "(", ")", "return", "d", ".", "BoxOp", "(", "boxed_type", ",", "value", ")", ".", "result" ]
https://github.com/google/iree/blob/1224bbdbe65b0d1fdf40e7324f60f68beeaf7c76/llvm-external-projects/iree-dialects/python/iree/compiler/dialects/iree_pydm/importer/util.py#L117-L125
cvxpy/cvxpy
5165b4fb750dfd237de8659383ef24b4b2e33aaf
cvxpy/atoms/elementwise/abs.py
python
abs.is_atom_concave
(self)
return False
Is the atom concave?
Is the atom concave?
[ "Is", "the", "atom", "concave?" ]
def is_atom_concave(self) -> bool: """Is the atom concave? """ return False
[ "def", "is_atom_concave", "(", "self", ")", "->", "bool", ":", "return", "False" ]
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/atoms/elementwise/abs.py#L47-L50
netket/netket
0d534e54ecbf25b677ea72af6b85947979420652
netket/graph/common_lattices.py
python
FCC
( extent: Sequence[int], *, pbc: Union[bool, Sequence[bool]] = True, **kwargs )
return Lattice( basis_vectors=basis, extent=extent, pbc=pbc, point_group=point_group, **kwargs )
Constructs an FCC lattice of a given spatial extent. Periodic boundary conditions can also be imposed. Sites are returned at the Bravais lattice points. Arguments: extent: Number of primitive unit cells along each direction, needs to be an array of length 3 pbc: If `True`, the lattice will have periodic boundary conditions (PBC); if `False`, the lattice will have open boundary conditions (OBC). This parameter can also be a list of booleans with same length as the parameter `length`, in which case each dimension will have PBC/OBC depending on the corresponding entry of `pbc`. kwargs: Additional keyword arguments are passed on to the constructor of :ref:`netket.graph.Lattice`. Example: Construct an FCC lattice with 3×3×3 primitive unit cells: >>> from netket.graph import FCC >>> g = FCC(extent=[3,3,3]) >>> print(g.n_nodes) 27
Constructs an FCC lattice of a given spatial extent. Periodic boundary conditions can also be imposed. Sites are returned at the Bravais lattice points.
[ "Constructs", "an", "FCC", "lattice", "of", "a", "given", "spatial", "extent", ".", "Periodic", "boundary", "conditions", "can", "also", "be", "imposed", ".", "Sites", "are", "returned", "at", "the", "Bravais", "lattice", "points", "." ]
def FCC( extent: Sequence[int], *, pbc: Union[bool, Sequence[bool]] = True, **kwargs ) -> Lattice: """Constructs an FCC lattice of a given spatial extent. Periodic boundary conditions can also be imposed. Sites are returned at the Bravais lattice points. Arguments: extent: Number of primitive unit cells along each direction, needs to be an array of length 3 pbc: If `True`, the lattice will have periodic boundary conditions (PBC); if `False`, the lattice will have open boundary conditions (OBC). This parameter can also be a list of booleans with same length as the parameter `length`, in which case each dimension will have PBC/OBC depending on the corresponding entry of `pbc`. kwargs: Additional keyword arguments are passed on to the constructor of :ref:`netket.graph.Lattice`. Example: Construct an FCC lattice with 3×3×3 primitive unit cells: >>> from netket.graph import FCC >>> g = FCC(extent=[3,3,3]) >>> print(g.n_nodes) 27 """ basis = [[0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 0]] # determine if full point group is realised by the simulation box point_group = cubic.Oh() if np.all(pbc) and len(set(extent)) == 1 else None return Lattice( basis_vectors=basis, extent=extent, pbc=pbc, point_group=point_group, **kwargs )
[ "def", "FCC", "(", "extent", ":", "Sequence", "[", "int", "]", ",", "*", ",", "pbc", ":", "Union", "[", "bool", ",", "Sequence", "[", "bool", "]", "]", "=", "True", ",", "*", "*", "kwargs", ")", "->", "Lattice", ":", "basis", "=", "[", "[", "0", ",", "0.5", ",", "0.5", "]", ",", "[", "0.5", ",", "0", ",", "0.5", "]", ",", "[", "0.5", ",", "0.5", ",", "0", "]", "]", "# determine if full point group is realised by the simulation box", "point_group", "=", "cubic", ".", "Oh", "(", ")", "if", "np", ".", "all", "(", "pbc", ")", "and", "len", "(", "set", "(", "extent", ")", ")", "==", "1", "else", "None", "return", "Lattice", "(", "basis_vectors", "=", "basis", ",", "extent", "=", "extent", ",", "pbc", "=", "pbc", ",", "point_group", "=", "point_group", ",", "*", "*", "kwargs", ")" ]
https://github.com/netket/netket/blob/0d534e54ecbf25b677ea72af6b85947979420652/netket/graph/common_lattices.py#L272-L304
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/framework/tensor_shape.py
python
Dimension.__eq__
(self, other)
return self._value == other.value
Returns true if `other` has the same known value as this Dimension.
Returns true if `other` has the same known value as this Dimension.
[ "Returns", "true", "if", "other", "has", "the", "same", "known", "value", "as", "this", "Dimension", "." ]
def __eq__(self, other): """Returns true if `other` has the same known value as this Dimension.""" try: other = as_dimension(other) except (TypeError, ValueError): return NotImplemented if self._value is None or other.value is None: return None return self._value == other.value
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "try", ":", "other", "=", "as_dimension", "(", "other", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "NotImplemented", "if", "self", ".", "_value", "is", "None", "or", "other", ".", "value", "is", "None", ":", "return", "None", "return", "self", ".", "_value", "==", "other", ".", "value" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/framework/tensor_shape.py#L46-L54
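A standalone sketch of the same tri-state semantics (not the TF class itself): known values compare normally, while any unknown yields None rather than False:

def dim_eq(a, b):
    # None stands for an unknown dimension, as in Dimension(None).
    if a is None or b is None:
        return None
    return a == b

print(dim_eq(32, 32))    # True
print(dim_eq(32, 64))    # False
print(dim_eq(None, 32))  # None -- "maybe equal"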
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/ma/core.py
python
compressed
(x)
return asanyarray(x).compressed()
Return all the non-masked data as a 1-D array. This function is equivalent to calling the "compressed" method of a `MaskedArray`, see `MaskedArray.compressed` for details. See Also -------- MaskedArray.compressed Equivalent method.
Return all the non-masked data as a 1-D array.
[ "Return", "all", "the", "non", "-", "masked", "data", "as", "a", "1", "-", "D", "array", "." ]
def compressed(x): """ Return all the non-masked data as a 1-D array. This function is equivalent to calling the "compressed" method of a `MaskedArray`, see `MaskedArray.compressed` for details. See Also -------- MaskedArray.compressed Equivalent method. """ return asanyarray(x).compressed()
[ "def", "compressed", "(", "x", ")", ":", "return", "asanyarray", "(", "x", ")", ".", "compressed", "(", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/ma/core.py#L6811-L6824
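Usage (standard numpy.ma call):

import numpy as np

x = np.ma.masked_array([1, 2, 3, 4], mask=[0, 1, 0, 1])
print(np.ma.compressed(x))  # [1 3]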
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
current/tools/gyp/pylib/gyp/xcodeproj_file.py
python
XCObject.EnsureNoIDCollisions
(self)
Verifies that no two objects have the same ID. Checks all descendants.
Verifies that no two objects have the same ID. Checks all descendants.
[ "Verifies", "that", "no", "two", "objects", "have", "the", "same", "ID", ".", "Checks", "all", "descendants", "." ]
def EnsureNoIDCollisions(self): """Verifies that no two objects have the same ID. Checks all descendants. """ ids = {} descendants = self.Descendants() for descendant in descendants: if descendant.id in ids: other = ids[descendant.id] raise KeyError( 'Duplicate ID %s, objects "%s" and "%s" in "%s"' % \ (descendant.id, str(descendant._properties), str(other._properties), self._properties['rootObject'].Name())) ids[descendant.id] = descendant
[ "def", "EnsureNoIDCollisions", "(", "self", ")", ":", "ids", "=", "{", "}", "descendants", "=", "self", ".", "Descendants", "(", ")", "for", "descendant", "in", "descendants", ":", "if", "descendant", ".", "id", "in", "ids", ":", "other", "=", "ids", "[", "descendant", ".", "id", "]", "raise", "KeyError", "(", "'Duplicate ID %s, objects \"%s\" and \"%s\" in \"%s\"'", "%", "(", "descendant", ".", "id", ",", "str", "(", "descendant", ".", "_properties", ")", ",", "str", "(", "other", ".", "_properties", ")", ",", "self", ".", "_properties", "[", "'rootObject'", "]", ".", "Name", "(", ")", ")", ")", "ids", "[", "descendant", ".", "id", "]", "=", "descendant" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/gyp/pylib/gyp/xcodeproj_file.py#L455-L468
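The same first-owner-wins duplicate check in isolation (a hypothetical standalone helper, not the Xcode object model):

def check_unique_ids(objects):
    seen = {}
    for obj in objects:
        if obj['id'] in seen:
            raise KeyError('Duplicate ID %s' % obj['id'])
        seen[obj['id']] = obj

check_unique_ids([{'id': 'A'}, {'id': 'B'}])    # passes
# check_unique_ids([{'id': 'A'}, {'id': 'A'}])  # would raise KeyError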
pytorch/pytorch
7176c92687d3cc847cc046bf002269c6949a21c2
torch/fx/graph.py
python
_Namespace.associate_name_with_obj
(self, name: str, obj: Any)
Associate a unique name with an object. Neither `name` nor `obj` should be associated already.
Associate a unique name with an object.
[ "Associate", "a", "unique", "name", "with", "an", "object", "." ]
def associate_name_with_obj(self, name: str, obj: Any): """Associate a unique name with an object. Neither `name` nor `obj` should be associated already. """ assert obj not in self._obj_to_name assert name in self._unassociated_names self._obj_to_name[obj] = name self._unassociated_names.remove(name)
[ "def", "associate_name_with_obj", "(", "self", ",", "name", ":", "str", ",", "obj", ":", "Any", ")", ":", "assert", "obj", "not", "in", "self", ".", "_obj_to_name", "assert", "name", "in", "self", ".", "_unassociated_names", "self", ".", "_obj_to_name", "[", "obj", "]", "=", "name", "self", ".", "_unassociated_names", ".", "remove", "(", "name", ")" ]
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/fx/graph.py#L161-L169
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/eclib/_filetree.py
python
FileTree.EnableLabelEditing
(self, enable=True)
Enable/Disable label editing. This functionality is enabled by default. @keyword enable: bool
Enable/Disable label editing. This functionality is enabled by default. @keyword enable: bool
[ "Enable", "/", "Disable", "label", "editing", ".", "This", "functionality", "is", "enabled", "by", "default", ".", "@keyword", "enable", ":", "bool" ]
def EnableLabelEditing(self, enable=True): """Enable/Disable label editing. This functionality is enabled by default. @keyword enable: bool """ self._editlabels = enable
[ "def", "EnableLabelEditing", "(", "self", ",", "enable", "=", "True", ")", ":", "self", ".", "_editlabels", "=", "enable" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/eclib/_filetree.py#L347-L353
trilinos/Trilinos
6168be6dd51e35e1cd681e9c4b24433e709df140
packages/seacas/scripts/exodus2.in.py
python
exodus.put_face_count_per_polyhedra
(self, blkID, entityCounts)
return True
status = exo.put_face_count_per_polyhedra(blkID, entityCounts) -> put in a count of faces for each polyhedron in an elem block input values: <int> blkID id of the block to be added if array_type == 'ctype': <list<float>> entityCounts if array_type == 'numpy': <np_array<double>> entityCounts return value(s): <bool> status True = successful execution
status = exo.put_face_count_per_polyhedra(blkID, entityCounts)
[ "status", "=", "exo", ".", "put_face_count_per_polyhedra", "(", "blkID", "entityCounts", ")" ]
def put_face_count_per_polyhedra(self, blkID, entityCounts): """ status = exo.put_face_count_per_polyhedra(blkID, entityCounts) -> put in a count of faces for each polyhedron in an elem block input values: <int> blkID id of the block to be added if array_type == 'ctype': <list<float>> entityCounts if array_type == 'numpy': <np_array<double>> entityCounts return value(s): <bool> status True = successful execution """ ebType = ex_entity_type("EX_ELEM_BLOCK") entity_counts = (c_int * len(entityCounts))() entity_counts[:] = entityCounts EXODUS_LIB.ex_put_entity_count_per_polyhedra( self.fileId, ebType, c_int(blkID), entity_counts) return True
[ "def", "put_face_count_per_polyhedra", "(", "self", ",", "blkID", ",", "entityCounts", ")", ":", "ebType", "=", "ex_entity_type", "(", "\"EX_ELEM_BLOCK\"", ")", "entity_counts", "=", "(", "c_int", "*", "len", "(", "entityCounts", ")", ")", "(", ")", "entity_counts", "[", ":", "]", "=", "entityCounts", "EXODUS_LIB", ".", "ex_put_entity_count_per_polyhedra", "(", "self", ".", "fileId", ",", "ebType", ",", "c_int", "(", "blkID", ")", ",", "entity_counts", ")", "return", "True" ]
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/scripts/exodus2.in.py#L3388-L3411
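The interesting part of this record is the ctypes marshaling; a standalone sketch of the same idiom (no Exodus library required):

```python
from ctypes import c_int

faces_per_polyhedron = [4, 6, 5]                       # plain Python list
entity_counts = (c_int * len(faces_per_polyhedron))()  # fixed-size C int array
entity_counts[:] = faces_per_polyhedron               # element-wise copy
print(list(entity_counts))                             # [4, 6, 5]
```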
adobe/chromium
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
chrome/tools/build/win/scan_server_dlls.py
python
ScanServerDlls
(config, distribution, output_dir)
return registered_dll_list
Scans for DLLs in the specified section of config that are in the subdirectory of output_dir named SERVERS_DIR. Returns a list of only the filename components of the paths to all matching DLLs.
Scans for DLLs in the specified section of config that are in the subdirectory of output_dir named SERVERS_DIR. Returns a list of only the filename components of the paths to all matching DLLs.
[ "Scans", "for", "DLLs", "in", "the", "specified", "section", "of", "config", "that", "are", "in", "the", "subdirectory", "of", "output_dir", "named", "SERVERS_DIR", ".", "Returns", "a", "list", "of", "only", "the", "filename", "components", "of", "the", "paths", "to", "all", "matching", "DLLs", "." ]
def ScanServerDlls(config, distribution, output_dir): """Scans for DLLs in the specified section of config that are in the subdirectory of output_dir named SERVERS_DIR. Returns a list of only the filename components of the paths to all matching DLLs. """ print "Scanning for server DLLs in " + output_dir registered_dll_list = [] ScanDllsInSection(config, 'GENERAL', output_dir, registered_dll_list) if distribution: if len(distribution) > 1 and distribution[0] == '_': distribution = distribution[1:] ScanDllsInSection(config, distribution.upper(), output_dir, registered_dll_list) return registered_dll_list
[ "def", "ScanServerDlls", "(", "config", ",", "distribution", ",", "output_dir", ")", ":", "print", "\"Scanning for server DLLs in \"", "+", "output_dir", "registered_dll_list", "=", "[", "]", "ScanDllsInSection", "(", "config", ",", "'GENERAL'", ",", "output_dir", ",", "registered_dll_list", ")", "if", "distribution", ":", "if", "len", "(", "distribution", ")", ">", "1", "and", "distribution", "[", "0", "]", "==", "'_'", ":", "distribution", "=", "distribution", "[", "1", ":", "]", "ScanDllsInSection", "(", "config", ",", "distribution", ".", "upper", "(", ")", ",", "output_dir", ",", "registered_dll_list", ")", "return", "registered_dll_list" ]
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/chrome/tools/build/win/scan_server_dlls.py#L86-L102
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/python/ops/control_flow_ops.py
python
GradLoopState.AddBackPropAccumulatedValue
(self, history_value, value, dead_branch=False)
return pop
Add the getter for an accumulated value in the grad context. This is added to the backprop loop. Called in the grad context to get the value of an accumulated value. The stack pop op must be guarded by the pred of the controlling cond. Args: history_value: The history (a stack) of a value. value: The value that is pushed onto the stack. dead_branch: True iff the tensor is on a dead branch of a cond. Returns: The current value (the top of the stack).
Add the getter for an accumulated value in the grad context.
[ "Add", "the", "getter", "for", "an", "accumulated", "value", "in", "the", "grad", "context", "." ]
def AddBackPropAccumulatedValue(self, history_value, value, dead_branch=False): """Add the getter for an accumulated value in the grad context. This is added to the backprop loop. Called in the grad context to get the value of an accumulated value. The stack pop op must be guarded by the pred of the controlling cond. Args: history_value: The history (a stack) of a value. value: The value that is pushed onto the stack. dead_branch: True iff the tensor is on a dead branch of a cond. Returns: The current value (the top of the stack). """ history_ctxt = history_value.op._get_control_flow_context() # Find the cond context that controls history_value. cond_ctxt = None value_ctxt = value.op._get_control_flow_context() while value_ctxt and value_ctxt != history_ctxt: if isinstance(value_ctxt, CondContext): cond_ctxt = value_ctxt break value_ctxt = value_ctxt.outer_context with ops.control_dependencies(None): self.grad_context.Enter() if cond_ctxt: # Guard stack pop with a switch if it is controlled by a cond grad_state = self pred = None while pred is None and grad_state: pred = grad_state.history_map.get(cond_ctxt.pred.name) grad_state = grad_state.outer_grad_state branch = (1 - cond_ctxt.branch) if dead_branch else cond_ctxt.branch history_value = _SwitchRefOrTensor(history_value, pred)[branch] pop = gen_data_flow_ops._stack_pop(history_value, value.dtype.base_dtype) self.grad_context.Exit() parallel_iterations = self.grad_context.parallel_iterations if parallel_iterations is not None and parallel_iterations > 1: # All pops are ordered after pivot_for_body and before grad_sync. self.grad_sync._add_control_input(pop.op) return pop
[ "def", "AddBackPropAccumulatedValue", "(", "self", ",", "history_value", ",", "value", ",", "dead_branch", "=", "False", ")", ":", "history_ctxt", "=", "history_value", ".", "op", ".", "_get_control_flow_context", "(", ")", "# Find the cond context that controls history_value.", "cond_ctxt", "=", "None", "value_ctxt", "=", "value", ".", "op", ".", "_get_control_flow_context", "(", ")", "while", "value_ctxt", "and", "value_ctxt", "!=", "history_ctxt", ":", "if", "isinstance", "(", "value_ctxt", ",", "CondContext", ")", ":", "cond_ctxt", "=", "value_ctxt", "break", "value_ctxt", "=", "value_ctxt", ".", "outer_context", "with", "ops", ".", "control_dependencies", "(", "None", ")", ":", "self", ".", "grad_context", ".", "Enter", "(", ")", "if", "cond_ctxt", ":", "# Guard stack pop with a switch if it is controlled by a cond", "grad_state", "=", "self", "pred", "=", "None", "while", "pred", "is", "None", "and", "grad_state", ":", "pred", "=", "grad_state", ".", "history_map", ".", "get", "(", "cond_ctxt", ".", "pred", ".", "name", ")", "grad_state", "=", "grad_state", ".", "outer_grad_state", "branch", "=", "(", "1", "-", "cond_ctxt", ".", "branch", ")", "if", "dead_branch", "else", "cond_ctxt", ".", "branch", "history_value", "=", "_SwitchRefOrTensor", "(", "history_value", ",", "pred", ")", "[", "branch", "]", "pop", "=", "gen_data_flow_ops", ".", "_stack_pop", "(", "history_value", ",", "value", ".", "dtype", ".", "base_dtype", ")", "self", ".", "grad_context", ".", "Exit", "(", ")", "parallel_iterations", "=", "self", ".", "grad_context", ".", "parallel_iterations", "if", "parallel_iterations", "is", "not", "None", "and", "parallel_iterations", ">", "1", ":", "# All pops are ordered after pivot_for_body and before grad_sync.", "self", ".", "grad_sync", ".", "_add_control_input", "(", "pop", ".", "op", ")", "return", "pop" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/ops/control_flow_ops.py#L704-L746
psi4/psi4
be533f7f426b6ccc263904e55122899b16663395
psi4/driver/ipi_broker.py
python
IPIBroker.calculate_gradient
(self, LOT, bypass_scf=False, **kwargs)
Calculate the gradient with @LOT. When bypass_scf=True, an HF energy calculation has already been done.
Calculate the gradient with @LOT.
[ "Calculate", "the", "gradient", "with", "@LOT", "." ]
def calculate_gradient(self, LOT, bypass_scf=False, **kwargs): """Calculate the gradient with @LOT. When bypass_scf=True, an HF energy calculation has already been done. """ start = time.time() self.grd = psi4.gradient(LOT, bypass_scf=bypass_scf, **kwargs) time_needed = time.time() - start self.timing[LOT] = self.timing.get(LOT, []) + [time_needed]
[ "def", "calculate_gradient", "(", "self", ",", "LOT", ",", "bypass_scf", "=", "False", ",", "*", "*", "kwargs", ")", ":", "start", "=", "time", ".", "time", "(", ")", "self", ".", "grd", "=", "psi4", ".", "gradient", "(", "LOT", ",", "bypass_scf", "=", "bypass_scf", ",", "*", "*", "kwargs", ")", "time_needed", "=", "time", ".", "time", "(", ")", "-", "start", "self", ".", "timing", "[", "LOT", "]", "=", "self", ".", "timing", ".", "get", "(", "LOT", ",", "[", "]", ")", "+", "[", "time_needed", "]" ]
https://github.com/psi4/psi4/blob/be533f7f426b6ccc263904e55122899b16663395/psi4/driver/ipi_broker.py#L118-L126
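A generic sketch of the per-key timing pattern in the last line, where each wall-clock measurement is appended to a list keyed by the level of theory; names here are illustrative, not psi4's API.

```python
import time

timing = {}

def record(key, fn, *args, **kwargs):
    start = time.time()
    result = fn(*args, **kwargs)
    timing[key] = timing.get(key, []) + [time.time() - start]  # append per key
    return result

record('scf', sum, range(100000))
print(timing)  # {'scf': [<elapsed seconds>]}
```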
generalized-intelligence/GAAS
29ab17d3e8a4ba18edef3a57c36d8db6329fac73
deprecated/algorithms/sfm/OpenSfM/opensfm/geo.py
python
TopocentricConverter.__init__
(self, reflat, reflon, refalt)
Init the converter given the reference origin.
Init the converter given the reference origin.
[ "Init", "the", "converter", "given", "the", "reference", "origin", "." ]
def __init__(self, reflat, reflon, refalt): """Init the converter given the reference origin.""" self.lat = reflat self.lon = reflon self.alt = refalt
[ "def", "__init__", "(", "self", ",", "reflat", ",", "reflon", ",", "refalt", ")", ":", "self", ".", "lat", "=", "reflat", "self", ".", "lon", "=", "reflon", "self", ".", "alt", "=", "refalt" ]
https://github.com/generalized-intelligence/GAAS/blob/29ab17d3e8a4ba18edef3a57c36d8db6329fac73/deprecated/algorithms/sfm/OpenSfM/opensfm/geo.py#L154-L158
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/autocomp/autocomp.py
python
MetaCompleter.__call__
(mcs, base, buff)
return obj
Modify the base class with our new methods at time of instantiation.
Modify the base class with our new methods at time of instantiation.
[ "Modify", "the", "base", "class", "with", "our", "new", "methods", "at", "time", "of", "instantiation", "." ]
def __call__(mcs, base, buff): """Modify the base class with our new methods at time of instantiation. """ obj = type.__call__(mcs, base, buff) # Set/override attributes on the new completer object. setattr(obj, 'BaseGetAutoCompList', obj.GetAutoCompList) setattr(obj, 'GetAutoCompList', lambda cmd: GetAutoCompList(obj, cmd)) setattr(obj, 'scomp', simplecomp.Completer(buff)) # Return the new augmented completer return obj
[ "def", "__call__", "(", "mcs", ",", "base", ",", "buff", ")", ":", "obj", "=", "type", ".", "__call__", "(", "mcs", ",", "base", ",", "buff", ")", "# Set/override attributes on the new completer object.", "setattr", "(", "obj", ",", "'BaseGetAutoCompList'", ",", "obj", ".", "GetAutoCompList", ")", "setattr", "(", "obj", ",", "'GetAutoCompList'", ",", "lambda", "cmd", ":", "GetAutoCompList", "(", "obj", ",", "cmd", ")", ")", "setattr", "(", "obj", ",", "'scomp'", ",", "simplecomp", ".", "Completer", "(", "buff", ")", ")", "# Return the new augmented completer", "return", "obj" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/autocomp/autocomp.py#L78-L91
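A minimal sketch of the metaclass trick above: intercept instantiation in `__call__` and attach extra attributes to every new instance. Names are illustrative, not Editra's completer API.

```python
class AugmentingMeta(type):
    def __call__(cls, *args, **kwargs):
        obj = type.__call__(cls, *args, **kwargs)  # build the instance
        setattr(obj, 'extra', lambda: 'added at instantiation')
        return obj

class Completer(metaclass=AugmentingMeta):
    pass

print(Completer().extra())  # 'added at instantiation'
```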
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
scripts/Inelastic/Direct/DirectEnergyConversion.py
python
DirectEnergyConversion.prop_man
(self,value)
Assign new instance of direct property manager to provide DirectEnergyConversion parameters
Assign new instance of direct property manager to provide DirectEnergyConversion parameters
[ "Assign", "new", "instance", "of", "direct", "property", "manager", "to", "provide", "DirectEnergyConversion", "parameters" ]
def prop_man(self,value): """ Assign new instance of direct property manager to provide DirectEnergyConversion parameters """ if isinstance(value,PropertyManager): self._propMan = value else: raise KeyError("Property manager can be initialized by an instance of PropertyManager only")
[ "def", "prop_man", "(", "self", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "PropertyManager", ")", ":", "self", ".", "_propMan", "=", "value", "else", ":", "raise", "KeyError", "(", "\"Property manager can be initialized by an instance of PropertyManager only\"", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/Inelastic/Direct/DirectEnergyConversion.py#L1337-L1342
learnforpractice/pyeos
4f04eb982c86c1fdb413084af77c713a6fda3070
libraries/vm/vm_cpython_ss/lib/codecs.py
python
StreamReaderWriter.__next__
(self)
return next(self.reader)
Return the next decoded line from the input stream.
Return the next decoded line from the input stream.
[ "Return", "the", "next", "decoded", "line", "from", "the", "input", "stream", "." ]
def __next__(self): """ Return the next decoded line from the input stream.""" return next(self.reader)
[ "def", "__next__", "(", "self", ")", ":", "return", "next", "(", "self", ".", "reader", ")" ]
https://github.com/learnforpractice/pyeos/blob/4f04eb982c86c1fdb413084af77c713a6fda3070/libraries/vm/vm_cpython_ss/lib/codecs.py#L708-L711
panda3d/panda3d
833ad89ebad58395d0af0b7ec08538e5e4308265
samples/networking/03-distributed-node/ClientRepository.py
python
GameClientRepository.lostConnection
(self)
This should be overridden by a derived class to handle an unexpectedly lost connection to the gameserver.
This should be overridden by a derived class to handle an unexpectedly lost connection to the gameserver.
[ "This", "should", "be", "overridden", "by", "a", "derived", "class", "to", "handle", "an", "unexpectedly", "lost", "connection", "to", "the", "gameserver", "." ]
def lostConnection(self): """ This should be overridden by a derived class to handle an unexpectedly lost connection to the gameserver. """ # Handle the disconnection from the server. This can be a reconnect, # simply exiting the application or anything else. exit()
[ "def", "lostConnection", "(", "self", ")", ":", "# Handle the disconnection from the server. This can be a reconnect,", "# simply exiting the application or anything else.", "exit", "(", ")" ]
https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/samples/networking/03-distributed-node/ClientRepository.py#L38-L43
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemFramework/v1/ResourceManager/lib/Crypto/Hash/CMAC.py
python
CMAC.update
(self, msg)
return self
Authenticate the next chunk of message. Args: msg (byte string/byte array/memoryview): The next chunk of data
Authenticate the next chunk of message.
[ "Authenticate", "the", "next", "chunk", "of", "message", "." ]
def update(self, msg): """Authenticate the next chunk of message. Args: msg (byte string/byte array/memoryview): The next chunk of data """ if self._mac_tag is not None and not self._update_after_digest: raise TypeError("update() cannot be called after digest() or verify()") self._data_size += len(msg) bs = self._block_size if self._cache_n > 0: filler = min(bs - self._cache_n, len(msg)) self._cache[self._cache_n:self._cache_n+filler] = msg[:filler] self._cache_n += filler if self._cache_n < bs: return self msg = memoryview(msg)[filler:] self._update(self._cache) self._cache_n = 0 remain = len(msg) % bs if remain > 0: self._update(msg[:-remain]) self._cache[:remain] = msg[-remain:] else: self._update(msg) self._cache_n = remain return self
[ "def", "update", "(", "self", ",", "msg", ")", ":", "if", "self", ".", "_mac_tag", "is", "not", "None", "and", "not", "self", ".", "_update_after_digest", ":", "raise", "TypeError", "(", "\"update() cannot be called after digest() or verify()\"", ")", "self", ".", "_data_size", "+=", "len", "(", "msg", ")", "bs", "=", "self", ".", "_block_size", "if", "self", ".", "_cache_n", ">", "0", ":", "filler", "=", "min", "(", "bs", "-", "self", ".", "_cache_n", ",", "len", "(", "msg", ")", ")", "self", ".", "_cache", "[", "self", ".", "_cache_n", ":", "self", ".", "_cache_n", "+", "filler", "]", "=", "msg", "[", ":", "filler", "]", "self", ".", "_cache_n", "+=", "filler", "if", "self", ".", "_cache_n", "<", "bs", ":", "return", "self", "msg", "=", "memoryview", "(", "msg", ")", "[", "filler", ":", "]", "self", ".", "_update", "(", "self", ".", "_cache", ")", "self", ".", "_cache_n", "=", "0", "remain", "=", "len", "(", "msg", ")", "%", "bs", "if", "remain", ">", "0", ":", "self", ".", "_update", "(", "msg", "[", ":", "-", "remain", "]", ")", "self", ".", "_cache", "[", ":", "remain", "]", "=", "msg", "[", "-", "remain", ":", "]", "else", ":", "self", ".", "_update", "(", "msg", ")", "self", ".", "_cache_n", "=", "remain", "return", "self" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/ResourceManager/lib/Crypto/Hash/CMAC.py#L114-L146
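A usage sketch through PyCryptodome's public API (this record appears to be a vendored copy of that library); update() may be called repeatedly, and partial blocks are cached exactly as the body above shows.

```python
from Crypto.Hash import CMAC
from Crypto.Cipher import AES

mac = CMAC.new(b'0123456789abcdef', ciphermod=AES)  # 16-byte AES key
mac.update(b'chunk one, ')  # partial block is cached
mac.update(b'chunk two')    # cache is flushed once a full block accumulates
print(mac.hexdigest())
```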
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemFramework/v1/ResourceManager/lib/Crypto/Protocol/KDF.py
python
PBKDF1
(password, salt, dkLen, count=1000, hashAlgo=None)
return pHash.digest()[:dkLen]
Derive one key from a password (or passphrase). This function performs key derivation according to an old version of the PKCS#5 standard (v1.5) or `RFC2898 <https://www.ietf.org/rfc/rfc2898.txt>`_. Args: password (string): The secret password to generate the key from. salt (byte string): An 8 byte string to use for better protection from dictionary attacks. This value does not need to be kept secret, but it should be randomly chosen for each derivation. dkLen (integer): The length of the desired key. The default is 16 bytes, suitable for instance for :mod:`Crypto.Cipher.AES`. count (integer): The number of iterations to carry out. The recommendation is 1000 or more. hashAlgo (module): The hash algorithm to use, as a module or an object from the :mod:`Crypto.Hash` package. The digest length must be no shorter than ``dkLen``. The default algorithm is :mod:`Crypto.Hash.SHA1`. Return: A byte string of length ``dkLen`` that can be used as key.
Derive one key from a password (or passphrase).
[ "Derive", "one", "key", "from", "a", "password", "(", "or", "passphrase", ")", "." ]
def PBKDF1(password, salt, dkLen, count=1000, hashAlgo=None): """Derive one key from a password (or passphrase). This function performs key derivation according to an old version of the PKCS#5 standard (v1.5) or `RFC2898 <https://www.ietf.org/rfc/rfc2898.txt>`_. Args: password (string): The secret password to generate the key from. salt (byte string): An 8 byte string to use for better protection from dictionary attacks. This value does not need to be kept secret, but it should be randomly chosen for each derivation. dkLen (integer): The length of the desired key. The default is 16 bytes, suitable for instance for :mod:`Crypto.Cipher.AES`. count (integer): The number of iterations to carry out. The recommendation is 1000 or more. hashAlgo (module): The hash algorithm to use, as a module or an object from the :mod:`Crypto.Hash` package. The digest length must be no shorter than ``dkLen``. The default algorithm is :mod:`Crypto.Hash.SHA1`. Return: A byte string of length ``dkLen`` that can be used as key. """ if not hashAlgo: hashAlgo = SHA1 password = tobytes(password) pHash = hashAlgo.new(password+salt) digest = pHash.digest_size if dkLen > digest: raise TypeError("Selected hash algorithm has a too short digest (%d bytes)." % digest) if len(salt) != 8: raise ValueError("Salt is not 8 bytes long (%d bytes instead)." % len(salt)) for i in iter_range(count-1): pHash = pHash.new(pHash.digest()) return pHash.digest()[:dkLen]
[ "def", "PBKDF1", "(", "password", ",", "salt", ",", "dkLen", ",", "count", "=", "1000", ",", "hashAlgo", "=", "None", ")", ":", "if", "not", "hashAlgo", ":", "hashAlgo", "=", "SHA1", "password", "=", "tobytes", "(", "password", ")", "pHash", "=", "hashAlgo", ".", "new", "(", "password", "+", "salt", ")", "digest", "=", "pHash", ".", "digest_size", "if", "dkLen", ">", "digest", ":", "raise", "TypeError", "(", "\"Selected hash algorithm has a too short digest (%d bytes).\"", "%", "digest", ")", "if", "len", "(", "salt", ")", "!=", "8", ":", "raise", "ValueError", "(", "\"Salt is not 8 bytes long (%d bytes instead).\"", "%", "len", "(", "salt", ")", ")", "for", "i", "in", "iter_range", "(", "count", "-", "1", ")", ":", "pHash", "=", "pHash", ".", "new", "(", "pHash", ".", "digest", "(", ")", ")", "return", "pHash", ".", "digest", "(", ")", "[", ":", "dkLen", "]" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/ResourceManager/lib/Crypto/Protocol/KDF.py#L55-L95
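A usage sketch; note the two constraints the code enforces: the salt must be exactly 8 bytes, and dkLen must not exceed the digest size (20 bytes for the default SHA1).

```python
from Crypto.Protocol.KDF import PBKDF1
from Crypto.Hash import SHA1

key = PBKDF1('my passphrase', b'\x00\x01\x02\x03\x04\x05\x06\x07',
             dkLen=16, count=1000, hashAlgo=SHA1)
print(key.hex())  # 16-byte derived key
```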
SFTtech/openage
d6a08c53c48dc1e157807471df92197f6ca9e04d
openage/util/fslike/path.py
python
Path.with_suffix
(self, suffix)
return self.parent.joinpath(self.stem + suffix)
Returns path for different suffix (same parent and stem).
Returns path for different suffix (same parent and stem).
[ "Returns", "path", "for", "different", "suffix", "(", "same", "parent", "and", "stem", ")", "." ]
def with_suffix(self, suffix): """ Returns path for different suffix (same parent and stem). """ if isinstance(suffix, bytes): suffix = suffix.decode() return self.parent.joinpath(self.stem + suffix)
[ "def", "with_suffix", "(", "self", ",", "suffix", ")", ":", "if", "isinstance", "(", "suffix", ",", "bytes", ")", ":", "suffix", "=", "suffix", ".", "decode", "(", ")", "return", "self", ".", "parent", ".", "joinpath", "(", "self", ".", "stem", "+", "suffix", ")" ]
https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/util/fslike/path.py#L317-L322
metashell/metashell
f4177e4854ea00c8dbc722cadab26ef413d798ea
3rd/templight/llvm/utils/docker/scripts/llvm_checksum/llvm_checksum.py
python
ReadLLVMChecksums
(f)
return checksums
Reads checksums from a text file, produced by WriteLLVMChecksums. Returns: A dict, mapping from project name to project checksum.
Reads checksums from a text file, produced by WriteLLVMChecksums.
[ "Reads", "checksums", "from", "a", "text", "file", "produced", "by", "WriteLLVMChecksums", "." ]
def ReadLLVMChecksums(f): """Reads checksums from a text file, produced by WriteLLVMChecksums. Returns: A dict, mapping from project name to project checksum. """ checksums = {} while True: line = f.readline() if line == "": break checksum, proj = line.split() checksums[proj] = checksum return checksums
[ "def", "ReadLLVMChecksums", "(", "f", ")", ":", "checksums", "=", "{", "}", "while", "True", ":", "line", "=", "f", ".", "readline", "(", ")", "if", "line", "==", "\"\"", ":", "break", "checksum", ",", "proj", "=", "line", ".", "split", "(", ")", "checksums", "[", "proj", "]", "=", "checksum", "return", "checksums" ]
https://github.com/metashell/metashell/blob/f4177e4854ea00c8dbc722cadab26ef413d798ea/3rd/templight/llvm/utils/docker/scripts/llvm_checksum/llvm_checksum.py#L144-L157
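A self-contained sketch of the same parsing loop, fed from an in-memory file where each line is `<checksum> <project>`:

```python
import io

f = io.StringIO('abc123 llvm\ndef456 clang\n')
checksums = {}
while True:
    line = f.readline()
    if line == '':  # EOF
        break
    checksum, proj = line.split()
    checksums[proj] = checksum
print(checksums)  # {'llvm': 'abc123', 'clang': 'def456'}
```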
mongodb/mongo
d8ff665343ad29cf286ee2cf4a1960d29371937b
buildscripts/task_generation/suite_split.py
python
GeneratedSuite.sub_suite_task_name
(self, index: Optional[int] = None)
return taskname.name_generated_task(self.task_name, index, len(self.sub_suites), self.build_variant)
Get the name of the task that runs one of the generated sub-suites. :param index: Index of suite or None for '_misc' suite. :return: Name of generated Evergreen task.
Get the name of the task that runs one of the generated sub-suites.
[ "Get", "the", "name", "of", "the", "task", "that", "runs", "one", "of", "the", "generated", "sub", "-", "suites", "." ]
def sub_suite_task_name(self, index: Optional[int] = None) -> str: """ Get the name of the task that runs one of the generated sub-suites. :param index: Index of suite or None for '_misc' suite. :return: Name of generated Evergreen task. """ return taskname.name_generated_task(self.task_name, index, len(self.sub_suites), self.build_variant)
[ "def", "sub_suite_task_name", "(", "self", ",", "index", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "str", ":", "return", "taskname", ".", "name_generated_task", "(", "self", ".", "task_name", ",", "index", ",", "len", "(", "self", ".", "sub_suites", ")", ",", "self", ".", "build_variant", ")" ]
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/task_generation/suite_split.py#L110-L118
dicecco1/fpga_caffe
7a191704efd7873071cfef35772d7e7bf3e3cfd6
scripts/cpp_lint.py
python
_BlockInfo.CheckBegin
(self, filename, clean_lines, linenum, error)
Run checks that applies to text up to the opening brace. This is mostly for checking the text after the class identifier and the "{", usually where the base class is specified. For other blocks, there isn't much to check, so we always pass. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Run checks that applies to text up to the opening brace.
[ "Run", "checks", "that", "applies", "to", "text", "up", "to", "the", "opening", "brace", "." ]
def CheckBegin(self, filename, clean_lines, linenum, error): """Run checks that applies to text up to the opening brace. This is mostly for checking the text after the class identifier and the "{", usually where the base class is specified. For other blocks, there isn't much to check, so we always pass. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ pass
[ "def", "CheckBegin", "(", "self", ",", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "pass" ]
https://github.com/dicecco1/fpga_caffe/blob/7a191704efd7873071cfef35772d7e7bf3e3cfd6/scripts/cpp_lint.py#L1767-L1780
argman/EAST
dca414de39a3a4915a019c9a02c1832a31cdd0ca
icdar.py
python
check_and_validate_polys
(polys, tags, xxx_todo_changeme)
return np.array(validated_polys), np.array(validated_tags)
check that the text polys are in the same direction, and also filter some invalid polygons :param polys: :param tags: :return:
check that the text polys are in the same direction, and also filter some invalid polygons :param polys: :param tags: :return:
[ "check", "that", "the", "text", "polys", "are", "in", "the", "same", "direction", "and", "also", "filter", "some", "invalid", "polygons", ":", "param", "polys", ":", ":", "param", "tags", ":", ":", "return", ":" ]
def check_and_validate_polys(polys, tags, xxx_todo_changeme): ''' check that the text polys are in the same direction, and also filter some invalid polygons :param polys: :param tags: :return: ''' (h, w) = xxx_todo_changeme if polys.shape[0] == 0: return polys polys[:, :, 0] = np.clip(polys[:, :, 0], 0, w-1) polys[:, :, 1] = np.clip(polys[:, :, 1], 0, h-1) validated_polys = [] validated_tags = [] for poly, tag in zip(polys, tags): p_area = polygon_area(poly) if abs(p_area) < 1: # print poly print('invalid poly') continue if p_area > 0: print('poly in wrong direction') poly = poly[(0, 3, 2, 1), :] validated_polys.append(poly) validated_tags.append(tag) return np.array(validated_polys), np.array(validated_tags)
[ "def", "check_and_validate_polys", "(", "polys", ",", "tags", ",", "xxx_todo_changeme", ")", ":", "(", "h", ",", "w", ")", "=", "xxx_todo_changeme", "if", "polys", ".", "shape", "[", "0", "]", "==", "0", ":", "return", "polys", "polys", "[", ":", ",", ":", ",", "0", "]", "=", "np", ".", "clip", "(", "polys", "[", ":", ",", ":", ",", "0", "]", ",", "0", ",", "w", "-", "1", ")", "polys", "[", ":", ",", ":", ",", "1", "]", "=", "np", ".", "clip", "(", "polys", "[", ":", ",", ":", ",", "1", "]", ",", "0", ",", "h", "-", "1", ")", "validated_polys", "=", "[", "]", "validated_tags", "=", "[", "]", "for", "poly", ",", "tag", "in", "zip", "(", "polys", ",", "tags", ")", ":", "p_area", "=", "polygon_area", "(", "poly", ")", "if", "abs", "(", "p_area", ")", "<", "1", ":", "# print poly", "print", "(", "'invalid poly'", ")", "continue", "if", "p_area", ">", "0", ":", "print", "(", "'poly in wrong direction'", ")", "poly", "=", "poly", "[", "(", "0", ",", "3", ",", "2", ",", "1", ")", ",", ":", "]", "validated_polys", ".", "append", "(", "poly", ")", "validated_tags", ".", "append", "(", "tag", ")", "return", "np", ".", "array", "(", "validated_polys", ")", ",", "np", ".", "array", "(", "validated_tags", ")" ]
https://github.com/argman/EAST/blob/dca414de39a3a4915a019c9a02c1832a31cdd0ca/icdar.py#L85-L112
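The record relies on a `polygon_area` helper that is not shown; a plausible shoelace-formula sketch whose sign encodes winding direction, which is what the `p_area > 0` branch above tests (an assumption about EAST's helper, not its verbatim code):

```python
import numpy as np

def polygon_area(poly):
    # Signed shoelace area; the sign flips when the vertex order reverses.
    x, y = poly[:, 0], poly[:, 1]
    return 0.5 * np.sum(x * np.roll(y, -1) - np.roll(x, -1) * y)

square = np.array([[0, 0], [0, 1], [1, 1], [1, 0]], dtype=np.float64)
print(polygon_area(square))        # -1.0 for this vertex order
print(polygon_area(square[::-1]))  # +1.0 when the order is reversed
```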
panda3d/panda3d
833ad89ebad58395d0af0b7ec08538e5e4308265
makepanda/installpanda.py
python
GetDebLibDir
()
return "lib"
Returns the lib dir according to the debian system.
Returns the lib dir according to the debian system.
[ "Returns", "the", "lib", "dir", "according", "to", "the", "debian", "system", "." ]
def GetDebLibDir(): """ Returns the lib dir according to the debian system. """ # We're on Debian or Ubuntu, which use multiarch directories. # Call dpkg-architecture to get the multiarch libdir. handle = os.popen("dpkg-architecture -qDEB_HOST_MULTIARCH") multiarch = handle.read().strip() if handle.close(): # It failed. Old Debian/Ubuntu version? pass elif len(multiarch) > 0: return "lib/" + multiarch return "lib"
[ "def", "GetDebLibDir", "(", ")", ":", "# We're on Debian or Ubuntu, which use multiarch directories.", "# Call dpkg-architecture to get the multiarch libdir.", "handle", "=", "os", ".", "popen", "(", "\"dpkg-architecture -qDEB_HOST_MULTIARCH\"", ")", "multiarch", "=", "handle", ".", "read", "(", ")", ".", "strip", "(", ")", "if", "handle", ".", "close", "(", ")", ":", "# It failed. Old Debian/Ubuntu version?", "pass", "elif", "len", "(", "multiarch", ")", ">", "0", ":", "return", "\"lib/\"", "+", "multiarch", "return", "\"lib\"" ]
https://github.com/panda3d/panda3d/blob/833ad89ebad58395d0af0b7ec08538e5e4308265/makepanda/installpanda.py#L90-L102
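The same probe sketched with the modern subprocess API (the record uses the older os.popen form), falling back to plain "lib" when dpkg-architecture is unavailable or fails:

```python
import subprocess

def get_deb_lib_dir():
    try:
        multiarch = subprocess.check_output(
            ['dpkg-architecture', '-qDEB_HOST_MULTIARCH']).decode().strip()
    except (OSError, subprocess.CalledProcessError):
        return 'lib'  # old Debian/Ubuntu, or tool missing
    return 'lib/' + multiarch if multiarch else 'lib'
```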
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py2/sklearn/datasets/samples_generator.py
python
make_friedman2
(n_samples=100, noise=0.0, random_state=None)
return X, y
Generate the "Friedman \#2" regression problem This dataset is described in Friedman [1] and Breiman [2]. Inputs `X` are 4 independent features uniformly distributed on the intervals:: 0 <= X[:, 0] <= 100, 40 * pi <= X[:, 1] <= 560 * pi, 0 <= X[:, 2] <= 1, 1 <= X[:, 3] <= 11. The output `y` is created according to the formula:: y(X) = (X[:, 0] ** 2 + (X[:, 1] * X[:, 2] \ - 1 / (X[:, 1] * X[:, 3])) ** 2) ** 0.5 + noise * N(0, 1). Read more in the :ref:`User Guide <sample_generators>`. Parameters ---------- n_samples : int, optional (default=100) The number of samples. noise : float, optional (default=0.0) The standard deviation of the gaussian noise applied to the output. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, 4] The input samples. y : array of shape [n_samples] The output values. References ---------- .. [1] J. Friedman, "Multivariate adaptive regression splines", The Annals of Statistics 19 (1), pages 1-67, 1991. .. [2] L. Breiman, "Bagging predictors", Machine Learning 24, pages 123-140, 1996.
Generate the "Friedman \#2" regression problem
[ "Generate", "the", "Friedman", "\\", "#2", "regression", "problem" ]
def make_friedman2(n_samples=100, noise=0.0, random_state=None): """Generate the "Friedman \#2" regression problem This dataset is described in Friedman [1] and Breiman [2]. Inputs `X` are 4 independent features uniformly distributed on the intervals:: 0 <= X[:, 0] <= 100, 40 * pi <= X[:, 1] <= 560 * pi, 0 <= X[:, 2] <= 1, 1 <= X[:, 3] <= 11. The output `y` is created according to the formula:: y(X) = (X[:, 0] ** 2 + (X[:, 1] * X[:, 2] \ - 1 / (X[:, 1] * X[:, 3])) ** 2) ** 0.5 + noise * N(0, 1). Read more in the :ref:`User Guide <sample_generators>`. Parameters ---------- n_samples : int, optional (default=100) The number of samples. noise : float, optional (default=0.0) The standard deviation of the gaussian noise applied to the output. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Returns ------- X : array of shape [n_samples, 4] The input samples. y : array of shape [n_samples] The output values. References ---------- .. [1] J. Friedman, "Multivariate adaptive regression splines", The Annals of Statistics 19 (1), pages 1-67, 1991. .. [2] L. Breiman, "Bagging predictors", Machine Learning 24, pages 123-140, 1996. """ generator = check_random_state(random_state) X = generator.rand(n_samples, 4) X[:, 0] *= 100 X[:, 1] *= 520 * np.pi X[:, 1] += 40 * np.pi X[:, 3] *= 10 X[:, 3] += 1 y = (X[:, 0] ** 2 + (X[:, 1] * X[:, 2] - 1 / (X[:, 1] * X[:, 3])) ** 2) ** 0.5 \ + noise * generator.randn(n_samples) return X, y
[ "def", "make_friedman2", "(", "n_samples", "=", "100", ",", "noise", "=", "0.0", ",", "random_state", "=", "None", ")", ":", "generator", "=", "check_random_state", "(", "random_state", ")", "X", "=", "generator", ".", "rand", "(", "n_samples", ",", "4", ")", "X", "[", ":", ",", "0", "]", "*=", "100", "X", "[", ":", ",", "1", "]", "*=", "520", "*", "np", ".", "pi", "X", "[", ":", ",", "1", "]", "+=", "40", "*", "np", ".", "pi", "X", "[", ":", ",", "3", "]", "*=", "10", "X", "[", ":", ",", "3", "]", "+=", "1", "y", "=", "(", "X", "[", ":", ",", "0", "]", "**", "2", "+", "(", "X", "[", ":", ",", "1", "]", "*", "X", "[", ":", ",", "2", "]", "-", "1", "/", "(", "X", "[", ":", ",", "1", "]", "*", "X", "[", ":", ",", "3", "]", ")", ")", "**", "2", ")", "**", "0.5", "+", "noise", "*", "generator", ".", "randn", "(", "n_samples", ")", "return", "X", ",", "y" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/datasets/samples_generator.py#L837-L900
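A usage sketch via scikit-learn's public API (this record is a vendored copy of the same generator):

```python
from sklearn.datasets import make_friedman2

X, y = make_friedman2(n_samples=200, noise=0.1, random_state=0)
print(X.shape, y.shape)  # (200, 4) (200,)
```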
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/python/training/basic_session_run_hooks.py
python
SummarySaverHook.__init__
(self, save_steps=100, output_dir=None, summary_writer=None, scaffold=None, summary_op=None)
Initializes a `SummarySaver` monitor. Args: save_steps: `int`, save summaries every N steps. See `EveryN`. output_dir: `string`, the directory to save the summaries to. Only used if no `summary_writer` is supplied. summary_writer: `SummaryWriter`. If `None` and an `output_dir` was passed, one will be created accordingly. scaffold: `Scaffold` to get summary_op if it's not provided. summary_op: `Tensor` of type `string`. A serialized `Summary` protocol buffer, as output by TF summary methods like `scalar_summary` or `merge_all_summaries`.
Initializes a `SummarySaver` monitor.
[ "Initializes", "a", "SummarySaver", "monitor", "." ]
def __init__(self, save_steps=100, output_dir=None, summary_writer=None, scaffold=None, summary_op=None): """Initializes a `SummarySaver` monitor. Args: save_steps: `int`, save summaries every N steps. See `EveryN`. output_dir: `string`, the directory to save the summaries to. Only used if no `summary_writer` is supplied. summary_writer: `SummaryWriter`. If `None` and an `output_dir` was passed, one will be created accordingly. scaffold: `Scaffold` to get summary_op if it's not provided. summary_op: `Tensor` of type `string`. A serialized `Summary` protocol buffer, as output by TF summary methods like `scalar_summary` or `merge_all_summaries`. """ # TODO(ipolosukhin): Implement every N seconds. self._summary_op = summary_op self._summary_writer = summary_writer if summary_writer is None and output_dir: self._summary_writer = SummaryWriterCache.get(output_dir) self._scaffold = scaffold self._save_steps = save_steps
[ "def", "__init__", "(", "self", ",", "save_steps", "=", "100", ",", "output_dir", "=", "None", ",", "summary_writer", "=", "None", ",", "scaffold", "=", "None", ",", "summary_op", "=", "None", ")", ":", "# TODO(ipolosukhin): Implement every N seconds.", "self", ".", "_summary_op", "=", "summary_op", "self", ".", "_summary_writer", "=", "summary_writer", "if", "summary_writer", "is", "None", "and", "output_dir", ":", "self", ".", "_summary_writer", "=", "SummaryWriterCache", ".", "get", "(", "output_dir", ")", "self", ".", "_scaffold", "=", "scaffold", "self", ".", "_save_steps", "=", "save_steps" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/training/basic_session_run_hooks.py#L321-L346
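A usage sketch against the TF 1.x public name this class was exported under (`tf.train.SummarySaverHook`); `merge_all()` returns None when no summaries exist, so this assumes some were defined earlier.

```python
import tensorflow as tf  # TF 1.x API

summary_op = tf.summary.merge_all()  # None unless summaries were defined
hook = tf.train.SummarySaverHook(save_steps=100,
                                 output_dir='/tmp/summaries',
                                 summary_op=summary_op)
# Typically passed as tf.train.MonitoredTrainingSession(hooks=[hook], ...)
```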
koth/kcws
88efbd36a7022de4e6e90f5a1fb880cf87cfae9f
third_party/python/cpplint/cpplint.py
python
FindNextMultiLineCommentStart
(lines, lineix)
return len(lines)
Find the beginning marker for a multiline comment.
Find the beginning marker for a multiline comment.
[ "Find", "the", "beginning", "marker", "for", "a", "multiline", "comment", "." ]
def FindNextMultiLineCommentStart(lines, lineix): """Find the beginning marker for a multiline comment.""" while lineix < len(lines): if lines[lineix].strip().startswith('/*'): # Only return this marker if the comment goes beyond this line if lines[lineix].strip().find('*/', 2) < 0: return lineix lineix += 1 return len(lines)
[ "def", "FindNextMultiLineCommentStart", "(", "lines", ",", "lineix", ")", ":", "while", "lineix", "<", "len", "(", "lines", ")", ":", "if", "lines", "[", "lineix", "]", ".", "strip", "(", ")", ".", "startswith", "(", "'/*'", ")", ":", "# Only return this marker if the comment goes beyond this line", "if", "lines", "[", "lineix", "]", ".", "strip", "(", ")", ".", "find", "(", "'*/'", ",", "2", ")", "<", "0", ":", "return", "lineix", "lineix", "+=", "1", "return", "len", "(", "lines", ")" ]
https://github.com/koth/kcws/blob/88efbd36a7022de4e6e90f5a1fb880cf87cfae9f/third_party/python/cpplint/cpplint.py#L1231-L1239
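A quick self-contained demo restating the same scan over a toy file body; index 1 is returned because the `/*` there has no closing `*/` on its own line.

```python
def find_next_multiline_comment_start(lines, lineix):
    while lineix < len(lines):
        if lines[lineix].strip().startswith('/*'):
            if lines[lineix].strip().find('*/', 2) < 0:  # spans multiple lines
                return lineix
        lineix += 1
    return len(lines)

lines = ['int x;  /* inline */', '/* start', '   still comment */', 'int y;']
print(find_next_multiline_comment_start(lines, 0))  # 1
```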
trilinos/Trilinos
6168be6dd51e35e1cd681e9c4b24433e709df140
packages/seacas/scripts/exomerge2.py
python
ExodusModel.get_node_field_values
(self, node_field_name, timestep='last')
return self.node_fields[node_field_name][timestep_index]
Return the list of node field values for the given field and timestep. This returns the actual list of values, so any modifications to the list will be retained in the model. Examples: >>> model.get_node_field_values('disp_x') >>> model.get_node_field_values('disp_x', 0.0)
Return the list of node field values for the given field and timestep.
[ "Return", "the", "list", "of", "node", "field", "values", "for", "the", "given", "field", "and", "timestep", "." ]
def get_node_field_values(self, node_field_name, timestep='last'): """ Return the list of node field values for the given field and timestep. This returns the actual list of values, so any modifications to the list will be retained in the model. Examples: >>> model.get_node_field_values('disp_x') >>> model.get_node_field_values('disp_x', 0.0) """ [node_field_name] = self._format_id_list( [node_field_name], self.get_node_field_names(), 'node field', single=True) timestep_index = self._get_internal_timestep_index(timestep) return self.node_fields[node_field_name][timestep_index]
[ "def", "get_node_field_values", "(", "self", ",", "node_field_name", ",", "timestep", "=", "'last'", ")", ":", "[", "node_field_name", "]", "=", "self", ".", "_format_id_list", "(", "[", "node_field_name", "]", ",", "self", ".", "get_node_field_names", "(", ")", ",", "'node field'", ",", "single", "=", "True", ")", "timestep_index", "=", "self", ".", "_get_internal_timestep_index", "(", "timestep", ")", "return", "self", ".", "node_fields", "[", "node_field_name", "]", "[", "timestep_index", "]" ]
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/scripts/exomerge2.py#L4970-L4988
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/compiler/tf2xla/python/xla.py
python
reduce_window
(operand, init, reducer, window_dimensions, window_strides=None, base_dilations=None, window_dilations=None, padding=None, name=None)
return gen_xla_ops.xla_reduce_window( input=operand, init_value=init, window_dimensions=window_dimensions, window_strides=window_strides, base_dilations=base_dilations, window_dilations=window_dilations, padding=padding, computation=reducer, name=name)
Wraps the XLA ReduceWindow operator. ReduceWindow is documented at https://www.tensorflow.org/performance/xla/operation_semantics#reducewindow . Args: operand: the input tensor init: a scalar tensor representing the initial value for the reduction reducer: a reduction function that combines a pair of scalars. window_dimensions: shape of the window, as a list of integers window_strides: inter-window strides, as a list of integers. Optional; if omitted, defaults to strides of 1. padding: padding to apply to 'operand'. List of (low, high) pairs of integers that specify the padding to apply before and after each dimension. Optional; if omitted, defaults to no padding. name: the operator name, or None. Returns: A tensor that represents the output of the reduce_window operator.
Wraps the XLA ReduceWindow operator.
[ "Wraps", "the", "XLA", "ReduceWindow", "operator", "." ]
def reduce_window(operand, init, reducer, window_dimensions, window_strides=None, base_dilations=None, window_dilations=None, padding=None, name=None): """Wraps the XLA ReduceWindow operator. ReduceWindow is documented at https://www.tensorflow.org/performance/xla/operation_semantics#reducewindow . Args: operand: the input tensor init: a scalar tensor representing the initial value for the reduction reducer: a reduction function that combines a pair of scalars. window_dimensions: shape of the window, as a list of integers window_strides: inter-window strides, as a list of integers. Optional; if omitted, defaults to strides of 1. padding: padding to apply to 'operand'. List of (low, high) pairs of integers that specify the padding to apply before and after each dimension. Optional; if omitted, defaults to no padding. name: the operator name, or None. Returns: A tensor that represents the output of the reduce_window operator. """ window_strides = window_strides or [1] * len(window_dimensions) base_dilations = base_dilations or [1] * len(window_dimensions) window_dilations = window_dilations or [1] * len(window_dimensions) padding = padding or [(0, 0)] * len(window_dimensions) return gen_xla_ops.xla_reduce_window( input=operand, init_value=init, window_dimensions=window_dimensions, window_strides=window_strides, base_dilations=base_dilations, window_dilations=window_dilations, padding=padding, computation=reducer, name=name)
[ "def", "reduce_window", "(", "operand", ",", "init", ",", "reducer", ",", "window_dimensions", ",", "window_strides", "=", "None", ",", "base_dilations", "=", "None", ",", "window_dilations", "=", "None", ",", "padding", "=", "None", ",", "name", "=", "None", ")", ":", "window_strides", "=", "window_strides", "or", "[", "1", "]", "*", "len", "(", "window_dimensions", ")", "base_dilations", "=", "base_dilations", "or", "[", "1", "]", "*", "len", "(", "window_dimensions", ")", "window_dilations", "=", "window_dilations", "or", "[", "1", "]", "*", "len", "(", "window_dimensions", ")", "padding", "=", "padding", "or", "[", "(", "0", ",", "0", ")", "]", "*", "len", "(", "window_dimensions", ")", "return", "gen_xla_ops", ".", "xla_reduce_window", "(", "input", "=", "operand", ",", "init_value", "=", "init", ",", "window_dimensions", "=", "window_dimensions", ",", "window_strides", "=", "window_strides", ",", "base_dilations", "=", "base_dilations", ",", "window_dilations", "=", "window_dilations", ",", "padding", "=", "padding", ",", "computation", "=", "reducer", ",", "name", "=", "name", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/compiler/tf2xla/python/xla.py#L330-L372
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/protobuf/py3/google/protobuf/symbol_database.py
python
SymbolDatabase.RegisterMessage
(self, message)
return message
Registers the given message type in the local database. Calls to GetSymbol() and GetMessages() will return messages registered here. Args: message: A :class:`google.protobuf.message.Message` subclass (or instance); its descriptor will be registered. Returns: The provided message.
Registers the given message type in the local database.
[ "Registers", "the", "given", "message", "type", "in", "the", "local", "database", "." ]
def RegisterMessage(self, message): """Registers the given message type in the local database. Calls to GetSymbol() and GetMessages() will return messages registered here. Args: message: A :class:`google.protobuf.message.Message` subclass (or instance); its descriptor will be registered. Returns: The provided message. """ desc = message.DESCRIPTOR self._classes[desc] = message self.RegisterMessageDescriptor(desc) return message
[ "def", "RegisterMessage", "(", "self", ",", "message", ")", ":", "desc", "=", "message", ".", "DESCRIPTOR", "self", ".", "_classes", "[", "desc", "]", "=", "message", "self", ".", "RegisterMessageDescriptor", "(", "desc", ")", "return", "message" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/protobuf/py3/google/protobuf/symbol_database.py#L69-L85
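A usage sketch with the public protobuf API; `MyMessage` is a placeholder for any generated message class, so the calls that need it are left commented.

```python
from google.protobuf import symbol_database

db = symbol_database.Default()
# db.RegisterMessage(MyMessage)              # returns MyMessage itself
# cls = db.GetSymbol('mypackage.MyMessage')  # later lookup by full name
```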
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
tools/perf/benchmarks/indexeddb_perf.py
python
_IndexedDbMeasurement.DidStartBrowser
(self, browser)
Initialize metrics once right after the browser has been launched.
Initialize metrics once right after the browser has been launched.
[ "Initialize", "metrics", "once", "right", "after", "the", "browser", "has", "been", "launched", "." ]
def DidStartBrowser(self, browser): """Initialize metrics once right after the browser has been launched.""" self._memory_metric = memory.MemoryMetric(browser) self._v8_object_stats_metric = ( v8_object_stats.V8ObjectStatsMetric(_V8_COUNTER_NAMES))
[ "def", "DidStartBrowser", "(", "self", ",", "browser", ")", ":", "self", ".", "_memory_metric", "=", "memory", ".", "MemoryMetric", "(", "browser", ")", "self", ".", "_v8_object_stats_metric", "=", "(", "v8_object_stats", ".", "V8ObjectStatsMetric", "(", "_V8_COUNTER_NAMES", ")", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/perf/benchmarks/indexeddb_perf.py#L45-L49
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
python
parserCtxt.lineNumbers
(self, linenumbers)
Switch on the generation of line number for elements nodes.
Switch on the generation of line number for elements nodes.
[ "Switch", "on", "the", "generation", "of", "line", "number", "for", "elements", "nodes", "." ]
def lineNumbers(self, linenumbers): """Switch on the generation of line number for elements nodes. """ libxml2mod.xmlParserSetLineNumbers(self._o, linenumbers)
[ "def", "lineNumbers", "(", "self", ",", "linenumbers", ")", ":", "libxml2mod", ".", "xmlParserSetLineNumbers", "(", "self", ".", "_o", ",", "linenumbers", ")" ]
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L4140-L4142
google/clif
cab24d6a105609a65c95a36a1712ae3c20c7b5df
clif/python/ast_manipulations.py
python
MoveExtendPropertiesBackIntoClassesInPlace
(ast, class_decls_by_fq_native)
See module docstring.
See module docstring.
[ "See", "module", "docstring", "." ]
def MoveExtendPropertiesBackIntoClassesInPlace(ast, class_decls_by_fq_native): """See module docstring.""" extend_properties_orig_decl_indices = [] for orig_decl_index, decl in enumerate(ast.decls): if decl.decltype != ast_pb2.Decl.Type.VAR: continue if not decl.var.is_extend_variable: continue fq_native_from_var, property_name_from_var = decl.var.name.native.split( EXTEND_INFIX, 1) target_class_decl = class_decls_by_fq_native[fq_native_from_var] property_decl = ast_pb2.Decl() property_decl.CopyFrom(decl) property_decl.var.name.native = property_name_from_var if property_decl.var.HasField('cpp_set'): del property_decl.var.cpp_set.params[0] target_class_decl.members.append(property_decl) extend_properties_orig_decl_indices.append(orig_decl_index) for orig_decl_index in reversed(extend_properties_orig_decl_indices): del ast.decls[orig_decl_index]
[ "def", "MoveExtendPropertiesBackIntoClassesInPlace", "(", "ast", ",", "class_decls_by_fq_native", ")", ":", "extend_properties_orig_decl_indices", "=", "[", "]", "for", "orig_decl_index", ",", "decl", "in", "enumerate", "(", "ast", ".", "decls", ")", ":", "if", "decl", ".", "decltype", "!=", "ast_pb2", ".", "Decl", ".", "Type", ".", "VAR", ":", "continue", "if", "not", "decl", ".", "var", ".", "is_extend_variable", ":", "continue", "fq_native_from_var", ",", "property_name_from_var", "=", "decl", ".", "var", ".", "name", ".", "native", ".", "split", "(", "EXTEND_INFIX", ",", "1", ")", "target_class_decl", "=", "class_decls_by_fq_native", "[", "fq_native_from_var", "]", "property_decl", "=", "ast_pb2", ".", "Decl", "(", ")", "property_decl", ".", "CopyFrom", "(", "decl", ")", "property_decl", ".", "var", ".", "name", ".", "native", "=", "property_name_from_var", "if", "property_decl", ".", "var", ".", "HasField", "(", "'cpp_set'", ")", ":", "del", "property_decl", ".", "var", ".", "cpp_set", ".", "params", "[", "0", "]", "target_class_decl", ".", "members", ".", "append", "(", "property_decl", ")", "extend_properties_orig_decl_indices", ".", "append", "(", "orig_decl_index", ")", "for", "orig_decl_index", "in", "reversed", "(", "extend_properties_orig_decl_indices", ")", ":", "del", "ast", ".", "decls", "[", "orig_decl_index", "]" ]
https://github.com/google/clif/blob/cab24d6a105609a65c95a36a1712ae3c20c7b5df/clif/python/ast_manipulations.py#L177-L196
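A generic sketch of the deletion idiom at the end of the function: collect indices on a forward pass, then delete in reverse so earlier deletions never shift the indices still pending.

```python
items = ['keep', 'drop', 'keep', 'drop', 'keep']
to_delete = [i for i, v in enumerate(items) if v == 'drop']
for i in reversed(to_delete):  # reverse order keeps remaining indices valid
    del items[i]
print(items)  # ['keep', 'keep', 'keep']
```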
LLNL/lbann
26083e6c86050302ce33148aea70f62e61cacb92
applications/ATOM/train_atom_char_rnn.py
python
construct_data_reader
(run_args)
return message
Construct Protobuf message for Python data reader. The Python data reader will import this Python file to access the sample access functions.
Construct Protobuf message for Python data reader.
[ "Construct", "Protobuf", "message", "for", "Python", "data", "reader", "." ]
def construct_data_reader(run_args): """ Construct Protobuf message for Python data reader. The Python data reader will import this Python file to access the sample access functions. """ module_file = os.path.abspath(run_args.data_module_file) os.environ["DATA_CONFIG"] = os.path.abspath(run_args.data_config) module_name = os.path.splitext(os.path.basename(module_file))[0] module_dir = os.path.dirname(module_file) print("module_name: {}\tmodule_dir: {}".format(module_name, module_dir)) # Base data reader message message = lbann.reader_pb2.DataReader() # Training set data reader data_reader = message.reader.add() data_reader.name = "python" data_reader.role = "train" data_reader.shuffle = True data_reader.percent_of_data_to_use = 1.0 data_reader.python.module = module_name data_reader.python.module_dir = module_dir data_reader.python.sample_function = "get_sample" data_reader.python.num_samples_function = "num_samples" data_reader.python.sample_dims_function = "sample_dims" return message
[ "def", "construct_data_reader", "(", "run_args", ")", ":", "module_file", "=", "os", ".", "path", ".", "abspath", "(", "run_args", ".", "data_module_file", ")", "os", ".", "environ", "[", "\"DATA_CONFIG\"", "]", "=", "os", ".", "path", ".", "abspath", "(", "run_args", ".", "data_config", ")", "module_name", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "module_file", ")", ")", "[", "0", "]", "module_dir", "=", "os", ".", "path", ".", "dirname", "(", "module_file", ")", "print", "(", "\"module_name: {}\\tmodule_dir: {}\"", ".", "format", "(", "module_name", ",", "module_dir", ")", ")", "# Base data reader message", "message", "=", "lbann", ".", "reader_pb2", ".", "DataReader", "(", ")", "# Training set data reader", "data_reader", "=", "message", ".", "reader", ".", "add", "(", ")", "data_reader", ".", "name", "=", "\"python\"", "data_reader", ".", "role", "=", "\"train\"", "data_reader", ".", "shuffle", "=", "True", "data_reader", ".", "percent_of_data_to_use", "=", "1.0", "data_reader", ".", "python", ".", "module", "=", "module_name", "data_reader", ".", "python", ".", "module_dir", "=", "module_dir", "data_reader", ".", "python", ".", "sample_function", "=", "\"get_sample\"", "data_reader", ".", "python", ".", "num_samples_function", "=", "\"num_samples\"", "data_reader", ".", "python", ".", "sample_dims_function", "=", "\"sample_dims\"", "return", "message" ]
https://github.com/LLNL/lbann/blob/26083e6c86050302ce33148aea70f62e61cacb92/applications/ATOM/train_atom_char_rnn.py#L201-L233
amrayn/easyloggingpp
8489989bb26c6371df103f6cbced3fbee1bc3c2f
tools/cpplint.py
python
CheckPosixThreading
(filename, clean_lines, linenum, error)
Checks for calls to thread-unsafe functions. Much code has been originally written without consideration of multi-threading. Also, engineers are relying on their old experience; they have learned posix before threading extensions were added. These tests guide the engineers to use thread-safe functions (when using posix directly). Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Checks for calls to thread-unsafe functions.
[ "Checks", "for", "calls", "to", "thread", "-", "unsafe", "functions", "." ]
def CheckPosixThreading(filename, clean_lines, linenum, error): """Checks for calls to thread-unsafe functions. Much code was originally written without consideration of multi-threading. Also, engineers rely on their old experience; they learned POSIX before threading extensions were added. These tests guide engineers to use thread-safe functions (when using POSIX directly). Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ line = clean_lines.elided[linenum] for single_thread_function, multithread_safe_function in threading_list: ix = line.find(single_thread_function) # Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison if ix >= 0 and (ix == 0 or (not line[ix - 1].isalnum() and line[ix - 1] not in ('_', '.', '>'))): error(filename, linenum, 'runtime/threadsafe_fn', 2, 'Consider using ' + multithread_safe_function + '...) instead of ' + single_thread_function + '...) for improved thread safety.')
[ "def", "CheckPosixThreading", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "for", "single_thread_function", ",", "multithread_safe_function", "in", "threading_list", ":", "ix", "=", "line", ".", "find", "(", "single_thread_function", ")", "# Comparisons made explicit for clarity -- pylint: disable=g-explicit-bool-comparison", "if", "ix", ">=", "0", "and", "(", "ix", "==", "0", "or", "(", "not", "line", "[", "ix", "-", "1", "]", ".", "isalnum", "(", ")", "and", "line", "[", "ix", "-", "1", "]", "not", "in", "(", "'_'", ",", "'.'", ",", "'>'", ")", ")", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'runtime/threadsafe_fn'", ",", "2", ",", "'Consider using '", "+", "multithread_safe_function", "+", "'...) instead of '", "+", "single_thread_function", "+", "'...) for improved thread safety.'", ")" ]
https://github.com/amrayn/easyloggingpp/blob/8489989bb26c6371df103f6cbced3fbee1bc3c2f/tools/cpplint.py#L1558-L1582
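A standalone illustration of the boundary test above — threading_list is not shown in this record, so it is assumed here to contain (unsafe, safe) pairs like these:

threading_list = [('getpwuid(', 'getpwuid_r('), ('strtok(', 'strtok_r(')]

def flags_call(line):
    # Same check as CheckPosixThreading: a hit counts only when the match
    # starts the line or is not preceded by an identifier-like character.
    hits = []
    for unsafe, safe in threading_list:
        ix = line.find(unsafe)
        if ix >= 0 and (ix == 0 or (not line[ix - 1].isalnum()
                                    and line[ix - 1] not in ('_', '.', '>'))):
            hits.append((unsafe, safe))
    return hits

print(flags_call('pw = getpwuid(uid);'))     # flagged
print(flags_call('pw = my_getpwuid(uid);'))  # not flagged: '_' precedes the match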
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/richtext.py
python
RichTextCtrl.GetDefaultStyle
(*args, **kwargs)
return _richtext.RichTextCtrl_GetDefaultStyle(*args, **kwargs)
GetDefaultStyle(self) -> RichTextAttr Retrieves a copy of the default style object.
GetDefaultStyle(self) -> RichTextAttr
[ "GetDefaultStyle", "(", "self", ")", "-", ">", "RichTextAttr" ]
def GetDefaultStyle(*args, **kwargs): """ GetDefaultStyle(self) -> RichTextAttr Retrieves a copy of the default style object. """ return _richtext.RichTextCtrl_GetDefaultStyle(*args, **kwargs)
[ "def", "GetDefaultStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_richtext", ".", "RichTextCtrl_GetDefaultStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/richtext.py#L3173-L3179
Z3Prover/z3
d745d03afdfdf638d66093e2bfbacaf87187f35b
src/api/python/z3/z3.py
python
Solver.reset
(self)
Remove all asserted constraints and backtracking points created using `push()`. >>> x = Int('x') >>> s = Solver() >>> s.add(x > 0) >>> s [x > 0] >>> s.reset() >>> s []
Remove all asserted constraints and backtracking points created using `push()`.
[ "Remove", "all", "asserted", "constraints", "and", "backtracking", "points", "created", "using", "push", "()", "." ]
def reset(self): """Remove all asserted constraints and backtracking points created using `push()`. >>> x = Int('x') >>> s = Solver() >>> s.add(x > 0) >>> s [x > 0] >>> s.reset() >>> s [] """ Z3_solver_reset(self.ctx.ref(), self.solver)
[ "def", "reset", "(", "self", ")", ":", "Z3_solver_reset", "(", "self", ".", "ctx", ".", "ref", "(", ")", ",", "self", ".", "solver", ")" ]
https://github.com/Z3Prover/z3/blob/d745d03afdfdf638d66093e2bfbacaf87187f35b/src/api/python/z3/z3.py#L6925-L6937
SonarOpenCommunity/sonar-cxx
6e1d456fdcd45d35bcdc61c980e34d85fe88971e
cxx-sensors/src/tools/vc_createrules.py
python
read_warnings
()
return warnings
Read warnings from HTML pages. - root pages are defined in URLS - special property values are defined in RULE_MAP
Read warnings from HTML pages. - root pages are defined in URLS - special property values are defined in RULE_MAP
[ "Read", "warnings", "from", "HTML", "pages", ".", "-", "root", "pages", "are", "defined", "in", "URLS", "-", "special", "property", "values", "are", "defined", "in", "RULE_MAP" ]
def read_warnings(): """ Read warnings from HTML pages. - root pages are defined in URLS - special property values are defined in RULE_MAP """ # page contains JavaScript. Use Firefox to create HTML page # you have to download and install https://github.com/mozilla/geckodriver/releases browser = webdriver.Firefox(executable_path=r'C:\Program Files\geckodriver\geckodriver.exe') # read links to warning pages from menu of overview pages warnings = {} for url, properties in URLS.items(): page_source = read_page_source(browser, url) parse_warning_hrefs(page_source, warnings) for key, warning in warnings.items(): assign_warning_properties(warning, properties, False) # warnings = dict(list(warnings.items())[:1]) # for testing only # sort warnings ascending by message number warnings = dict(sorted(warnings.items(), key=sorter)) # read content of warning pages read_warning_pages(browser, warnings) # override defaults for key, defaults in RULE_MAP.items(): if key in warnings: warning = warnings[key] assign_warning_properties(warning, defaults, True) # close browser browser.quit() return warnings
[ "def", "read_warnings", "(", ")", ":", "# page contains JavaScript. Use Firefox to create HTML page", "# you have to download and install https://github.com/mozilla/geckodriver/releases", "browser", "=", "webdriver", ".", "Firefox", "(", "executable_path", "=", "r'C:\\Program Files\\geckodriver\\geckodriver.exe'", ")", "# read links to warning pages from menu of overview pages", "warnings", "=", "{", "}", "for", "url", ",", "properties", "in", "URLS", ".", "items", "(", ")", ":", "page_source", "=", "read_page_source", "(", "browser", ",", "url", ")", "parse_warning_hrefs", "(", "page_source", ",", "warnings", ")", "for", "key", ",", "warning", "in", "warnings", ".", "items", "(", ")", ":", "assign_warning_properties", "(", "warning", ",", "properties", ",", "False", ")", "# warnings = dict(list(warnings.items())[:1]) # for testing only", "# sort warnings ascending by message number", "warnings", "=", "dict", "(", "sorted", "(", "warnings", ".", "items", "(", ")", ",", "key", "=", "sorter", ")", ")", "# read content of warning pages", "read_warning_pages", "(", "browser", ",", "warnings", ")", "# override defaults", "for", "key", ",", "defaults", "in", "RULE_MAP", ".", "items", "(", ")", ":", "if", "key", "in", "warnings", ":", "warning", "=", "warnings", "[", "key", "]", "assign_warning_properties", "(", "warning", ",", "defaults", ",", "True", ")", "# close browser", "browser", ".", "quit", "(", ")", "return", "warnings" ]
https://github.com/SonarOpenCommunity/sonar-cxx/blob/6e1d456fdcd45d35bcdc61c980e34d85fe88971e/cxx-sensors/src/tools/vc_createrules.py#L456-L490
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/pydoc.py
python
locate
(path, forceload=0)
return object
Locate an object by name or dotted path, importing as necessary.
Locate an object by name or dotted path, importing as necessary.
[ "Locate", "an", "object", "by", "name", "or", "dotted", "path", "importing", "as", "necessary", "." ]
def locate(path, forceload=0): """Locate an object by name or dotted path, importing as necessary.""" parts = [part for part in path.split('.') if part] module, n = None, 0 while n < len(parts): nextmodule = safeimport('.'.join(parts[:n+1]), forceload) if nextmodule: module, n = nextmodule, n + 1 else: break if module: object = module else: object = builtins for part in parts[n:]: try: object = getattr(object, part) except AttributeError: return None return object
[ "def", "locate", "(", "path", ",", "forceload", "=", "0", ")", ":", "parts", "=", "[", "part", "for", "part", "in", "path", ".", "split", "(", "'.'", ")", "if", "part", "]", "module", ",", "n", "=", "None", ",", "0", "while", "n", "<", "len", "(", "parts", ")", ":", "nextmodule", "=", "safeimport", "(", "'.'", ".", "join", "(", "parts", "[", ":", "n", "+", "1", "]", ")", ",", "forceload", ")", "if", "nextmodule", ":", "module", ",", "n", "=", "nextmodule", ",", "n", "+", "1", "else", ":", "break", "if", "module", ":", "object", "=", "module", "else", ":", "object", "=", "builtins", "for", "part", "in", "parts", "[", "n", ":", "]", ":", "try", ":", "object", "=", "getattr", "(", "object", ",", "part", ")", "except", "AttributeError", ":", "return", "None", "return", "object" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/pydoc.py#L1602-L1619
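A quick usage note: pydoc.locate is part of the public pydoc API, so the lookup behaviour above can be exercised directly:

from pydoc import locate

join = locate('os.path.join')     # resolves to the live function, importing os.path
print(join('a', 'b'))             # a/b (a\b on Windows)
print(locate('os.path.no_such'))  # None -- the AttributeError is swallowed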
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
contrib/gizmos/msw/gizmos.py
python
TreeListCtrl.AssignButtonsImageList
(*args, **kwargs)
return _gizmos.TreeListCtrl_AssignButtonsImageList(*args, **kwargs)
AssignButtonsImageList(self, ImageList imageList)
AssignButtonsImageList(self, ImageList imageList)
[ "AssignButtonsImageList", "(", "self", "ImageList", "imageList", ")" ]
def AssignButtonsImageList(*args, **kwargs): """AssignButtonsImageList(self, ImageList imageList)""" return _gizmos.TreeListCtrl_AssignButtonsImageList(*args, **kwargs)
[ "def", "AssignButtonsImageList", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gizmos", ".", "TreeListCtrl_AssignButtonsImageList", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/contrib/gizmos/msw/gizmos.py#L551-L553
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/webapp2/webapp2.py
python
_to_utf8
(value)
return value.encode('utf-8')
Encodes a unicode value to UTF-8 if not yet encoded.
Encodes a unicode value to UTF-8 if not yet encoded.
[ "Encodes", "a", "unicode", "value", "to", "UTF", "-", "8", "if", "not", "yet", "encoded", "." ]
def _to_utf8(value): """Encodes a unicode value to UTF-8 if not yet encoded.""" if isinstance(value, str): return value return value.encode('utf-8')
[ "def", "_to_utf8", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "str", ")", ":", "return", "value", "return", "value", ".", "encode", "(", "'utf-8'", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/webapp2/webapp2.py#L1919-L1924
quantOS-org/DataCore
e2ef9bd2c22ee9e2845675b6435a14fa607f3551
mdlink/deps/windows/protobuf-2.5.0/python/google/protobuf/message_factory.py
python
_GetAllDescriptors
(desc_protos, package)
Gets all levels of nested message types as a flattened list of descriptors. Args: desc_protos: The descriptor protos to process. package: The package where the protos are defined. Yields: Each message descriptor for each nested type.
Gets all levels of nested message types as a flattened list of descriptors.
[ "Gets", "all", "levels", "of", "nested", "message", "types", "as", "a", "flattened", "list", "of", "descriptors", "." ]
def _GetAllDescriptors(desc_protos, package): """Gets all levels of nested message types as a flattened list of descriptors. Args: desc_protos: The descriptor protos to process. package: The package where the protos are defined. Yields: Each message descriptor for each nested type. """ for desc_proto in desc_protos: name = '.'.join((package, desc_proto.name)) yield _POOL.FindMessageTypeByName(name) for nested_desc in _GetAllDescriptors(desc_proto.nested_type, name): yield nested_desc
[ "def", "_GetAllDescriptors", "(", "desc_protos", ",", "package", ")", ":", "for", "desc_proto", "in", "desc_protos", ":", "name", "=", "'.'", ".", "join", "(", "(", "package", ",", "desc_proto", ".", "name", ")", ")", "yield", "_POOL", ".", "FindMessageTypeByName", "(", "name", ")", "for", "nested_desc", "in", "_GetAllDescriptors", "(", "desc_proto", ".", "nested_type", ",", "name", ")", ":", "yield", "nested_desc" ]
https://github.com/quantOS-org/DataCore/blob/e2ef9bd2c22ee9e2845675b6435a14fa607f3551/mdlink/deps/windows/protobuf-2.5.0/python/google/protobuf/message_factory.py#L98-L113
google/sentencepiece
8420f2179007c398c8b70f63cb12d8aec827397c
python/src/sentencepiece/__init__.py
python
_batchnize
(classname, name)
Enables batch request for the method classname.name.
Enables batch request for the method classname.name.
[ "Enables", "batch", "request", "for", "the", "method", "classname", ".", "name", "." ]
def _batchnize(classname, name): """Enables batch request for the method classname.name.""" func = getattr(classname, name, None) def _func(v, n): if type(n) is int and (n < 0 or n >= v.piece_size()): raise IndexError('piece id is out of range.') return func(v, n) def _batched_func(self, arg): if type(arg) is list: return [_func(self, n) for n in arg] else: return _func(self, arg) setattr(classname, name, _batched_func)
[ "def", "_batchnize", "(", "classname", ",", "name", ")", ":", "func", "=", "getattr", "(", "classname", ",", "name", ",", "None", ")", "def", "_func", "(", "v", ",", "n", ")", ":", "if", "type", "(", "n", ")", "is", "int", "and", "(", "n", "<", "0", "or", "n", ">=", "v", ".", "piece_size", "(", ")", ")", ":", "raise", "IndexError", "(", "'piece id is out of range.'", ")", "return", "func", "(", "v", ",", "n", ")", "def", "_batched_func", "(", "self", ",", "arg", ")", ":", "if", "type", "(", "arg", ")", "is", "list", ":", "return", "[", "_func", "(", "self", ",", "n", ")", "for", "n", "in", "arg", "]", "else", ":", "return", "_func", "(", "self", ",", "arg", ")", "setattr", "(", "classname", ",", "name", ",", "_batched_func", ")" ]
https://github.com/google/sentencepiece/blob/8420f2179007c398c8b70f63cb12d8aec827397c/python/src/sentencepiece/__init__.py#L489-L503
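For illustration, once _batchnize has patched SentencePieceProcessor, a patched method such as id_to_piece accepts either a single id or a list of ids (the model file below is hypothetical):

import sentencepiece as spm

sp = spm.SentencePieceProcessor(model_file='m.model')  # hypothetical model
print(sp.id_to_piece(0))          # single piece
print(sp.id_to_piece([0, 1, 2]))  # list in, list out
# sp.id_to_piece(10**9)           # would raise IndexError('piece id is out of range.')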
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/keras/python/keras/backend.py
python
mean
(x, axis=None, keepdims=False)
return math_ops.reduce_mean(x, axis=axis, keep_dims=keepdims)
Mean of a tensor, alongside the specified axis. Arguments: x: A tensor or variable. axis: A list of integers. Axes to compute the mean. keepdims: A boolean, whether to keep the dimensions or not. If `keepdims` is `False`, the rank of the tensor is reduced by 1 for each entry in `axis`. If `keepdims` is `True`, the reduced dimensions are retained with length 1. Returns: A tensor with the mean of elements of `x`.
Mean of a tensor, alongside the specified axis.
[ "Mean", "of", "a", "tensor", "alongside", "the", "specified", "axis", "." ]
def mean(x, axis=None, keepdims=False): """Mean of a tensor, alongside the specified axis. Arguments: x: A tensor or variable. axis: A list of integers. Axes to compute the mean. keepdims: A boolean, whether to keep the dimensions or not. If `keepdims` is `False`, the rank of the tensor is reduced by 1 for each entry in `axis`. If `keepdims` is `True`, the reduced dimensions are retained with length 1. Returns: A tensor with the mean of elements of `x`. """ axis = _normalize_axis(axis, ndim(x)) if x.dtype.base_dtype == dtypes_module.bool: x = math_ops.cast(x, floatx()) return math_ops.reduce_mean(x, axis=axis, keep_dims=keepdims)
[ "def", "mean", "(", "x", ",", "axis", "=", "None", ",", "keepdims", "=", "False", ")", ":", "axis", "=", "_normalize_axis", "(", "axis", ",", "ndim", "(", "x", ")", ")", "if", "x", ".", "dtype", ".", "base_dtype", "==", "dtypes_module", ".", "bool", ":", "x", "=", "math_ops", ".", "cast", "(", "x", ",", "floatx", "(", ")", ")", "return", "math_ops", ".", "reduce_mean", "(", "x", ",", "axis", "=", "axis", ",", "keep_dims", "=", "keepdims", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/keras/python/keras/backend.py#L1441-L1458
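A hedged usage sketch, assuming the contrib Keras backend is importable as K in this TF 1.x fork:

from tensorflow.contrib.keras import backend as K

x = K.variable([[1., 2.], [3., 4.]])
print(K.eval(K.mean(x)))                         # 2.5, scalar over all entries
print(K.eval(K.mean(x, axis=1)))                 # [1.5 3.5], rank reduced by 1
print(K.eval(K.mean(x, axis=1, keepdims=True)))  # [[1.5] [3.5]], shape (2, 1)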
isl-org/Open3D
79aec3ddde6a571ce2f28e4096477e52ec465244
examples/python/visualization/online_processing.py
python
PipelineController.on_toggle_capture
(self, is_enabled)
Callback to toggle capture.
Callback to toggle capture.
[ "Callback", "to", "toggle", "capture", "." ]
def on_toggle_capture(self, is_enabled): """Callback to toggle capture.""" self.pipeline_model.flag_capture = is_enabled if not is_enabled: self.on_toggle_record(False) if self.pipeline_view.toggle_record is not None: self.pipeline_view.toggle_record.is_on = False else: with self.pipeline_model.cv_capture: self.pipeline_model.cv_capture.notify()
[ "def", "on_toggle_capture", "(", "self", ",", "is_enabled", ")", ":", "self", ".", "pipeline_model", ".", "flag_capture", "=", "is_enabled", "if", "not", "is_enabled", ":", "self", ".", "on_toggle_record", "(", "False", ")", "if", "self", ".", "pipeline_view", ".", "toggle_record", "is", "not", "None", ":", "self", ".", "pipeline_view", ".", "toggle_record", ".", "is_on", "=", "False", "else", ":", "with", "self", ".", "pipeline_model", ".", "cv_capture", ":", "self", ".", "pipeline_model", ".", "cv_capture", ".", "notify", "(", ")" ]
https://github.com/isl-org/Open3D/blob/79aec3ddde6a571ce2f28e4096477e52ec465244/examples/python/visualization/online_processing.py#L499-L508
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
build/android/pylib/android_commands.py
python
AndroidCommands.GetBuildType
(self)
return build_type
Returns the build type of the system (e.g. eng).
Returns the build type of the system (e.g. eng).
[ "Returns", "the", "build", "type", "of", "the", "system", "(", "e", ".", "g", ".", "eng", ")", "." ]
def GetBuildType(self): """Returns the build type of the system (e.g. eng).""" build_type = self.system_properties['ro.build.type'] assert build_type return build_type
[ "def", "GetBuildType", "(", "self", ")", ":", "build_type", "=", "self", ".", "system_properties", "[", "'ro.build.type'", "]", "assert", "build_type", "return", "build_type" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/build/android/pylib/android_commands.py#L1178-L1182
s9xie/DSN
065e49898d239f5c96be558616b2556eabc50351
scripts/cpp_lint.py
python
CloseExpression
(clean_lines, linenum, pos)
return (line, clean_lines.NumLines(), -1)
If input points to ( or { or [ or <, finds the position that closes it. If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the linenum/pos that correspond to the closing of the expression. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *past* the closing brace, or (line, len(lines), -1) if we never find a close. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum.
If input points to ( or { or [ or <, finds the position that closes it.
[ "If", "input", "points", "to", "(", "or", "{", "or", "[", "or", "<", "finds", "the", "position", "that", "closes", "it", "." ]
def CloseExpression(clean_lines, linenum, pos): """If input points to ( or { or [ or <, finds the position that closes it. If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the linenum/pos that correspond to the closing of the expression. Args: clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. pos: A position on the line. Returns: A tuple (line, linenum, pos) pointer *past* the closing brace, or (line, len(lines), -1) if we never find a close. Note we ignore strings and comments when matching; and the line we return is the 'cleansed' line at linenum. """ line = clean_lines.elided[linenum] startchar = line[pos] if startchar not in '({[<': return (line, clean_lines.NumLines(), -1) if startchar == '(': endchar = ')' if startchar == '[': endchar = ']' if startchar == '{': endchar = '}' if startchar == '<': endchar = '>' # Check first line (end_pos, num_open) = FindEndOfExpressionInLine( line, pos, 0, startchar, endchar) if end_pos > -1: return (line, linenum, end_pos) # Continue scanning forward while linenum < clean_lines.NumLines() - 1: linenum += 1 line = clean_lines.elided[linenum] (end_pos, num_open) = FindEndOfExpressionInLine( line, 0, num_open, startchar, endchar) if end_pos > -1: return (line, linenum, end_pos) # Did not find endchar before end of file, give up return (line, clean_lines.NumLines(), -1)
[ "def", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "pos", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "startchar", "=", "line", "[", "pos", "]", "if", "startchar", "not", "in", "'({[<'", ":", "return", "(", "line", ",", "clean_lines", ".", "NumLines", "(", ")", ",", "-", "1", ")", "if", "startchar", "==", "'('", ":", "endchar", "=", "')'", "if", "startchar", "==", "'['", ":", "endchar", "=", "']'", "if", "startchar", "==", "'{'", ":", "endchar", "=", "'}'", "if", "startchar", "==", "'<'", ":", "endchar", "=", "'>'", "# Check first line", "(", "end_pos", ",", "num_open", ")", "=", "FindEndOfExpressionInLine", "(", "line", ",", "pos", ",", "0", ",", "startchar", ",", "endchar", ")", "if", "end_pos", ">", "-", "1", ":", "return", "(", "line", ",", "linenum", ",", "end_pos", ")", "# Continue scanning forward", "while", "linenum", "<", "clean_lines", ".", "NumLines", "(", ")", "-", "1", ":", "linenum", "+=", "1", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "(", "end_pos", ",", "num_open", ")", "=", "FindEndOfExpressionInLine", "(", "line", ",", "0", ",", "num_open", ",", "startchar", ",", "endchar", ")", "if", "end_pos", ">", "-", "1", ":", "return", "(", "line", ",", "linenum", ",", "end_pos", ")", "# Did not find endchar before end of file, give up", "return", "(", "line", ",", "clean_lines", ".", "NumLines", "(", ")", ",", "-", "1", ")" ]
https://github.com/s9xie/DSN/blob/065e49898d239f5c96be558616b2556eabc50351/scripts/cpp_lint.py#L1249-L1292
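A self-contained sketch of the same forward scan — FindEndOfExpressionInLine is not shown in this record, so its depth counting is inlined here, and unlike the real helper this toy version does not skip strings or comments:

def close_expression(lines, linenum, pos):
    startchar = lines[linenum][pos]
    endchar = {'(': ')', '[': ']', '{': '}', '<': '>'}.get(startchar)
    if endchar is None:
        return (linenum, -1)
    depth = 0
    for ln in range(linenum, len(lines)):
        start = pos if ln == linenum else 0
        for i in range(start, len(lines[ln])):
            ch = lines[ln][i]
            if ch == startchar:
                depth += 1
            elif ch == endchar:
                depth -= 1
                if depth == 0:
                    return (ln, i + 1)  # position *past* the closing brace
    return (len(lines), -1)  # never closed

print(close_expression(['foo(bar,', '    baz(qux))'], 0, 3))  # -> (1, 13)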
microsoft/checkedc-clang
a173fefde5d7877b7750e7ce96dd08cf18baebf2
lldb/examples/python/gdbremote.py
python
TerminalColors.bold
(self, on=True)
return ''
Enable or disable bold depending on the "on" parameter.
Enable or disable bold depending on the "on" parameter.
[ "Enable", "or", "disable", "bold", "depending", "on", "the", "on", "parameter", "." ]
def bold(self, on=True): '''Enable or disable bold depending on the "on" parameter.''' if self.enabled: if on: return "\x1b[1m" else: return "\x1b[22m" return ''
[ "def", "bold", "(", "self", ",", "on", "=", "True", ")", ":", "if", "self", ".", "enabled", ":", "if", "on", ":", "return", "\"\\x1b[1m\"", "else", ":", "return", "\"\\x1b[22m\"", "return", "''" ]
https://github.com/microsoft/checkedc-clang/blob/a173fefde5d7877b7750e7ce96dd08cf18baebf2/lldb/examples/python/gdbremote.py#L56-L63
eclipse/sumo
7132a9b8b6eea734bdec38479026b4d8c4336d03
tools/traci/_simulation.py
python
SimulationDomain.getNetBoundary
(self)
return self._getUniversal(tc.VAR_NET_BOUNDING_BOX)
getNetBoundary() -> ((double, double), (double, double)) The boundary box of the simulation network.
getNetBoundary() -> ((double, double), (double, double))
[ "getNetBoundary", "()", "-", ">", "((", "double", "double", ")", "(", "double", "double", "))" ]
def getNetBoundary(self): """getNetBoundary() -> ((double, double), (double, double)) The boundary box of the simulation network. """ return self._getUniversal(tc.VAR_NET_BOUNDING_BOX)
[ "def", "getNetBoundary", "(", "self", ")", ":", "return", "self", ".", "_getUniversal", "(", "tc", ".", "VAR_NET_BOUNDING_BOX", ")" ]
https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/traci/_simulation.py#L516-L521
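Typical use inside a running TraCI session (the binary name and config file below are hypothetical):

import traci

traci.start(['sumo', '-c', 'scenario.sumocfg'])
(x_min, y_min), (x_max, y_max) = traci.simulation.getNetBoundary()
print('network spans %.1f x %.1f' % (x_max - x_min, y_max - y_min))
traci.close()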
TGAC/KAT
e8870331de2b4bb0a1b3b91c6afb8fb9d59e9216
deps/boost/tools/build/src/build/property.py
python
evaluate_conditionals_in_context
(properties, context)
return result
Removes all conditional properties whose conditions are not met. For those with met conditions, removes the condition. Properties in conditions are looked up in 'context'.
Removes all conditional properties whose conditions are not met. For those with met conditions, removes the condition. Properties in conditions are looked up in 'context'.
[ "Removes", "all", "conditional", "properties", "whose", "conditions", "are", "not", "met", ".", "For", "those", "with", "met", "conditions", "removes", "the", "condition", ".", "Properties", "in", "conditions", "are", "looked", "up", "in", "context", "." ]
def evaluate_conditionals_in_context (properties, context): """ Removes all conditional properties whose conditions are not met. For those with met conditions, removes the condition. Properties in conditions are looked up in 'context'. """ if __debug__: from .property_set import PropertySet assert is_iterable_typed(properties, Property) assert isinstance(context, PropertySet) base = [] conditional = [] for p in properties: if p.condition: conditional.append (p) else: base.append (p) result = base[:] for p in conditional: # Evaluate condition # FIXME: probably inefficient if all(x in context for x in p.condition): result.append(Property(p.feature, p.value)) return result
[ "def", "evaluate_conditionals_in_context", "(", "properties", ",", "context", ")", ":", "if", "__debug__", ":", "from", ".", "property_set", "import", "PropertySet", "assert", "is_iterable_typed", "(", "properties", ",", "Property", ")", "assert", "isinstance", "(", "context", ",", "PropertySet", ")", "base", "=", "[", "]", "conditional", "=", "[", "]", "for", "p", "in", "properties", ":", "if", "p", ".", "condition", ":", "conditional", ".", "append", "(", "p", ")", "else", ":", "base", ".", "append", "(", "p", ")", "result", "=", "base", "[", ":", "]", "for", "p", "in", "conditional", ":", "# Evaluate condition", "# FIXME: probably inefficient", "if", "all", "(", "x", "in", "context", "for", "x", "in", "p", ".", "condition", ")", ":", "result", ".", "append", "(", "Property", "(", "p", ".", "feature", ",", "p", ".", "value", ")", ")", "return", "result" ]
https://github.com/TGAC/KAT/blob/e8870331de2b4bb0a1b3b91c6afb8fb9d59e9216/deps/boost/tools/build/src/build/property.py#L428-L454
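A standalone analogue of the evaluation rule, with b2's Property/PropertySet types replaced by tuples and a set purely for illustration:

context = {('toolset', 'gcc'), ('variant', 'release')}
properties = [
    (None, ('optimization', 'speed')),                  # unconditional: always kept
    ([('toolset', 'gcc')], ('cxxflags', '-fno-rtti')),  # condition met: kept, condition stripped
    ([('toolset', 'msvc')], ('cxxflags', '/GR-')),      # condition unmet: dropped
]
result = [p for cond, p in properties
          if cond is None or all(c in context for c in cond)]
print(result)  # [('optimization', 'speed'), ('cxxflags', '-fno-rtti')]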
Studio3T/robomongo
2411cd032e2e69b968dadda13ac91ca4ef3483b0
src/third-party/qscintilla-2.8.4/sources/Python/configure.py
python
ModuleConfiguration.get_sip_installs
(self, target_configuration)
return path, files
Return a tuple of the installation directory of the module's .sip files and a sequence of the names of each of the .sip files relative to the directory containing this configuration script. None is returned if the module's .sip files are not to be installed. target_configuration is the target configuration.
Return a tuple of the installation directory of the module's .sip files and a sequence of the names of each of the .sip files relative to the directory containing this configuration script. None is returned if the module's .sip files are not to be installed. target_configuration is the target configuration.
[ "Return", "a", "tuple", "of", "the", "installation", "directory", "of", "the", "module", "s", ".", "sip", "files", "and", "a", "sequence", "of", "the", "names", "of", "each", "of", "the", ".", "sip", "files", "relative", "to", "the", "directory", "containing", "this", "configuration", "script", ".", "None", "is", "returned", "if", "the", "module", "s", ".", "sip", "files", "are", "not", "to", "be", "installed", ".", "target_configuration", "is", "the", "target", "configuration", "." ]
def get_sip_installs(self, target_configuration): """ Return a tuple of the installation directory of the module's .sip files and a sequence of the names of each of the .sip files relative to the directory containing this configuration script. None is returned if the module's .sip files are not to be installed. target_configuration is the target configuration. """ if target_configuration.qsci_sip_dir == '': return None path = os.path.join(target_configuration.qsci_sip_dir, 'Qsci') files = glob.glob('sip/*.sip') return path, files
[ "def", "get_sip_installs", "(", "self", ",", "target_configuration", ")", ":", "if", "target_configuration", ".", "qsci_sip_dir", "==", "''", ":", "return", "None", "path", "=", "os", ".", "path", ".", "join", "(", "target_configuration", ".", "qsci_sip_dir", ",", "'Qsci'", ")", "files", "=", "glob", ".", "glob", "(", "'sip/*.sip'", ")", "return", "path", ",", "files" ]
https://github.com/Studio3T/robomongo/blob/2411cd032e2e69b968dadda13ac91ca4ef3483b0/src/third-party/qscintilla-2.8.4/sources/Python/configure.py#L269-L283
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/dashboard/dashboard/graph_json.py
python
_GetSubTestDict
(test_paths)
return subtests
Gets a dict of test suite path to sub test dict. Args: test_paths: List of test paths. Returns: Dictionary of test suite path to sub-test tree (see list_tests.GetSubTests).
Gets a dict of test suite path to sub test dict.
[ "Gets", "a", "dict", "of", "test", "suite", "path", "to", "sub", "test", "dict", "." ]
def _GetSubTestDict(test_paths): """Gets a dict of test suite path to sub test dict. Args: test_paths: List of test paths. Returns: Dictionary of test suite path to sub-test tree (see list_tests.GetSubTests). """ subtests = {} for test_path in test_paths: path_parts = test_path.split('/') bot_path = '/'.join(path_parts[0:2]) test_suite_path = '/'.join(path_parts[0:3]) test_suite = path_parts[2] if test_suite_path not in subtests: subtests[test_suite_path] = {} subtests[test_suite_path] = list_tests.GetSubTests(test_suite, [bot_path]) return subtests
[ "def", "_GetSubTestDict", "(", "test_paths", ")", ":", "subtests", "=", "{", "}", "for", "test_path", "in", "test_paths", ":", "path_parts", "=", "test_path", ".", "split", "(", "'/'", ")", "bot_path", "=", "'/'", ".", "join", "(", "path_parts", "[", "0", ":", "2", "]", ")", "test_suite_path", "=", "'/'", ".", "join", "(", "path_parts", "[", "0", ":", "3", "]", ")", "test_suite", "=", "path_parts", "[", "2", "]", "if", "test_suite_path", "not", "in", "subtests", ":", "subtests", "[", "test_suite_path", "]", "=", "{", "}", "subtests", "[", "test_suite_path", "]", "=", "list_tests", ".", "GetSubTests", "(", "test_suite", ",", "[", "bot_path", "]", ")", "return", "subtests" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/dashboard/dashboard/graph_json.py#L508-L527
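The prefix slicing above is plain string manipulation; a hypothetical test path makes it concrete:

test_path = 'ChromiumPerf/linux-release/sunspider/Total'  # hypothetical path
parts = test_path.split('/')
bot_path = '/'.join(parts[0:2])         # 'ChromiumPerf/linux-release'
test_suite_path = '/'.join(parts[0:3])  # 'ChromiumPerf/linux-release/sunspider'
test_suite = parts[2]                   # 'sunspider'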
PaddlePaddle/Paddle
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
python/paddle/fluid/layers/control_flow.py
python
StaticRNN.step
(self)
return BlockGuardWithCompletion(self)
Define operators in each step. step is used in :code:`with` block, OP in :code:`with` block will be executed sequence_len times (sequence_len is the length of input)
Define operators in each step. step is used in :code:`with` block, OP in :code:`with` block will be executed sequence_len times (sequence_len is the length of input)
[ "Define", "operators", "in", "each", "step", ".", "step", "is", "used", "in", ":", "code", ":", "with", "block", "OP", "in", ":", "code", ":", "with", "block", "will", "be", "executed", "sequence_len", "times", "(", "sequence_len", "is", "the", "length", "of", "input", ")" ]
def step(self): """ Define operators in each step. step is used in :code:`with` block, OP in :code:`with` block will be executed sequence_len times (sequence_len is the length of input) """ return BlockGuardWithCompletion(self)
[ "def", "step", "(", "self", ")", ":", "return", "BlockGuardWithCompletion", "(", "self", ")" ]
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/layers/control_flow.py#L546-L551
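A hedged sketch of the with-block usage the docstring describes, adapted from the fluid StaticRNN pattern (vocabulary size, hidden size, and tensor names are assumptions):

import paddle.fluid as fluid
import paddle.fluid.layers as layers

vocab_size, hidden_size = 10000, 200
x = fluid.data(name='x', shape=[None, 1, 1], dtype='int64')
x_emb = layers.embedding(input=x, size=[vocab_size, hidden_size], is_sparse=False)
x_emb = layers.transpose(x_emb, perm=[1, 0, 2])  # StaticRNN steps over dim 0

rnn = layers.StaticRNN()
with rnn.step():  # ops in this block run once per time step
    word = rnn.step_input(x_emb)
    prev = rnn.memory(shape=[-1, hidden_size], batch_ref=word)
    hidden = layers.fc(input=[word, prev], size=hidden_size, act='relu')
    rnn.update_memory(prev, hidden)
    rnn.step_output(hidden)

result = rnn()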