Column schema:

column              type            min      max
nwo                 stringlengths   5        86
sha                 stringlengths   40       40
path                stringlengths   4        189
language            stringclasses   1 value
identifier          stringlengths   1        94
parameters          stringlengths   2        4.03k
argument_list       stringclasses   1 value
return_statement    stringlengths   0        11.5k
docstring           stringlengths   1        33.2k
docstring_summary   stringlengths   0        5.15k
docstring_tokens    sequence
function            stringlengths   34       151k
function_tokens     sequence
url                 stringlengths   90       278
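Each record below lists one row's values in schema order (nwo, sha, path, language, identifier, parameters, return_statement, docstring, docstring_summary, docstring_tokens, function, function_tokens, url); fields with empty values appear to be omitted. The url column is recoverable from nwo, sha, path, and the function's line span. A minimal sketch, checked against the first record below; the `permalink` helper is illustrative, not part of the dataset:

```python
# Rebuild the `url` column from the identifying columns plus a line span.
# Values are taken from the NREL/EnergyPlus record that follows.
def permalink(nwo, sha, path, start, end):
    return f"https://github.com/{nwo}/blob/{sha}/{path}#L{start}-L{end}"

url = permalink("NREL/EnergyPlus",
                "fadc5973b85c70e8cc923efb69c144e808a26078",
                "src/EnergyPlus/api/datatransfer.py",
                1050, 1059)
assert url == ("https://github.com/NREL/EnergyPlus/blob/"
               "fadc5973b85c70e8cc923efb69c144e808a26078/"
               "src/EnergyPlus/api/datatransfer.py#L1050-L1059")
```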
NREL/EnergyPlus
fadc5973b85c70e8cc923efb69c144e808a26078
src/EnergyPlus/api/datatransfer.py
python
DataExchange.warmup_flag
(self, state: c_void_p)
return self.api.warmupFlag(state) == 1
Gets a flag for whether the warmup flag is currently on, signaling that EnergyPlus is still in the process of converging on warmup days. The C API returns an integer where 1 is yes and 0 is no, this simply wraps that with a bool conversion. :param state: An active EnergyPlus "state" that is returned from a call to `api.state_manager.new_state()`. :return: A boolean indicating whether the warmup flag is on.
Gets a flag for whether the warmup flag is currently on, signaling that EnergyPlus is still in the process of converging on warmup days. The C API returns an integer where 1 is yes and 0 is no, this simply wraps that with a bool conversion.
[ "Gets", "a", "flag", "for", "whether", "the", "warmup", "flag", "is", "currently", "on", "signaling", "that", "EnergyPlus", "is", "still", "in", "the", "process", "of", "converging", "on", "warmup", "days", ".", "The", "C", "API", "returns", "an", "integer", "where", "1", "is", "yes", "and", "0", "is", "no", "this", "simply", "wraps", "that", "with", "a", "bool", "conversion", "." ]
def warmup_flag(self, state: c_void_p) -> bool: """ Gets a flag for whether the warmup flag is currently on, signaling that EnergyPlus is still in the process of converging on warmup days. The C API returns an integer where 1 is yes and 0 is no, this simply wraps that with a bool conversion. :param state: An active EnergyPlus "state" that is returned from a call to `api.state_manager.new_state()`. :return: A boolean indicating whether the warmup flag is on. """ return self.api.warmupFlag(state) == 1
[ "def", "warmup_flag", "(", "self", ",", "state", ":", "c_void_p", ")", "->", "bool", ":", "return", "self", ".", "api", ".", "warmupFlag", "(", "state", ")", "==", "1" ]
https://github.com/NREL/EnergyPlus/blob/fadc5973b85c70e8cc923efb69c144e808a26078/src/EnergyPlus/api/datatransfer.py#L1050-L1059
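The docstring_tokens column looks like a punctuation-aware split of the docstring in which sentence periods survive as tokens while commas, quotes, and backticks are dropped (compare the warmup_flag docstring and its token list above). A rough approximation under that assumption; the dataset's actual tokenizer is not documented in this preview:

```python
import re

def approx_docstring_tokens(docstring):
    # Split into word and punctuation tokens, then drop the characters that
    # do not appear in the token lists in this preview (commas, quotes,
    # backticks). Assumption: this only approximates the real tokenizer.
    tokens = re.findall(r"[A-Za-z0-9_]+|[^\w\s]", docstring)
    return [t for t in tokens if t not in {",", '"', "'", "`"}]

summary = ("Gets a flag for whether the warmup flag is currently on, "
           "signaling that EnergyPlus is still in the process of "
           "converging on warmup days.")
print(approx_docstring_tokens(summary))
# ['Gets', 'a', 'flag', ..., 'on', 'signaling', ..., 'days', '.']
```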
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
Image.GetDataBuffer
(*args, **kwargs)
return _core_.Image_GetDataBuffer(*args, **kwargs)
GetDataBuffer(self) -> PyObject Returns a writable Python buffer object that is pointing at the RGB image data buffer inside the wx.Image. You need to ensure that you do not use this buffer object after the image has been destroyed.
GetDataBuffer(self) -> PyObject
[ "GetDataBuffer", "(", "self", ")", "-", ">", "PyObject" ]
def GetDataBuffer(*args, **kwargs): """ GetDataBuffer(self) -> PyObject Returns a writable Python buffer object that is pointing at the RGB image data buffer inside the wx.Image. You need to ensure that you do not use this buffer object after the image has been destroyed. """ return _core_.Image_GetDataBuffer(*args, **kwargs)
[ "def", "GetDataBuffer", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Image_GetDataBuffer", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L3375-L3383
shizhuolin/PyCTP
3b3e160ef7ace423a04a09cc034d939db2444ef1
APIToPyCTP.py
python
generatestructcppheadercode
(codetree)
struct header
struct header
[ "struct", "header" ]
def generatestructcppheadercode(codetree): """struct header""" cppheadercodetemplate = "./src/UserApiStruct.h.template" cppheadercodefile = "./src/UserApiStruct.h" cppheadercode = "" for (key, value) in codetree.items(): if(value['type'] == 'struct'): cppheadercode += "\nPyObject *PyCTP_PyDict_FromStruct(" + key + " *p"+key.replace("CThostFtdc", "")+");" cppheadercode += "\nint PyCTP_Struct_FromPyDict(" + key + " *p" + key.replace("CThostFtdc", "") + ", PyObject *dict);" import re, chardet with open(cppheadercodetemplate, 'rb') as f: data = f.read() encoding = chardet.detect(data) sourcecode = data.decode(encoding['encoding']) sourcecode = re.sub(r'\{\{\s*body\s*\}\}', cppheadercode, sourcecode) with open(cppheadercodefile, 'wt') as f: f.write(sourcecode)
[ "def", "generatestructcppheadercode", "(", "codetree", ")", ":", "cppheadercodetemplate", "=", "\"./src/UserApiStruct.h.template\"", "cppheadercodefile", "=", "\"./src/UserApiStruct.h\"", "cppheadercode", "=", "\"\"", "for", "(", "key", ",", "value", ")", "in", "codetree", ".", "items", "(", ")", ":", "if", "(", "value", "[", "'type'", "]", "==", "'struct'", ")", ":", "cppheadercode", "+=", "\"\\nPyObject *PyCTP_PyDict_FromStruct(\"", "+", "key", "+", "\" *p\"", "+", "key", ".", "replace", "(", "\"CThostFtdc\"", ",", "\"\"", ")", "+", "\");\"", "cppheadercode", "+=", "\"\\nint PyCTP_Struct_FromPyDict(\"", "+", "key", "+", "\" *p\"", "+", "key", ".", "replace", "(", "\"CThostFtdc\"", ",", "\"\"", ")", "+", "\", PyObject *dict);\"", "import", "re", ",", "chardet", "with", "open", "(", "cppheadercodetemplate", ",", "'rb'", ")", "as", "f", ":", "data", "=", "f", ".", "read", "(", ")", "encoding", "=", "chardet", ".", "detect", "(", "data", ")", "sourcecode", "=", "data", ".", "decode", "(", "encoding", "[", "'encoding'", "]", ")", "sourcecode", "=", "re", ".", "sub", "(", "r'\\{\\{\\s*body\\s*\\}\\}'", ",", "cppheadercode", ",", "sourcecode", ")", "with", "open", "(", "cppheadercodefile", ",", "'wt'", ")", "as", "f", ":", "f", ".", "write", "(", "sourcecode", ")" ]
https://github.com/shizhuolin/PyCTP/blob/3b3e160ef7ace423a04a09cc034d939db2444ef1/APIToPyCTP.py#L179-L199
adobe/chromium
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
tools/isolate/isolate.py
python
load_results
(resultfile)
return data
Loads the previous results as an optimization.
Loads the previous results as an optimization.
[ "Loads", "the", "previous", "results", "as", "an", "optimization", "." ]
def load_results(resultfile): """Loads the previous results as an optimization.""" data = {} if resultfile and os.path.isfile(resultfile): resultfile = os.path.abspath(resultfile) with open(resultfile, 'r') as f: data = json.load(f) logging.debug('Loaded %s' % resultfile) else: resultfile = os.path.abspath(resultfile) logging.debug('%s was not found' % resultfile) # Works with native os.path.sep but stores as '/'. if 'files' in data and os.path.sep != '/': data['files'] = dict( (k.replace('/', os.path.sep), v) for k, v in data['files'].iteritems()) return data
[ "def", "load_results", "(", "resultfile", ")", ":", "data", "=", "{", "}", "if", "resultfile", "and", "os", ".", "path", ".", "isfile", "(", "resultfile", ")", ":", "resultfile", "=", "os", ".", "path", ".", "abspath", "(", "resultfile", ")", "with", "open", "(", "resultfile", ",", "'r'", ")", "as", "f", ":", "data", "=", "json", ".", "load", "(", "f", ")", "logging", ".", "debug", "(", "'Loaded %s'", "%", "resultfile", ")", "else", ":", "resultfile", "=", "os", ".", "path", ".", "abspath", "(", "resultfile", ")", "logging", ".", "debug", "(", "'%s was not found'", "%", "resultfile", ")", "# Works with native os.path.sep but stores as '/'.", "if", "'files'", "in", "data", "and", "os", ".", "path", ".", "sep", "!=", "'/'", ":", "data", "[", "'files'", "]", "=", "dict", "(", "(", "k", ".", "replace", "(", "'/'", ",", "os", ".", "path", ".", "sep", ")", ",", "v", ")", "for", "k", ",", "v", "in", "data", "[", "'files'", "]", ".", "iteritems", "(", ")", ")", "return", "data" ]
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/tools/isolate/isolate.py#L229-L246
PaddlePaddle/Paddle
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
python/paddle/fluid/contrib/quantize/quantize_transpiler.py
python
_quantized_scale_name
(var_name)
return "%s.scale" % (var_name)
Return quantized variable name for the input `var_name`.
Return quantized variable name for the input `var_name`.
[ "Return", "quantized", "variable", "name", "for", "the", "input", "var_name", "." ]
def _quantized_scale_name(var_name): """ Return quantized variable name for the input `var_name`. """ return "%s.scale" % (var_name)
[ "def", "_quantized_scale_name", "(", "var_name", ")", ":", "return", "\"%s.scale\"", "%", "(", "var_name", ")" ]
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/contrib/quantize/quantize_transpiler.py#L48-L52
envoyproxy/envoy
65541accdafe255e72310b4298d646e091da2d80
tools/api_proto_plugin/type_context.py
python
TypeContext.extend_field
(self, index, name)
return self._extend([2, index], 'field', name)
Extend type context with a field. Args: index: field index in message. name: field name.
Extend type context with a field.
[ "Extend", "type", "context", "with", "a", "field", "." ]
def extend_field(self, index, name): """Extend type context with a field. Args: index: field index in message. name: field name. """ return self._extend([2, index], 'field', name)
[ "def", "extend_field", "(", "self", ",", "index", ",", "name", ")", ":", "return", "self", ".", "_extend", "(", "[", "2", ",", "index", "]", ",", "'field'", ",", "name", ")" ]
https://github.com/envoyproxy/envoy/blob/65541accdafe255e72310b4298d646e091da2d80/tools/api_proto_plugin/type_context.py#L191-L198
openvinotoolkit/openvino
dedcbeafa8b84cccdc55ca64b8da516682b381c7
tools/mo/openvino/tools/mo/middle/passes/fusing/resnet_optimization.py
python
_simple_stride_prop
(graph: Graph, node: Node, spatial_dims, supported=True)
This function handles stride propagation for op nodes. If node is in supported ops dict so this is supported operation and we can propagate stride directly via this op (stride_prop will be set by using bottom stride_prop), otherwise we can't and stride_prop attr will be set as 1,1,1,1
This function handles stride propagation for op nodes. If node is in supported ops dict so this is supported operation and we can propagate stride directly via this op (stride_prop will be set by using bottom stride_prop), otherwise we can't and stride_prop attr will be set as 1,1,1,1
[ "This", "function", "handles", "stride", "propagation", "for", "op", "nodes", ".", "If", "node", "is", "in", "supported", "ops", "dict", "so", "this", "is", "supported", "operation", "and", "we", "can", "propagate", "stride", "directly", "via", "this", "op", "(", "stride_prop", "will", "be", "set", "by", "using", "bottom", "stride_prop", ")", "otherwise", "we", "can", "t", "and", "stride_prop", "attr", "will", "be", "set", "as", "1", "1", "1", "1" ]
def _simple_stride_prop(graph: Graph, node: Node, spatial_dims, supported=True): """ This function handles stride propagation for op nodes. If node is in supported ops dict so this is supported operation and we can propagate stride directly via this op (stride_prop will be set by using bottom stride_prop), otherwise we can't and stride_prop attr will be set as 1,1,1,1 """ next_ops = get_next_operation(node) stride_props, all_ops_are_valid = _check_next_ops(next_ops) if not supported or not all_ops_are_valid: # We have to insert pooling layers for op in next_ops: if op.has_valid('stride_prop') and not np.array_equal(op.stride_prop[spatial_dims], mo_array([1, 1])) and \ (op.has_valid('has_stride') == False or op.soft_get('has_stride') == False): _insert_pooling(graph, node.out_node(), op, spatial_dims) # If Convolution is valid then set `stride_prop` to Convolution stride node['stride_prop'] = mo_array([1, 1, 1, 1]) return for op in next_ops: if op.soft_get('has_stride') == True: op.stride = mo_array([1, 1, 1, 1]) log.debug("STRIDE PROP: {} {} strides was moved upper via {}".format(op.type, op.name, node.name)) node['stride_prop'] = mo_array(stride_props[0]) if len(stride_props) > 0 else mo_array([1, 1, 1, 1]) node['is_partial_inferred'] = False _clean_fw_tensor_attrs(node.out_node())
[ "def", "_simple_stride_prop", "(", "graph", ":", "Graph", ",", "node", ":", "Node", ",", "spatial_dims", ",", "supported", "=", "True", ")", ":", "next_ops", "=", "get_next_operation", "(", "node", ")", "stride_props", ",", "all_ops_are_valid", "=", "_check_next_ops", "(", "next_ops", ")", "if", "not", "supported", "or", "not", "all_ops_are_valid", ":", "# We have to insert pooling layers", "for", "op", "in", "next_ops", ":", "if", "op", ".", "has_valid", "(", "'stride_prop'", ")", "and", "not", "np", ".", "array_equal", "(", "op", ".", "stride_prop", "[", "spatial_dims", "]", ",", "mo_array", "(", "[", "1", ",", "1", "]", ")", ")", "and", "(", "op", ".", "has_valid", "(", "'has_stride'", ")", "==", "False", "or", "op", ".", "soft_get", "(", "'has_stride'", ")", "==", "False", ")", ":", "_insert_pooling", "(", "graph", ",", "node", ".", "out_node", "(", ")", ",", "op", ",", "spatial_dims", ")", "# If Convolution is valid then set `stride_prop` to Convolution stride", "node", "[", "'stride_prop'", "]", "=", "mo_array", "(", "[", "1", ",", "1", ",", "1", ",", "1", "]", ")", "return", "for", "op", "in", "next_ops", ":", "if", "op", ".", "soft_get", "(", "'has_stride'", ")", "==", "True", ":", "op", ".", "stride", "=", "mo_array", "(", "[", "1", ",", "1", ",", "1", ",", "1", "]", ")", "log", ".", "debug", "(", "\"STRIDE PROP: {} {} strides was moved upper via {}\"", ".", "format", "(", "op", ".", "type", ",", "op", ".", "name", ",", "node", ".", "name", ")", ")", "node", "[", "'stride_prop'", "]", "=", "mo_array", "(", "stride_props", "[", "0", "]", ")", "if", "len", "(", "stride_props", ")", ">", "0", "else", "mo_array", "(", "[", "1", ",", "1", ",", "1", ",", "1", "]", ")", "node", "[", "'is_partial_inferred'", "]", "=", "False", "_clean_fw_tensor_attrs", "(", "node", ".", "out_node", "(", ")", ")" ]
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/mo/openvino/tools/mo/middle/passes/fusing/resnet_optimization.py#L59-L85
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/py_vulcanize/third_party/rcssmin/_setup/py3/commands.py
python
Build.initialize_options
(self)
Prepare for new options
Prepare for new options
[ "Prepare", "for", "new", "options" ]
def initialize_options(self): """ Prepare for new options """ _build.build.initialize_options(self) if 'build' in _option_defaults: for opt_name, default in _option_defaults['build']: setattr(self, opt_name, default)
[ "def", "initialize_options", "(", "self", ")", ":", "_build", ".", "build", ".", "initialize_options", "(", "self", ")", "if", "'build'", "in", "_option_defaults", ":", "for", "opt_name", ",", "default", "in", "_option_defaults", "[", "'build'", "]", ":", "setattr", "(", "self", ",", "opt_name", ",", "default", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/py_vulcanize/third_party/rcssmin/_setup/py3/commands.py#L251-L256
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/ops/operations/array_ops.py
python
Argmin.__init__
(self, axis=-1, output_type=mstype.int32)
Initialize Argmin
Initialize Argmin
[ "Initialize", "Argmin" ]
def __init__(self, axis=-1, output_type=mstype.int32): """Initialize Argmin""" self.init_prim_io_names(inputs=['x'], outputs=['output']) validator.check_value_type("axis", axis, [int], self.name) validator.check_type_name("output_type", output_type, [mstype.int32, mstype.int64], self.name) self.axis = axis self.add_prim_attr('output_type', output_type)
[ "def", "__init__", "(", "self", ",", "axis", "=", "-", "1", ",", "output_type", "=", "mstype", ".", "int32", ")", ":", "self", ".", "init_prim_io_names", "(", "inputs", "=", "[", "'x'", "]", ",", "outputs", "=", "[", "'output'", "]", ")", "validator", ".", "check_value_type", "(", "\"axis\"", ",", "axis", ",", "[", "int", "]", ",", "self", ".", "name", ")", "validator", ".", "check_type_name", "(", "\"output_type\"", ",", "output_type", ",", "[", "mstype", ".", "int32", ",", "mstype", ".", "int64", "]", ",", "self", ".", "name", ")", "self", ".", "axis", "=", "axis", "self", ".", "add_prim_attr", "(", "'output_type'", ",", "output_type", ")" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/operations/array_ops.py#L1777-L1783
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/python/summary/event_file_inspector.py
python
get_dict_to_print
(field_to_obs)
return output
Transform the field-to-obs mapping into a printable dictionary. Args: field_to_obs: Dict that maps string field to `Observation` list. Returns: A dict with the keys and values to print to console.
Transform the field-to-obs mapping into a printable dictionary.
[ "Transform", "the", "field", "-", "to", "-", "obs", "mapping", "into", "a", "printable", "dictionary", "." ]
def get_dict_to_print(field_to_obs): """Transform the field-to-obs mapping into a printable dictionary. Args: field_to_obs: Dict that maps string field to `Observation` list. Returns: A dict with the keys and values to print to console. """ def compressed_steps(steps): return {'num_steps': len(set(steps)), 'min_step': min(steps), 'max_step': max(steps), 'last_step': steps[-1], 'first_step': steps[0], 'outoforder_steps': get_out_of_order(steps)} def full_steps(steps): return {'steps': steps, 'outoforder_steps': get_out_of_order(steps)} output = {} for field, observations in field_to_obs.items(): if not observations: output[field] = None continue steps = [x['step'] for x in observations] if field in SHORT_FIELDS: output[field] = compressed_steps(steps) if field in LONG_FIELDS: output[field] = full_steps(steps) return output
[ "def", "get_dict_to_print", "(", "field_to_obs", ")", ":", "def", "compressed_steps", "(", "steps", ")", ":", "return", "{", "'num_steps'", ":", "len", "(", "set", "(", "steps", ")", ")", ",", "'min_step'", ":", "min", "(", "steps", ")", ",", "'max_step'", ":", "max", "(", "steps", ")", ",", "'last_step'", ":", "steps", "[", "-", "1", "]", ",", "'first_step'", ":", "steps", "[", "0", "]", ",", "'outoforder_steps'", ":", "get_out_of_order", "(", "steps", ")", "}", "def", "full_steps", "(", "steps", ")", ":", "return", "{", "'steps'", ":", "steps", ",", "'outoforder_steps'", ":", "get_out_of_order", "(", "steps", ")", "}", "output", "=", "{", "}", "for", "field", ",", "observations", "in", "field_to_obs", ".", "items", "(", ")", ":", "if", "not", "observations", ":", "output", "[", "field", "]", "=", "None", "continue", "steps", "=", "[", "x", "[", "'step'", "]", "for", "x", "in", "observations", "]", "if", "field", "in", "SHORT_FIELDS", ":", "output", "[", "field", "]", "=", "compressed_steps", "(", "steps", ")", "if", "field", "in", "LONG_FIELDS", ":", "output", "[", "field", "]", "=", "full_steps", "(", "steps", ")", "return", "output" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/summary/event_file_inspector.py#L254-L287
cvxpy/cvxpy
5165b4fb750dfd237de8659383ef24b4b2e33aaf
cvxpy/atoms/affine/reshape.py
python
reshape.validate_arguments
(self)
Checks that the new shape has the same number of entries as the old.
Checks that the new shape has the same number of entries as the old.
[ "Checks", "that", "the", "new", "shape", "has", "the", "same", "number", "of", "entries", "as", "the", "old", "." ]
def validate_arguments(self) -> None: """Checks that the new shape has the same number of entries as the old. """ old_len = self.args[0].size new_len = np.prod(self._shape, dtype=int) if not old_len == new_len: raise ValueError( "Invalid reshape dimensions %s." % (self._shape,) )
[ "def", "validate_arguments", "(", "self", ")", "->", "None", ":", "old_len", "=", "self", ".", "args", "[", "0", "]", ".", "size", "new_len", "=", "np", ".", "prod", "(", "self", ".", "_shape", ",", "dtype", "=", "int", ")", "if", "not", "old_len", "==", "new_len", ":", "raise", "ValueError", "(", "\"Invalid reshape dimensions %s.\"", "%", "(", "self", ".", "_shape", ",", ")", ")" ]
https://github.com/cvxpy/cvxpy/blob/5165b4fb750dfd237de8659383ef24b4b2e33aaf/cvxpy/atoms/affine/reshape.py#L72-L80
luliyucoordinate/Leetcode
96afcdc54807d1d184e881a075d1dbf3371e31fb
src/0919-Complete-Binary-Tree-Inserter/0919.py
python
CBTInserter.__init__
(self, root)
:type root: TreeNode
:type root: TreeNode
[ ":", "type", "root", ":", "TreeNode" ]
def __init__(self, root): """ :type root: TreeNode """ self.data = list() self.data.append(root) for node in self.data: if node.left: self.data.append(node.left) if node.right: self.data.append(node.right)
[ "def", "__init__", "(", "self", ",", "root", ")", ":", "self", ".", "data", "=", "list", "(", ")", "self", ".", "data", ".", "append", "(", "root", ")", "for", "node", "in", "self", ".", "data", ":", "if", "node", ".", "left", ":", "self", ".", "data", ".", "append", "(", "node", ".", "left", ")", "if", "node", ".", "right", ":", "self", ".", "data", ".", "append", "(", "node", ".", "right", ")" ]
https://github.com/luliyucoordinate/Leetcode/blob/96afcdc54807d1d184e881a075d1dbf3371e31fb/src/0919-Complete-Binary-Tree-Inserter/0919.py#L9-L19
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/_collections_abc.py
python
MutableMapping.clear
(self)
D.clear() -> None. Remove all items from D.
D.clear() -> None. Remove all items from D.
[ "D", ".", "clear", "()", "-", ">", "None", ".", "Remove", "all", "items", "from", "D", "." ]
def clear(self): 'D.clear() -> None. Remove all items from D.' try: while True: self.popitem() except KeyError: pass
[ "def", "clear", "(", "self", ")", ":", "try", ":", "while", "True", ":", "self", ".", "popitem", "(", ")", "except", "KeyError", ":", "pass" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/_collections_abc.py#L816-L822
intel/llvm
e6d0547e9d99b5a56430c4749f6c7e328bf221ab
lldb/examples/python/bsd.py
python
Interactive.default
(self, line)
return True
Catch all for unknown command, which will exit the interpreter.
Catch all for unknown command, which will exit the interpreter.
[ "Catch", "all", "for", "unknown", "command", "which", "will", "exit", "the", "interpreter", "." ]
def default(self, line): '''Catch all for unknown command, which will exit the interpreter.''' print("unknown command: %s" % line) return True
[ "def", "default", "(", "self", ",", "line", ")", ":", "print", "(", "\"unknown command: %s\"", "%", "line", ")", "return", "True" ]
https://github.com/intel/llvm/blob/e6d0547e9d99b5a56430c4749f6c7e328bf221ab/lldb/examples/python/bsd.py#L220-L223
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/bayesflow/python/ops/monte_carlo_impl.py
python
expectation
(f, p, z=None, n=None, seed=None, name='expectation')
r"""Monte Carlo estimate of an expectation: `E_p[f(Z)]` with sample mean. This `Op` returns ``` n^{-1} sum_{i=1}^n f(z_i), where z_i ~ p \approx E_p[f(Z)] ``` User supplies either `Tensor` of samples `z`, or number of samples to draw `n` Args: f: Callable mapping samples from `p` to `Tensors`. p: `tf.contrib.distributions.Distribution`. z: `Tensor` of samples from `p`, produced by `p.sample` for some `n`. n: Integer `Tensor`. Number of samples to generate if `z` is not provided. seed: Python integer to seed the random number generator. name: A name to give this `Op`. Returns: A `Tensor` with the same `dtype` as `p`. Example: ```python N_samples = 10000 distributions = tf.contrib.distributions dist = distributions.Uniform([0.0, 0.0], [1.0, 2.0]) elementwise_mean = lambda x: x mean_sum = lambda x: tf.reduce_sum(x, 1) estimate_elementwise_mean_tf = monte_carlo.expectation(elementwise_mean, dist, n=N_samples) estimate_mean_sum_tf = monte_carlo.expectation(mean_sum, dist, n=N_samples) with tf.Session() as sess: estimate_elementwise_mean, estimate_mean_sum = ( sess.run([estimate_elementwise_mean_tf, estimate_mean_sum_tf])) print estimate_elementwise_mean >>> np.array([ 0.50018013 1.00097895], dtype=np.float32) print estimate_mean_sum >>> 1.49571 ```
r"""Monte Carlo estimate of an expectation: `E_p[f(Z)]` with sample mean.
[ "r", "Monte", "Carlo", "estimate", "of", "an", "expectation", ":", "E_p", "[", "f", "(", "Z", ")", "]", "with", "sample", "mean", "." ]
def expectation(f, p, z=None, n=None, seed=None, name='expectation'): r"""Monte Carlo estimate of an expectation: `E_p[f(Z)]` with sample mean. This `Op` returns ``` n^{-1} sum_{i=1}^n f(z_i), where z_i ~ p \approx E_p[f(Z)] ``` User supplies either `Tensor` of samples `z`, or number of samples to draw `n` Args: f: Callable mapping samples from `p` to `Tensors`. p: `tf.contrib.distributions.Distribution`. z: `Tensor` of samples from `p`, produced by `p.sample` for some `n`. n: Integer `Tensor`. Number of samples to generate if `z` is not provided. seed: Python integer to seed the random number generator. name: A name to give this `Op`. Returns: A `Tensor` with the same `dtype` as `p`. Example: ```python N_samples = 10000 distributions = tf.contrib.distributions dist = distributions.Uniform([0.0, 0.0], [1.0, 2.0]) elementwise_mean = lambda x: x mean_sum = lambda x: tf.reduce_sum(x, 1) estimate_elementwise_mean_tf = monte_carlo.expectation(elementwise_mean, dist, n=N_samples) estimate_mean_sum_tf = monte_carlo.expectation(mean_sum, dist, n=N_samples) with tf.Session() as sess: estimate_elementwise_mean, estimate_mean_sum = ( sess.run([estimate_elementwise_mean_tf, estimate_mean_sum_tf])) print estimate_elementwise_mean >>> np.array([ 0.50018013 1.00097895], dtype=np.float32) print estimate_mean_sum >>> 1.49571 ``` """ with ops.name_scope(name, values=[n, z]): z = _get_samples(p, z, n, seed) return _sample_mean(f(z))
[ "def", "expectation", "(", "f", ",", "p", ",", "z", "=", "None", ",", "n", "=", "None", ",", "seed", "=", "None", ",", "name", "=", "'expectation'", ")", ":", "with", "ops", ".", "name_scope", "(", "name", ",", "values", "=", "[", "n", ",", "z", "]", ")", ":", "z", "=", "_get_samples", "(", "p", ",", "z", ",", "n", ",", "seed", ")", "return", "_sample_mean", "(", "f", "(", "z", ")", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/bayesflow/python/ops/monte_carlo_impl.py#L198-L252
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tkinter.py
python
PanedWindow.proxy_forget
(self)
return self.proxy("forget")
Remove the proxy from the display.
Remove the proxy from the display.
[ "Remove", "the", "proxy", "from", "the", "display", "." ]
def proxy_forget(self): """Remove the proxy from the display. """ return self.proxy("forget")
[ "def", "proxy_forget", "(", "self", ")", ":", "return", "self", ".", "proxy", "(", "\"forget\"", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tkinter.py#L3609-L3612
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/pdb.py
python
Pdb.do_display
(self, arg)
display [expression] Display the value of the expression if it changed, each time execution stops in the current frame. Without expression, list all display expressions for the current frame.
display [expression]
[ "display", "[", "expression", "]" ]
def do_display(self, arg): """display [expression] Display the value of the expression if it changed, each time execution stops in the current frame. Without expression, list all display expressions for the current frame. """ if not arg: self.message('Currently displaying:') for item in self.displaying.get(self.curframe, {}).items(): self.message('%s: %r' % item) else: val = self._getval_except(arg) self.displaying.setdefault(self.curframe, {})[arg] = val self.message('display %s: %r' % (arg, val))
[ "def", "do_display", "(", "self", ",", "arg", ")", ":", "if", "not", "arg", ":", "self", ".", "message", "(", "'Currently displaying:'", ")", "for", "item", "in", "self", ".", "displaying", ".", "get", "(", "self", ".", "curframe", ",", "{", "}", ")", ".", "items", "(", ")", ":", "self", ".", "message", "(", "'%s: %r'", "%", "item", ")", "else", ":", "val", "=", "self", ".", "_getval_except", "(", "arg", ")", "self", ".", "displaying", ".", "setdefault", "(", "self", ".", "curframe", ",", "{", "}", ")", "[", "arg", "]", "=", "val", "self", ".", "message", "(", "'display %s: %r'", "%", "(", "arg", ",", "val", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/pdb.py#L1334-L1349
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/parso/py3/parso/cache.py
python
load_module
(hashed_grammar, file_io, cache_path=None)
Returns a module or None, if it fails.
Returns a module or None, if it fails.
[ "Returns", "a", "module", "or", "None", "if", "it", "fails", "." ]
def load_module(hashed_grammar, file_io, cache_path=None): """ Returns a module or None, if it fails. """ p_time = file_io.get_last_modified() if p_time is None: return None try: module_cache_item = parser_cache[hashed_grammar][file_io.path] if p_time <= module_cache_item.change_time: module_cache_item.last_used = time.time() return module_cache_item.node except KeyError: return _load_from_file_system( hashed_grammar, file_io.path, p_time, cache_path=cache_path )
[ "def", "load_module", "(", "hashed_grammar", ",", "file_io", ",", "cache_path", "=", "None", ")", ":", "p_time", "=", "file_io", ".", "get_last_modified", "(", ")", "if", "p_time", "is", "None", ":", "return", "None", "try", ":", "module_cache_item", "=", "parser_cache", "[", "hashed_grammar", "]", "[", "file_io", ".", "path", "]", "if", "p_time", "<=", "module_cache_item", ".", "change_time", ":", "module_cache_item", ".", "last_used", "=", "time", ".", "time", "(", ")", "return", "module_cache_item", ".", "node", "except", "KeyError", ":", "return", "_load_from_file_system", "(", "hashed_grammar", ",", "file_io", ".", "path", ",", "p_time", ",", "cache_path", "=", "cache_path", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/parso/py3/parso/cache.py#L113-L132
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tkinter.py
python
Wm.wm_protocol
(self, name=None, func=None)
return self.tk.call( 'wm', 'protocol', self._w, name, command)
Bind function FUNC to command NAME for this widget. Return the function bound to NAME if None is given. NAME could be e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW".
Bind function FUNC to command NAME for this widget. Return the function bound to NAME if None is given. NAME could be e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW".
[ "Bind", "function", "FUNC", "to", "command", "NAME", "for", "this", "widget", ".", "Return", "the", "function", "bound", "to", "NAME", "if", "None", "is", "given", ".", "NAME", "could", "be", "e", ".", "g", ".", "WM_SAVE_YOURSELF", "or", "WM_DELETE_WINDOW", "." ]
def wm_protocol(self, name=None, func=None): """Bind function FUNC to command NAME for this widget. Return the function bound to NAME if None is given. NAME could be e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW".""" if hasattr(func, '__call__'): command = self._register(func) else: command = func return self.tk.call( 'wm', 'protocol', self._w, name, command)
[ "def", "wm_protocol", "(", "self", ",", "name", "=", "None", ",", "func", "=", "None", ")", ":", "if", "hasattr", "(", "func", ",", "'__call__'", ")", ":", "command", "=", "self", ".", "_register", "(", "func", ")", "else", ":", "command", "=", "func", "return", "self", ".", "tk", ".", "call", "(", "'wm'", ",", "'protocol'", ",", "self", ".", "_w", ",", "name", ",", "command", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tkinter.py#L1678-L1687
turi-code/SFrame
796b9bdfb2fa1b881d82080754643c7e68629cd2
oss_src/unity/python/sframe/extensions.py
python
_build_native_function_call
(fn)
return _Closure(native_function_name, arglist)
If fn can be interpreted and handled as a native function: i.e. fn is one of the extensions, or fn is a simple lambda closure using one of the extensions. fn = gl.extensions.add fn = lambda x: gl.extensions.add(5) Then, this returns a closure object, which describes the function call which can then be passed to C++. Returns a _Closure object on success, raises an exception on failure.
If fn can be interpreted and handled as a native function: i.e. fn is one of the extensions, or fn is a simple lambda closure using one of the extensions.
[ "If", "fn", "can", "be", "interpreted", "and", "handled", "as", "a", "native", "function", ":", "i", ".", "e", ".", "fn", "is", "one", "of", "the", "extensions", "or", "fn", "is", "a", "simple", "lambda", "closure", "using", "one", "of", "the", "extensions", "." ]
def _build_native_function_call(fn): """ If fn can be interpreted and handled as a native function: i.e. fn is one of the extensions, or fn is a simple lambda closure using one of the extensions. fn = gl.extensions.add fn = lambda x: gl.extensions.add(5) Then, this returns a closure object, which describes the function call which can then be passed to C++. Returns a _Closure object on success, raises an exception on failure. """ # See if fn is the native function itself native_function_name = _get_toolkit_function_name_from_function(fn) if native_function_name != "": # yup! # generate an "identity" argument list argnames = _get_argument_list_from_toolkit_function_name(native_function_name) arglist = [[0, i] for i in range(len(argnames))] return _Closure(native_function_name, arglist) # ok. its not a native function from .util.lambda_closure_capture import translate from .util.lambda_closure_capture import Parameter # Lets see if it is a simple lambda capture = translate(fn) # ok. build up the closure arguments # Try to pick up the lambda function = _descend_namespace(capture.caller_globals, capture.closure_fn_name) native_function_name = _get_toolkit_function_name_from_function(function) if native_function_name == "": raise RuntimeError("Lambda does not contain a native function") argnames = _get_argument_list_from_toolkit_function_name(native_function_name) # ok. build up the argument list. this is mildly annoying due to the mix of # positional and named arguments # make an argument list with a placeholder for everything first arglist = [[-1, i] for i in argnames] # loop through the positional arguments for i in range(len(capture.positional_args)): arg = capture.positional_args[i] if type(arg) is Parameter: # This is a lambda argument # arg.name is the actual string of the argument # here we need the index arglist[i] = [0, capture.input_arg_names.index(arg.name)] else: # this is a captured value arglist[i] = [1, arg] # now. the named arguments are somewhat annoying for i in capture.named_args: arg = capture.named_args[i] if type(arg) is Parameter: # This is a lambda argument # arg.name is the actual string of the argument # here we need the index arglist[argnames.index(i)] = [0, capture.input_arg_names.index(arg.name)] else: # this is a captured value arglist[argnames.index(i)] = [1, arg] # done. Make sure all arguments are filled for i in arglist: if i[0] == -1: raise RuntimeError("Incomplete function specification") # attempt to recursively break down any other functions import inspect for i in range(len(arglist)): if arglist[i][0] == 1 and inspect.isfunction(arglist[i][1]): try: arglist[i][1] = _build_native_function_call(arglist[i][1]) except: pass return _Closure(native_function_name, arglist)
[ "def", "_build_native_function_call", "(", "fn", ")", ":", "# See if fn is the native function itself", "native_function_name", "=", "_get_toolkit_function_name_from_function", "(", "fn", ")", "if", "native_function_name", "!=", "\"\"", ":", "# yup!", "# generate an \"identity\" argument list", "argnames", "=", "_get_argument_list_from_toolkit_function_name", "(", "native_function_name", ")", "arglist", "=", "[", "[", "0", ",", "i", "]", "for", "i", "in", "range", "(", "len", "(", "argnames", ")", ")", "]", "return", "_Closure", "(", "native_function_name", ",", "arglist", ")", "# ok. its not a native function", "from", ".", "util", ".", "lambda_closure_capture", "import", "translate", "from", ".", "util", ".", "lambda_closure_capture", "import", "Parameter", "# Lets see if it is a simple lambda", "capture", "=", "translate", "(", "fn", ")", "# ok. build up the closure arguments", "# Try to pick up the lambda", "function", "=", "_descend_namespace", "(", "capture", ".", "caller_globals", ",", "capture", ".", "closure_fn_name", ")", "native_function_name", "=", "_get_toolkit_function_name_from_function", "(", "function", ")", "if", "native_function_name", "==", "\"\"", ":", "raise", "RuntimeError", "(", "\"Lambda does not contain a native function\"", ")", "argnames", "=", "_get_argument_list_from_toolkit_function_name", "(", "native_function_name", ")", "# ok. build up the argument list. this is mildly annoying due to the mix of", "# positional and named arguments", "# make an argument list with a placeholder for everything first", "arglist", "=", "[", "[", "-", "1", ",", "i", "]", "for", "i", "in", "argnames", "]", "# loop through the positional arguments", "for", "i", "in", "range", "(", "len", "(", "capture", ".", "positional_args", ")", ")", ":", "arg", "=", "capture", ".", "positional_args", "[", "i", "]", "if", "type", "(", "arg", ")", "is", "Parameter", ":", "# This is a lambda argument", "# arg.name is the actual string of the argument", "# here we need the index", "arglist", "[", "i", "]", "=", "[", "0", ",", "capture", ".", "input_arg_names", ".", "index", "(", "arg", ".", "name", ")", "]", "else", ":", "# this is a captured value", "arglist", "[", "i", "]", "=", "[", "1", ",", "arg", "]", "# now. the named arguments are somewhat annoying", "for", "i", "in", "capture", ".", "named_args", ":", "arg", "=", "capture", ".", "named_args", "[", "i", "]", "if", "type", "(", "arg", ")", "is", "Parameter", ":", "# This is a lambda argument", "# arg.name is the actual string of the argument", "# here we need the index", "arglist", "[", "argnames", ".", "index", "(", "i", ")", "]", "=", "[", "0", ",", "capture", ".", "input_arg_names", ".", "index", "(", "arg", ".", "name", ")", "]", "else", ":", "# this is a captured value", "arglist", "[", "argnames", ".", "index", "(", "i", ")", "]", "=", "[", "1", ",", "arg", "]", "# done. 
Make sure all arguments are filled", "for", "i", "in", "arglist", ":", "if", "i", "[", "0", "]", "==", "-", "1", ":", "raise", "RuntimeError", "(", "\"Incomplete function specification\"", ")", "# attempt to recursively break down any other functions", "import", "inspect", "for", "i", "in", "range", "(", "len", "(", "arglist", ")", ")", ":", "if", "arglist", "[", "i", "]", "[", "0", "]", "==", "1", "and", "inspect", ".", "isfunction", "(", "arglist", "[", "i", "]", "[", "1", "]", ")", ":", "try", ":", "arglist", "[", "i", "]", "[", "1", "]", "=", "_build_native_function_call", "(", "arglist", "[", "i", "]", "[", "1", "]", ")", "except", ":", "pass", "return", "_Closure", "(", "native_function_name", ",", "arglist", ")" ]
https://github.com/turi-code/SFrame/blob/796b9bdfb2fa1b881d82080754643c7e68629cd2/oss_src/unity/python/sframe/extensions.py#L658-L738
google/gemmlowp
e844ffd17118c1e17d94e1ba4354c075a4577b88
meta/generators/quantized_mul_kernels_common.py
python
_GenerateNxMLoadMultiplyAggregate
(emitter, registers, kernel_m, kernel_n, aggregators, lhs, rhs, count)
Emit inner loop for N rows x M cols multiplication.
Emit inner loop for N rows x M cols multiplication.
[ "Emit", "inner", "loop", "for", "N", "rows", "x", "M", "cols", "multiplication", "." ]
def _GenerateNxMLoadMultiplyAggregate(emitter, registers, kernel_m, kernel_n, aggregators, lhs, rhs, count): """Emit inner loop for N rows x M cols multiplication.""" emitter.EmitNewline() emitter.EmitComment('General NxM lanes loop.') emitter.EmitNumericalLabel(1) emitter.EmitNewline() emitter.EmitComment('Subtract counter.') emitter.EmitSubs(count, count, emitter.ImmediateConstant(8)) emitter.EmitNewline() lhs_load = [registers.DoubleRegister() for unused_i in range(kernel_m)] rhs_load = [registers.DoubleRegister() for unused_i in range(kernel_n)] emitter.EmitVLoadAE(8 * kernel_m, 8, lhs_load, lhs, 64) emitter.EmitVLoadAE(8 * kernel_n, 8, rhs_load, rhs, 64) emitter.EmitPldOffset(lhs, emitter.ImmediateConstant(64)) emitter.EmitPldOffset(rhs, emitter.ImmediateConstant(64)) results = [ registers.QuadRegister() for unused_i in range(kernel_m * kernel_n) ] for row in range(kernel_m): for col in range(kernel_n): index = row * kernel_n + col emitter.EmitVMull('u8', results[index], rhs_load[col], lhs_load[row]) for i in range(kernel_m * kernel_n): emitter.EmitVPadal('u16', aggregators[i], results[i]) emitter.EmitNewline() emitter.EmitComment('Loop break.') emitter.EmitBgtBack(1) registers.FreeRegisters(lhs_load + rhs_load + results)
[ "def", "_GenerateNxMLoadMultiplyAggregate", "(", "emitter", ",", "registers", ",", "kernel_m", ",", "kernel_n", ",", "aggregators", ",", "lhs", ",", "rhs", ",", "count", ")", ":", "emitter", ".", "EmitNewline", "(", ")", "emitter", ".", "EmitComment", "(", "'General NxM lanes loop.'", ")", "emitter", ".", "EmitNumericalLabel", "(", "1", ")", "emitter", ".", "EmitNewline", "(", ")", "emitter", ".", "EmitComment", "(", "'Subtract counter.'", ")", "emitter", ".", "EmitSubs", "(", "count", ",", "count", ",", "emitter", ".", "ImmediateConstant", "(", "8", ")", ")", "emitter", ".", "EmitNewline", "(", ")", "lhs_load", "=", "[", "registers", ".", "DoubleRegister", "(", ")", "for", "unused_i", "in", "range", "(", "kernel_m", ")", "]", "rhs_load", "=", "[", "registers", ".", "DoubleRegister", "(", ")", "for", "unused_i", "in", "range", "(", "kernel_n", ")", "]", "emitter", ".", "EmitVLoadAE", "(", "8", "*", "kernel_m", ",", "8", ",", "lhs_load", ",", "lhs", ",", "64", ")", "emitter", ".", "EmitVLoadAE", "(", "8", "*", "kernel_n", ",", "8", ",", "rhs_load", ",", "rhs", ",", "64", ")", "emitter", ".", "EmitPldOffset", "(", "lhs", ",", "emitter", ".", "ImmediateConstant", "(", "64", ")", ")", "emitter", ".", "EmitPldOffset", "(", "rhs", ",", "emitter", ".", "ImmediateConstant", "(", "64", ")", ")", "results", "=", "[", "registers", ".", "QuadRegister", "(", ")", "for", "unused_i", "in", "range", "(", "kernel_m", "*", "kernel_n", ")", "]", "for", "row", "in", "range", "(", "kernel_m", ")", ":", "for", "col", "in", "range", "(", "kernel_n", ")", ":", "index", "=", "row", "*", "kernel_n", "+", "col", "emitter", ".", "EmitVMull", "(", "'u8'", ",", "results", "[", "index", "]", ",", "rhs_load", "[", "col", "]", ",", "lhs_load", "[", "row", "]", ")", "for", "i", "in", "range", "(", "kernel_m", "*", "kernel_n", ")", ":", "emitter", ".", "EmitVPadal", "(", "'u16'", ",", "aggregators", "[", "i", "]", ",", "results", "[", "i", "]", ")", "emitter", ".", "EmitNewline", "(", ")", "emitter", ".", "EmitComment", "(", "'Loop break.'", ")", "emitter", ".", "EmitBgtBack", "(", "1", ")", "registers", ".", "FreeRegisters", "(", "lhs_load", "+", "rhs_load", "+", "results", ")" ]
https://github.com/google/gemmlowp/blob/e844ffd17118c1e17d94e1ba4354c075a4577b88/meta/generators/quantized_mul_kernels_common.py#L410-L446
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
third_party/WebKit/Source/bindings/scripts/idl_definitions.py
python
IdlDefinitions.__init__
(self, idl_name, node)
Args: node: AST root node, class == 'File'
Args: node: AST root node, class == 'File'
[ "Args", ":", "node", ":", "AST", "root", "node", "class", "==", "File" ]
def __init__(self, idl_name, node): """Args: node: AST root node, class == 'File'""" self.callback_functions = {} self.dictionaries = {} self.enumerations = {} self.implements = [] self.interfaces = {} self.idl_name = idl_name self.typedefs = {} node_class = node.GetClass() if node_class != 'File': raise ValueError('Unrecognized node class: %s' % node_class) children = node.GetChildren() for child in children: child_class = child.GetClass() if child_class == 'Interface': interface = IdlInterface(idl_name, child) self.interfaces[interface.name] = interface elif child_class == 'Exception': exception = IdlException(idl_name, child) # For simplicity, treat exceptions as interfaces self.interfaces[exception.name] = exception elif child_class == 'Typedef': typedef = IdlTypedef(child) self.typedefs[typedef.name] = typedef elif child_class == 'Enum': enumeration = IdlEnum(idl_name, child) self.enumerations[enumeration.name] = enumeration elif child_class == 'Callback': callback_function = IdlCallbackFunction(idl_name, child) self.callback_functions[callback_function.name] = callback_function elif child_class == 'Implements': self.implements.append(IdlImplement(child)) elif child_class == 'Dictionary': dictionary = IdlDictionary(idl_name, child) self.dictionaries[dictionary.name] = dictionary else: raise ValueError('Unrecognized node class: %s' % child_class)
[ "def", "__init__", "(", "self", ",", "idl_name", ",", "node", ")", ":", "self", ".", "callback_functions", "=", "{", "}", "self", ".", "dictionaries", "=", "{", "}", "self", ".", "enumerations", "=", "{", "}", "self", ".", "implements", "=", "[", "]", "self", ".", "interfaces", "=", "{", "}", "self", ".", "idl_name", "=", "idl_name", "self", ".", "typedefs", "=", "{", "}", "node_class", "=", "node", ".", "GetClass", "(", ")", "if", "node_class", "!=", "'File'", ":", "raise", "ValueError", "(", "'Unrecognized node class: %s'", "%", "node_class", ")", "children", "=", "node", ".", "GetChildren", "(", ")", "for", "child", "in", "children", ":", "child_class", "=", "child", ".", "GetClass", "(", ")", "if", "child_class", "==", "'Interface'", ":", "interface", "=", "IdlInterface", "(", "idl_name", ",", "child", ")", "self", ".", "interfaces", "[", "interface", ".", "name", "]", "=", "interface", "elif", "child_class", "==", "'Exception'", ":", "exception", "=", "IdlException", "(", "idl_name", ",", "child", ")", "# For simplicity, treat exceptions as interfaces", "self", ".", "interfaces", "[", "exception", ".", "name", "]", "=", "exception", "elif", "child_class", "==", "'Typedef'", ":", "typedef", "=", "IdlTypedef", "(", "child", ")", "self", ".", "typedefs", "[", "typedef", ".", "name", "]", "=", "typedef", "elif", "child_class", "==", "'Enum'", ":", "enumeration", "=", "IdlEnum", "(", "idl_name", ",", "child", ")", "self", ".", "enumerations", "[", "enumeration", ".", "name", "]", "=", "enumeration", "elif", "child_class", "==", "'Callback'", ":", "callback_function", "=", "IdlCallbackFunction", "(", "idl_name", ",", "child", ")", "self", ".", "callback_functions", "[", "callback_function", ".", "name", "]", "=", "callback_function", "elif", "child_class", "==", "'Implements'", ":", "self", ".", "implements", ".", "append", "(", "IdlImplement", "(", "child", ")", ")", "elif", "child_class", "==", "'Dictionary'", ":", "dictionary", "=", "IdlDictionary", "(", "idl_name", ",", "child", ")", "self", ".", "dictionaries", "[", "dictionary", ".", "name", "]", "=", "dictionary", "else", ":", "raise", "ValueError", "(", "'Unrecognized node class: %s'", "%", "child_class", ")" ]
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Source/bindings/scripts/idl_definitions.py#L91-L130
kamyu104/LeetCode-Solutions
77605708a927ea3b85aee5a479db733938c7c211
Python/peeking-iterator.py
python
PeekingIterator.next
(self)
return self.val_
:rtype: int
:rtype: int
[ ":", "rtype", ":", "int" ]
def next(self): """ :rtype: int """ self.val_ = self.peek() self.has_peeked_ = False self.has_next_ = self.iterator.hasNext() return self.val_
[ "def", "next", "(", "self", ")", ":", "self", ".", "val_", "=", "self", ".", "peek", "(", ")", "self", ".", "has_peeked_", "=", "False", "self", ".", "has_next_", "=", "self", ".", "iterator", ".", "hasNext", "(", ")", "return", "self", ".", "val_" ]
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/peeking-iterator.py#L26-L33
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/aui.py
python
AuiManager.AddPane
(*args, **kwargs)
return _aui.AuiManager_AddPane(*args, **kwargs)
AddPane(self, Window window, AuiPaneInfo paneInfo, Point dropPos) -> bool
AddPane(self, Window window, AuiPaneInfo paneInfo, Point dropPos) -> bool
[ "AddPane", "(", "self", "Window", "window", "AuiPaneInfo", "paneInfo", "Point", "dropPos", ")", "-", ">", "bool" ]
def AddPane(*args, **kwargs): """AddPane(self, Window window, AuiPaneInfo paneInfo, Point dropPos) -> bool""" return _aui.AuiManager_AddPane(*args, **kwargs)
[ "def", "AddPane", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_aui", ".", "AuiManager_AddPane", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/aui.py#L643-L645
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
python
parseDTD
(ExternalID, SystemID)
return xmlDtd(_obj=ret)
Load and parse an external subset.
Load and parse an external subset.
[ "Load", "and", "parse", "an", "external", "subset", "." ]
def parseDTD(ExternalID, SystemID): """Load and parse an external subset. """ ret = libxml2mod.xmlParseDTD(ExternalID, SystemID) if ret is None:raise parserError('xmlParseDTD() failed') return xmlDtd(_obj=ret)
[ "def", "parseDTD", "(", "ExternalID", ",", "SystemID", ")", ":", "ret", "=", "libxml2mod", ".", "xmlParseDTD", "(", "ExternalID", ",", "SystemID", ")", "if", "ret", "is", "None", ":", "raise", "parserError", "(", "'xmlParseDTD() failed'", ")", "return", "xmlDtd", "(", "_obj", "=", "ret", ")" ]
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L530-L534
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/utils/encoding.py
python
auto_decode
(data)
return data.decode( locale.getpreferredencoding(False) or sys.getdefaultencoding(), )
Check a bytes string for a BOM to correctly detect the encoding Fallback to locale.getpreferredencoding(False) like open() on Python3
Check a bytes string for a BOM to correctly detect the encoding
[ "Check", "a", "bytes", "string", "for", "a", "BOM", "to", "correctly", "detect", "the", "encoding" ]
def auto_decode(data): # type: (bytes) -> str """Check a bytes string for a BOM to correctly detect the encoding Fallback to locale.getpreferredencoding(False) like open() on Python3""" for bom, encoding in BOMS: if data.startswith(bom): return data[len(bom):].decode(encoding) # Lets check the first two lines as in PEP263 for line in data.split(b'\n')[:2]: if line[0:1] == b'#' and ENCODING_RE.search(line): result = ENCODING_RE.search(line) assert result is not None encoding = result.groups()[0].decode('ascii') return data.decode(encoding) return data.decode( locale.getpreferredencoding(False) or sys.getdefaultencoding(), )
[ "def", "auto_decode", "(", "data", ")", ":", "# type: (bytes) -> str", "for", "bom", ",", "encoding", "in", "BOMS", ":", "if", "data", ".", "startswith", "(", "bom", ")", ":", "return", "data", "[", "len", "(", "bom", ")", ":", "]", ".", "decode", "(", "encoding", ")", "# Lets check the first two lines as in PEP263", "for", "line", "in", "data", ".", "split", "(", "b'\\n'", ")", "[", ":", "2", "]", ":", "if", "line", "[", "0", ":", "1", "]", "==", "b'#'", "and", "ENCODING_RE", ".", "search", "(", "line", ")", ":", "result", "=", "ENCODING_RE", ".", "search", "(", "line", ")", "assert", "result", "is", "not", "None", "encoding", "=", "result", ".", "groups", "(", ")", "[", "0", "]", ".", "decode", "(", "'ascii'", ")", "return", "data", ".", "decode", "(", "encoding", ")", "return", "data", ".", "decode", "(", "locale", ".", "getpreferredencoding", "(", "False", ")", "or", "sys", ".", "getdefaultencoding", "(", ")", ",", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_internal/utils/encoding.py#L24-L41
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/urllib/request.py
python
URLopener.open_unknown
(self, fullurl, data=None)
Overridable interface to open unknown URL type.
Overridable interface to open unknown URL type.
[ "Overridable", "interface", "to", "open", "unknown", "URL", "type", "." ]
def open_unknown(self, fullurl, data=None): """Overridable interface to open unknown URL type.""" type, url = splittype(fullurl) raise OSError('url error', 'unknown url type', type)
[ "def", "open_unknown", "(", "self", ",", "fullurl", ",", "data", "=", "None", ")", ":", "type", ",", "url", "=", "splittype", "(", "fullurl", ")", "raise", "OSError", "(", "'url error'", ",", "'unknown url type'", ",", "type", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/urllib/request.py#L1797-L1800
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/docview.py
python
DocManager.FileHistoryUseMenu
(self, menu)
Use this menu for appending recently-visited document filenames, for convenient access. Calling this function with a valid menu enables the history list functionality. Note that you can add multiple menus using this function, to be managed by the file history object.
Use this menu for appending recently-visited document filenames, for convenient access. Calling this function with a valid menu enables the history list functionality.
[ "Use", "this", "menu", "for", "appending", "recently", "-", "visited", "document", "filenames", "for", "convenient", "access", ".", "Calling", "this", "function", "with", "a", "valid", "menu", "enables", "the", "history", "list", "functionality", "." ]
def FileHistoryUseMenu(self, menu): """ Use this menu for appending recently-visited document filenames, for convenient access. Calling this function with a valid menu enables the history list functionality. Note that you can add multiple menus using this function, to be managed by the file history object. """ if self._fileHistory: self._fileHistory.UseMenu(menu)
[ "def", "FileHistoryUseMenu", "(", "self", ",", "menu", ")", ":", "if", "self", ".", "_fileHistory", ":", "self", ".", "_fileHistory", ".", "UseMenu", "(", "menu", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/docview.py#L2056-L2066
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/python/turicreate/data_structures/sarray.py
python
SArray.all
(self)
Return True if every element of the SArray evaluates to True. For numeric SArrays zeros and missing values (``None``) evaluate to False, while all non-zero, non-missing values evaluate to True. For string, list, and dictionary SArrays, empty values (zero length strings, lists or dictionaries) or missing values (``None``) evaluate to False. All other values evaluate to True. Returns True on an empty SArray. Returns ------- out : bool See Also -------- any Examples -------- >>> turicreate.SArray([1, None]).all() False >>> turicreate.SArray([1, 0]).all() False >>> turicreate.SArray([1, 2]).all() True >>> turicreate.SArray(["hello", "world"]).all() True >>> turicreate.SArray(["hello", ""]).all() False >>> turicreate.SArray([]).all() True
Return True if every element of the SArray evaluates to True. For numeric SArrays zeros and missing values (``None``) evaluate to False, while all non-zero, non-missing values evaluate to True. For string, list, and dictionary SArrays, empty values (zero length strings, lists or dictionaries) or missing values (``None``) evaluate to False. All other values evaluate to True.
[ "Return", "True", "if", "every", "element", "of", "the", "SArray", "evaluates", "to", "True", ".", "For", "numeric", "SArrays", "zeros", "and", "missing", "values", "(", "None", ")", "evaluate", "to", "False", "while", "all", "non", "-", "zero", "non", "-", "missing", "values", "evaluate", "to", "True", ".", "For", "string", "list", "and", "dictionary", "SArrays", "empty", "values", "(", "zero", "length", "strings", "lists", "or", "dictionaries", ")", "or", "missing", "values", "(", "None", ")", "evaluate", "to", "False", ".", "All", "other", "values", "evaluate", "to", "True", "." ]
def all(self): """ Return True if every element of the SArray evaluates to True. For numeric SArrays zeros and missing values (``None``) evaluate to False, while all non-zero, non-missing values evaluate to True. For string, list, and dictionary SArrays, empty values (zero length strings, lists or dictionaries) or missing values (``None``) evaluate to False. All other values evaluate to True. Returns True on an empty SArray. Returns ------- out : bool See Also -------- any Examples -------- >>> turicreate.SArray([1, None]).all() False >>> turicreate.SArray([1, 0]).all() False >>> turicreate.SArray([1, 2]).all() True >>> turicreate.SArray(["hello", "world"]).all() True >>> turicreate.SArray(["hello", ""]).all() False >>> turicreate.SArray([]).all() True """ with cython_context(): return self.__proxy__.all()
[ "def", "all", "(", "self", ")", ":", "with", "cython_context", "(", ")", ":", "return", "self", ".", "__proxy__", ".", "all", "(", ")" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/data_structures/sarray.py#L2067-L2102
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/ma/mrecords.py
python
_mrreconstruct
(subtype, baseclass, baseshape, basetype,)
return subtype.__new__(subtype, _data, mask=_mask, dtype=basetype,)
Build a new MaskedArray from the information stored in a pickle.
Build a new MaskedArray from the information stored in a pickle.
[ "Build", "a", "new", "MaskedArray", "from", "the", "information", "stored", "in", "a", "pickle", "." ]
def _mrreconstruct(subtype, baseclass, baseshape, basetype,): """ Build a new MaskedArray from the information stored in a pickle. """ _data = ndarray.__new__(baseclass, baseshape, basetype).view(subtype) _mask = ndarray.__new__(ndarray, baseshape, 'b1') return subtype.__new__(subtype, _data, mask=_mask, dtype=basetype,)
[ "def", "_mrreconstruct", "(", "subtype", ",", "baseclass", ",", "baseshape", ",", "basetype", ",", ")", ":", "_data", "=", "ndarray", ".", "__new__", "(", "baseclass", ",", "baseshape", ",", "basetype", ")", ".", "view", "(", "subtype", ")", "_mask", "=", "ndarray", ".", "__new__", "(", "ndarray", ",", "baseshape", ",", "'b1'", ")", "return", "subtype", ".", "__new__", "(", "subtype", ",", "_data", ",", "mask", "=", "_mask", ",", "dtype", "=", "basetype", ",", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/ma/mrecords.py#L490-L497
SpenceKonde/megaTinyCore
1c4a70b18a149fe6bcb551dfa6db11ca50b8997b
megaavr/tools/libs/pyedbglib/protocols/avr8protocol.py
python
Avr8Protocol.set_interface
(self, interface)
Sets the function field in the physical context :param interface: physical interface setting
Sets the function field in the physical context
[ "Sets", "the", "function", "field", "in", "the", "physical", "context" ]
def set_interface(self, interface): """ Sets the function field in the physical context :param interface: physical interface setting """ self.set_byte(self.AVR8_CTXT_PHYSICAL, self.AVR8_PHY_INTERFACE, interface)
[ "def", "set_interface", "(", "self", ",", "interface", ")", ":", "self", ".", "set_byte", "(", "self", ".", "AVR8_CTXT_PHYSICAL", ",", "self", ".", "AVR8_PHY_INTERFACE", ",", "interface", ")" ]
https://github.com/SpenceKonde/megaTinyCore/blob/1c4a70b18a149fe6bcb551dfa6db11ca50b8997b/megaavr/tools/libs/pyedbglib/protocols/avr8protocol.py#L241-L247
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/syntax/_caml.py
python
SyntaxData.GetKeywords
(self)
return [CAML_KW1, CAML_KW2, CAML_KW3]
Returns Specified Keywords List
Returns Specified Keywords List
[ "Returns", "Specified", "Keywords", "List" ]
def GetKeywords(self): """Returns Specified Keywords List """ return [CAML_KW1, CAML_KW2, CAML_KW3]
[ "def", "GetKeywords", "(", "self", ")", ":", "return", "[", "CAML_KW1", ",", "CAML_KW2", ",", "CAML_KW3", "]" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/syntax/_caml.py#L81-L83
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/distutils/cmd.py
python
Command.__init__
(self, dist)
Create and initialize a new Command object. Most importantly, invokes the 'initialize_options()' method, which is the real initializer and depends on the actual command being instantiated.
Create and initialize a new Command object. Most importantly, invokes the 'initialize_options()' method, which is the real initializer and depends on the actual command being instantiated.
[ "Create", "and", "initialize", "a", "new", "Command", "object", ".", "Most", "importantly", "invokes", "the", "initialize_options", "()", "method", "which", "is", "the", "real", "initializer", "and", "depends", "on", "the", "actual", "command", "being", "instantiated", "." ]
def __init__(self, dist): """Create and initialize a new Command object. Most importantly, invokes the 'initialize_options()' method, which is the real initializer and depends on the actual command being instantiated. """ # late import because of mutual dependence between these classes from distutils.dist import Distribution if not isinstance(dist, Distribution): raise TypeError("dist must be a Distribution instance") if self.__class__ is Command: raise RuntimeError("Command is an abstract class") self.distribution = dist self.initialize_options() # Per-command versions of the global flags, so that the user can # customize Distutils' behaviour command-by-command and let some # commands fall back on the Distribution's behaviour. None means # "not defined, check self.distribution's copy", while 0 or 1 mean # false and true (duh). Note that this means figuring out the real # value of each flag is a touch complicated -- hence "self._dry_run" # will be handled by __getattr__, below. # XXX This needs to be fixed. self._dry_run = None # verbose is largely ignored, but needs to be set for # backwards compatibility (I think)? self.verbose = dist.verbose # Some commands define a 'self.force' option to ignore file # timestamps, but methods defined *here* assume that # 'self.force' exists for all commands. So define it here # just to be safe. self.force = None # The 'help' flag is just used for command-line parsing, so # none of that complicated bureaucracy is needed. self.help = 0 # 'finalized' records whether or not 'finalize_options()' has been # called. 'finalize_options()' itself should not pay attention to # this flag: it is the business of 'ensure_finalized()', which # always calls 'finalize_options()', to respect/update it. self.finalized = 0
[ "def", "__init__", "(", "self", ",", "dist", ")", ":", "# late import because of mutual dependence between these classes", "from", "distutils", ".", "dist", "import", "Distribution", "if", "not", "isinstance", "(", "dist", ",", "Distribution", ")", ":", "raise", "TypeError", "(", "\"dist must be a Distribution instance\"", ")", "if", "self", ".", "__class__", "is", "Command", ":", "raise", "RuntimeError", "(", "\"Command is an abstract class\"", ")", "self", ".", "distribution", "=", "dist", "self", ".", "initialize_options", "(", ")", "# Per-command versions of the global flags, so that the user can", "# customize Distutils' behaviour command-by-command and let some", "# commands fall back on the Distribution's behaviour. None means", "# \"not defined, check self.distribution's copy\", while 0 or 1 mean", "# false and true (duh). Note that this means figuring out the real", "# value of each flag is a touch complicated -- hence \"self._dry_run\"", "# will be handled by __getattr__, below.", "# XXX This needs to be fixed.", "self", ".", "_dry_run", "=", "None", "# verbose is largely ignored, but needs to be set for", "# backwards compatibility (I think)?", "self", ".", "verbose", "=", "dist", ".", "verbose", "# Some commands define a 'self.force' option to ignore file", "# timestamps, but methods defined *here* assume that", "# 'self.force' exists for all commands. So define it here", "# just to be safe.", "self", ".", "force", "=", "None", "# The 'help' flag is just used for command-line parsing, so", "# none of that complicated bureaucracy is needed.", "self", ".", "help", "=", "0", "# 'finalized' records whether or not 'finalize_options()' has been", "# called. 'finalize_options()' itself should not pay attention to", "# this flag: it is the business of 'ensure_finalized()', which", "# always calls 'finalize_options()', to respect/update it.", "self", ".", "finalized", "=", "0" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/distutils/cmd.py#L47-L92
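Because Command.__init__ calls initialize_options() immediately and refuses to instantiate the abstract class itself, every concrete command supplies the three-method protocol. A minimal sketch (hypothetical command name and option):

from distutils.cmd import Command

class greet(Command):
    description = "print a greeting"
    user_options = [("name=", None, "who to greet")]

    def initialize_options(self):
        # invoked by Command.__init__ before any option parsing
        self.name = None

    def finalize_options(self):
        # invoked via ensure_finalized(), which flips self.finalized
        if self.name is None:
            self.name = "world"

    def run(self):
        print("hello %s" % self.name)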
PaddlePaddle/Paddle
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
python/paddle/fluid/executor.py
python
Executor.close
(self)
Close the executor. This interface is used for distributed training (PServers mode). This executor can not be used after calling the interface, because this interface releases resources associated with the current Trainer. Returns: None Examples: .. code-block:: python import paddle cpu = paddle.CPUPlace() exe = paddle.static.Executor(cpu) # execute training or testing exe.close()
Close the executor. This interface is used for distributed training (PServers mode). This executor can not be used after calling the interface, because this interface releases resources associated with the current Trainer.
[ "Close", "the", "executor", ".", "This", "interface", "is", "used", "for", "distributed", "training", "(", "PServers", "mode", ")", ".", "This", "executor", "can", "not", "be", "used", "after", "calling", "the", "interface", "because", "this", "interface", "releases", "resources", "associated", "with", "the", "current", "Trainer", "." ]
def close(self): """ Close the executor. This interface is used for distributed training (PServers mode). This executor can not be used after calling the interface, because this interface releases resources associated with the current Trainer. Returns: None Examples: .. code-block:: python import paddle cpu = paddle.CPUPlace() exe = paddle.static.Executor(cpu) # execute training or testing exe.close() """ if not self._closed: self._closed = True for k, trainer_instance in self.trainer_caches.items(): self._default_executor.release_trainer(trainer_instance) del trainer_instance self._default_executor.close()
[ "def", "close", "(", "self", ")", ":", "if", "not", "self", ".", "_closed", ":", "self", ".", "_closed", "=", "True", "for", "k", ",", "trainer_instance", "in", "self", ".", "trainer_caches", ".", "items", "(", ")", ":", "self", ".", "_default_executor", ".", "release_trainer", "(", "trainer_instance", ")", "del", "trainer_instance", "self", ".", "_default_executor", ".", "close", "(", ")" ]
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/executor.py#L1000-L1024
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/_gdi.py
python
Region.IntersectRegion
(*args, **kwargs)
return _gdi_.Region_IntersectRegion(*args, **kwargs)
IntersectRegion(self, Region region) -> bool
IntersectRegion(self, Region region) -> bool
[ "IntersectRegion", "(", "self", "Region", "region", ")", "-", ">", "bool" ]
def IntersectRegion(*args, **kwargs): """IntersectRegion(self, Region region) -> bool""" return _gdi_.Region_IntersectRegion(*args, **kwargs)
[ "def", "IntersectRegion", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdi_", ".", "Region_IntersectRegion", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_gdi.py#L1583-L1585
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cgutils.py
python
for_range_slice
(builder, start, stop, step, intp=None, inc=True)
Generate LLVM IR for a for-loop based on a slice. Yields a (index, count) tuple where `index` is the slice index's value inside the loop, and `count` the iteration count. Parameters ------------- builder : object Builder object start : int The beginning value of the slice stop : int The end value of the slice step : int The step value of the slice intp : The data type inc : boolean, optional Signals whether the step is positive (True) or negative (False). Returns ----------- None
Generate LLVM IR for a for-loop based on a slice. Yields a (index, count) tuple where `index` is the slice index's value inside the loop, and `count` the iteration count.
[ "Generate", "LLVM", "IR", "for", "a", "for", "-", "loop", "based", "on", "a", "slice", ".", "Yields", "a", "(", "index", "count", ")", "tuple", "where", "index", "is", "the", "slice", "index", "s", "value", "inside", "the", "loop", "and", "count", "the", "iteration", "count", "." ]
def for_range_slice(builder, start, stop, step, intp=None, inc=True): """ Generate LLVM IR for a for-loop based on a slice. Yields a (index, count) tuple where `index` is the slice index's value inside the loop, and `count` the iteration count. Parameters ------------- builder : object Builder object start : int The beginning value of the slice stop : int The end value of the slice step : int The step value of the slice intp : The data type inc : boolean, optional Signals whether the step is positive (True) or negative (False). Returns ----------- None """ if intp is None: intp = start.type bbcond = builder.append_basic_block("for.cond") bbbody = builder.append_basic_block("for.body") bbend = builder.append_basic_block("for.end") bbstart = builder.basic_block builder.branch(bbcond) with builder.goto_block(bbcond): index = builder.phi(intp, name="loop.index") count = builder.phi(intp, name="loop.count") if (inc): pred = builder.icmp_signed('<', index, stop) else: pred = builder.icmp_signed('>', index, stop) builder.cbranch(pred, bbbody, bbend) with builder.goto_block(bbbody): yield index, count bbbody = builder.basic_block incr = builder.add(index, step) next_count = increment_index(builder, count) terminate(builder, bbcond) index.add_incoming(start, bbstart) index.add_incoming(incr, bbbody) count.add_incoming(ir.Constant(intp, 0), bbstart) count.add_incoming(next_count, bbbody) builder.position_at_end(bbend)
[ "def", "for_range_slice", "(", "builder", ",", "start", ",", "stop", ",", "step", ",", "intp", "=", "None", ",", "inc", "=", "True", ")", ":", "if", "intp", "is", "None", ":", "intp", "=", "start", ".", "type", "bbcond", "=", "builder", ".", "append_basic_block", "(", "\"for.cond\"", ")", "bbbody", "=", "builder", ".", "append_basic_block", "(", "\"for.body\"", ")", "bbend", "=", "builder", ".", "append_basic_block", "(", "\"for.end\"", ")", "bbstart", "=", "builder", ".", "basic_block", "builder", ".", "branch", "(", "bbcond", ")", "with", "builder", ".", "goto_block", "(", "bbcond", ")", ":", "index", "=", "builder", ".", "phi", "(", "intp", ",", "name", "=", "\"loop.index\"", ")", "count", "=", "builder", ".", "phi", "(", "intp", ",", "name", "=", "\"loop.count\"", ")", "if", "(", "inc", ")", ":", "pred", "=", "builder", ".", "icmp_signed", "(", "'<'", ",", "index", ",", "stop", ")", "else", ":", "pred", "=", "builder", ".", "icmp_signed", "(", "'>'", ",", "index", ",", "stop", ")", "builder", ".", "cbranch", "(", "pred", ",", "bbbody", ",", "bbend", ")", "with", "builder", ".", "goto_block", "(", "bbbody", ")", ":", "yield", "index", ",", "count", "bbbody", "=", "builder", ".", "basic_block", "incr", "=", "builder", ".", "add", "(", "index", ",", "step", ")", "next_count", "=", "increment_index", "(", "builder", ",", "count", ")", "terminate", "(", "builder", ",", "bbcond", ")", "index", ".", "add_incoming", "(", "start", ",", "bbstart", ")", "index", ".", "add_incoming", "(", "incr", ",", "bbbody", ")", "count", ".", "add_incoming", "(", "ir", ".", "Constant", "(", "intp", ",", "0", ")", ",", "bbstart", ")", "count", ".", "add_incoming", "(", "next_count", ",", "bbbody", ")", "builder", ".", "position_at_end", "(", "bbend", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cgutils.py#L497-L551
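A usage sketch of the loop emitter. It assumes the llvmlite API this vendored Numba builds on, and that for_range_slice is wrapped with contextlib.contextmanager (in Numba's source the decorator sits just outside the extracted span), so it is entered with a with-statement:

from llvmlite import ir
from numba.cgutils import for_range_slice   # import path as vendored in this tree

i64 = ir.IntType(64)
mod = ir.Module(name="demo")
fn = ir.Function(mod, ir.FunctionType(i64, []), name="sum_range")
builder = ir.IRBuilder(fn.append_basic_block("entry"))
acc = builder.alloca(i64, name="acc")
builder.store(ir.Constant(i64, 0), acc)
zero, ten, one = (ir.Constant(i64, n) for n in (0, 10, 1))
with for_range_slice(builder, zero, ten, one) as (index, count):
    # loop body: acc += index
    builder.store(builder.add(builder.load(acc), index), acc)
builder.ret(builder.load(acc))   # builder now sits in the for.end block
print(mod)                       # textual LLVM IR for inspection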
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/extern/pygments/lexers/__init__.py
python
get_lexer_for_filename
(_fn, code=None, **options)
Get a lexer for a filename. If multiple lexers match the filename pattern, use ``analyze_text()`` to figure out which one is more appropriate.
Get a lexer for a filename. If multiple lexers match the filename pattern, use ``analyze_text()`` to figure out which one is more appropriate.
[ "Get", "a", "lexer", "for", "a", "filename", ".", "If", "multiple", "lexers", "match", "the", "filename", "pattern", "use", "analyze_text", "()", "to", "figure", "out", "which", "one", "is", "more", "appropriate", "." ]
def get_lexer_for_filename(_fn, code=None, **options): """ Get a lexer for a filename. If multiple lexers match the filename pattern, use ``analyze_text()`` to figure out which one is more appropriate. """ matches = [] fn = basename(_fn) for modname, name, _, filenames, _ in LEXERS.itervalues(): for filename in filenames: if fnmatch.fnmatch(fn, filename): if name not in _lexer_cache: _load_lexers(modname) matches.append(_lexer_cache[name]) for cls in find_plugin_lexers(): for filename in cls.filenames: if fnmatch.fnmatch(fn, filename): matches.append(cls) if sys.version_info > (3,) and isinstance(code, bytes): # decode it, since all analyse_text functions expect unicode code = code.decode('latin1') def get_rating(cls): # The class _always_ defines analyse_text because it's included in # the Lexer class. The default implementation returns None which # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py # to find lexers which need it overridden. d = cls.analyse_text(code) #print "Got %r from %r" % (d, cls) return d if code: matches.sort(key=get_rating) if matches: #print "Possible lexers, after sort:", matches return matches[-1](**options) raise ClassNotFound('no lexer for filename %r found' % _fn)
[ "def", "get_lexer_for_filename", "(", "_fn", ",", "code", "=", "None", ",", "*", "*", "options", ")", ":", "matches", "=", "[", "]", "fn", "=", "basename", "(", "_fn", ")", "for", "modname", ",", "name", ",", "_", ",", "filenames", ",", "_", "in", "LEXERS", ".", "itervalues", "(", ")", ":", "for", "filename", "in", "filenames", ":", "if", "fnmatch", ".", "fnmatch", "(", "fn", ",", "filename", ")", ":", "if", "name", "not", "in", "_lexer_cache", ":", "_load_lexers", "(", "modname", ")", "matches", ".", "append", "(", "_lexer_cache", "[", "name", "]", ")", "for", "cls", "in", "find_plugin_lexers", "(", ")", ":", "for", "filename", "in", "cls", ".", "filenames", ":", "if", "fnmatch", ".", "fnmatch", "(", "fn", ",", "filename", ")", ":", "matches", ".", "append", "(", "cls", ")", "if", "sys", ".", "version_info", ">", "(", "3", ",", ")", "and", "isinstance", "(", "code", ",", "bytes", ")", ":", "# decode it, since all analyse_text functions expect unicode", "code", "=", "code", ".", "decode", "(", "'latin1'", ")", "def", "get_rating", "(", "cls", ")", ":", "# The class _always_ defines analyse_text because it's included in", "# the Lexer class. The default implementation returns None which", "# gets turned into 0.0. Run scripts/detect_missing_analyse_text.py", "# to find lexers which need it overridden.", "d", "=", "cls", ".", "analyse_text", "(", "code", ")", "#print \"Got %r from %r\" % (d, cls)", "return", "d", "if", "code", ":", "matches", ".", "sort", "(", "key", "=", "get_rating", ")", "if", "matches", ":", "#print \"Possible lexers, after sort:\", matches", "return", "matches", "[", "-", "1", "]", "(", "*", "*", "options", ")", "raise", "ClassNotFound", "(", "'no lexer for filename %r found'", "%", "_fn", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/pygments/lexers/__init__.py#L83-L120
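A short usage sketch against the upstream Pygments API that this file vendors; *.pro is the classic ambiguous pattern (IDL vs. Prolog) where passing code lets analyse_text() break the tie:

from pygments.lexers import get_lexer_for_filename

lexer = get_lexer_for_filename("setup.py")
print(lexer.name)                          # Python
lexer = get_lexer_for_filename("parser.pro", code=":- module(parser, []).")
print(lexer.name)                          # Prolog wins on the code sample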
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/stc.py
python
StyledTextCtrl.BackTab
(*args, **kwargs)
return _stc.StyledTextCtrl_BackTab(*args, **kwargs)
BackTab(self) Dedent the selected lines.
BackTab(self)
[ "BackTab", "(", "self", ")" ]
def BackTab(*args, **kwargs): """ BackTab(self) Dedent the selected lines. """ return _stc.StyledTextCtrl_BackTab(*args, **kwargs)
[ "def", "BackTab", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextCtrl_BackTab", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/stc.py#L4555-L4561
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scikit-learn/py2/sklearn/multioutput.py
python
MultiOutputRegressor.score
(self, X, y, sample_weight=None)
return r2_score(y, self.predict(X), sample_weight=sample_weight, multioutput='uniform_average')
Returns the coefficient of determination R^2 of the prediction. The coefficient R^2 is defined as (1 - u/v), where u is the regression sum of squares ((y_true - y_pred) ** 2).sum() and v is the residual sum of squares ((y_true - y_true.mean()) ** 2).sum(). Best possible score is 1.0 and it can be negative (because the model can be arbitrarily worse). A constant model that always predicts the expected value of y, disregarding the input features, would get a R^2 score of 0.0. Notes ----- R^2 is calculated by weighting all the targets equally using `multioutput='uniform_average'`. Parameters ---------- X : array-like, shape (n_samples, n_features) Test samples. y : array-like, shape (n_samples) or (n_samples, n_outputs) True values for X. sample_weight : array-like, shape [n_samples], optional Sample weights. Returns ------- score : float R^2 of self.predict(X) wrt. y.
Returns the coefficient of determination R^2 of the prediction.
[ "Returns", "the", "coefficient", "of", "determination", "R^2", "of", "the", "prediction", "." ]
def score(self, X, y, sample_weight=None): """Returns the coefficient of determination R^2 of the prediction. The coefficient R^2 is defined as (1 - u/v), where u is the regression sum of squares ((y_true - y_pred) ** 2).sum() and v is the residual sum of squares ((y_true - y_true.mean()) ** 2).sum(). Best possible score is 1.0 and it can be negative (because the model can be arbitrarily worse). A constant model that always predicts the expected value of y, disregarding the input features, would get a R^2 score of 0.0. Notes ----- R^2 is calculated by weighting all the targets equally using `multioutput='uniform_average'`. Parameters ---------- X : array-like, shape (n_samples, n_features) Test samples. y : array-like, shape (n_samples) or (n_samples, n_outputs) True values for X. sample_weight : array-like, shape [n_samples], optional Sample weights. Returns ------- score : float R^2 of self.predict(X) wrt. y. """ # XXX remove in 0.19 when r2_score default for multioutput changes from .metrics import r2_score return r2_score(y, self.predict(X), sample_weight=sample_weight, multioutput='uniform_average')
[ "def", "score", "(", "self", ",", "X", ",", "y", ",", "sample_weight", "=", "None", ")", ":", "# XXX remove in 0.19 when r2_score default for multioutput changes", "from", ".", "metrics", "import", "r2_score", "return", "r2_score", "(", "y", ",", "self", ".", "predict", "(", "X", ")", ",", "sample_weight", "=", "sample_weight", ",", "multioutput", "=", "'uniform_average'", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/multioutput.py#L139-L174
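The multioutput='uniform_average' choice means each target's R^2 is computed independently and then averaged with equal weight; a quick numerical check of that equivalence:

import numpy as np
from sklearn.metrics import r2_score

y_true = np.array([[0.5, 1.0], [1.0, 2.0], [7.0, 8.0]])
y_pred = np.array([[0.6, 1.1], [1.2, 2.1], [6.5, 8.2]])
per_target = [r2_score(y_true[:, k], y_pred[:, k]) for k in range(y_true.shape[1])]
avg = r2_score(y_true, y_pred, multioutput='uniform_average')
assert np.isclose(np.mean(per_target), avg)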
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/random.py
python
Random.betavariate
(self, alpha, beta)
Beta distribution. Conditions on the parameters are alpha > 0 and beta > 0. Returned values range between 0 and 1.
Beta distribution.
[ "Beta", "distribution", "." ]
def betavariate(self, alpha, beta): """Beta distribution. Conditions on the parameters are alpha > 0 and beta > 0. Returned values range between 0 and 1. """ # This version due to Janne Sinkkonen, and matches all the std # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution"). y = self.gammavariate(alpha, 1.) if y == 0: return 0.0 else: return y / (y + self.gammavariate(beta, 1.))
[ "def", "betavariate", "(", "self", ",", "alpha", ",", "beta", ")", ":", "# This version due to Janne Sinkkonen, and matches all the std", "# texts (e.g., Knuth Vol 2 Ed 3 pg 134 \"the beta distribution\").", "y", "=", "self", ".", "gammavariate", "(", "alpha", ",", "1.", ")", "if", "y", "==", "0", ":", "return", "0.0", "else", ":", "return", "y", "/", "(", "y", "+", "self", ".", "gammavariate", "(", "beta", ",", "1.", ")", ")" ]
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/random.py#L603-L617
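The method leans on the standard identity that X = G1 / (G1 + G2) is Beta(alpha, beta) distributed when G1 ~ Gamma(alpha, 1) and G2 ~ Gamma(beta, 1); a stand-alone sketch of the same recipe:

import random

def beta_via_gamma(alpha, beta, rng=random):
    # mirrors Random.betavariate: ratio of two gamma variates
    y = rng.gammavariate(alpha, 1.0)
    return 0.0 if y == 0 else y / (y + rng.gammavariate(beta, 1.0))

samples = [beta_via_gamma(2.0, 5.0) for _ in range(100000)]
print(sum(samples) / len(samples))   # close to alpha/(alpha+beta) = 2/7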
msracver/Deep-Image-Analogy
632b9287b42552e32dad64922967c8c9ec7fc4d3
examples/pycaffe/tools.py
python
SimpleTransformer.deprocess
(self, im)
return np.uint8(im)
inverse of preprocess()
inverse of preprocess()
[ "inverse", "of", "preprocess", "()" ]
def deprocess(self, im): """ inverse of preprocess() """ im = im.transpose(1, 2, 0) im /= self.scale im += self.mean im = im[:, :, ::-1] # change to RGB return np.uint8(im)
[ "def", "deprocess", "(", "self", ",", "im", ")", ":", "im", "=", "im", ".", "transpose", "(", "1", ",", "2", ",", "0", ")", "im", "/=", "self", ".", "scale", "im", "+=", "self", ".", "mean", "im", "=", "im", "[", ":", ",", ":", ",", ":", ":", "-", "1", "]", "# change to RGB", "return", "np", ".", "uint8", "(", "im", ")" ]
https://github.com/msracver/Deep-Image-Analogy/blob/632b9287b42552e32dad64922967c8c9ec7fc4d3/examples/pycaffe/tools.py#L41-L50
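deprocess() undoes, in reverse order, a preprocess that by the usual Caffe convention (assumed here, not shown in the record) swaps RGB to BGR, subtracts a per-channel mean, scales, and transposes HWC to CHW; a numpy round-trip mirroring those exact steps:

import numpy as np

mean, scale = np.float32([104, 117, 123]), 1.0   # assumed BGR mean and scale
rgb = np.random.randint(0, 256, (4, 4, 3)).astype(np.float32)
chw_bgr = ((rgb[:, :, ::-1] - mean) * scale).transpose(2, 0, 1)  # assumed preprocess

im = chw_bgr.transpose(1, 2, 0)   # CHW -> HWC
im /= scale
im += mean
im = im[:, :, ::-1]               # BGR -> RGB, exactly as in deprocess()
assert np.array_equal(np.uint8(im), np.uint8(rgb))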
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/python/ops/logging_ops.py
python
Print
(input_, data, message=None, first_n=None, summarize=None, name=None)
return gen_logging_ops._print(input_, data, message, first_n, summarize, name)
Prints a list of tensors. This is an identity op with the side effect of printing `data` when evaluating. Note: This op prints to the standard error. It is not currently compatible with jupyter notebook (printing to the notebook *server's* output, not into the notebook). Args: input_: A tensor passed through this op. data: A list of tensors to print out when op is evaluated. message: A string, prefix of the error message. first_n: Only log `first_n` number of times. Negative numbers log always; this is the default. summarize: Only print this many entries of each tensor. If None, then a maximum of 3 elements are printed per input tensor. name: A name for the operation (optional). Returns: Same tensor as `input_`.
Prints a list of tensors.
[ "Prints", "a", "list", "of", "tensors", "." ]
def Print(input_, data, message=None, first_n=None, summarize=None, name=None): """Prints a list of tensors. This is an identity op with the side effect of printing `data` when evaluating. Note: This op prints to the standard error. It is not currently compatible with jupyter notebook (printing to the notebook *server's* output, not into the notebook). Args: input_: A tensor passed through this op. data: A list of tensors to print out when op is evaluated. message: A string, prefix of the error message. first_n: Only log `first_n` number of times. Negative numbers log always; this is the default. summarize: Only print this many entries of each tensor. If None, then a maximum of 3 elements are printed per input tensor. name: A name for the operation (optional). Returns: Same tensor as `input_`. """ return gen_logging_ops._print(input_, data, message, first_n, summarize, name)
[ "def", "Print", "(", "input_", ",", "data", ",", "message", "=", "None", ",", "first_n", "=", "None", ",", "summarize", "=", "None", ",", "name", "=", "None", ")", ":", "return", "gen_logging_ops", ".", "_print", "(", "input_", ",", "data", ",", "message", ",", "first_n", ",", "summarize", ",", "name", ")" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/python/ops/logging_ops.py#L38-L62
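A graph-mode usage sketch (TensorFlow 1.x, matching this source tree); the op passes input_ through unchanged and prints to stderr as a side effect of evaluation:

import tensorflow as tf

x = tf.constant([1.0, 2.0, 3.0])
x = tf.Print(x, [x, tf.reduce_sum(x)], message="x and sum: ", first_n=5, summarize=3)
with tf.Session() as sess:
    sess.run(x)   # the listed tensors are printed when the op runs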
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
VersionInfo.__init__
(self, *args, **kwargs)
__init__(self, String name, int major, int minor, int micro=0, String description=wxEmptyString, String copyright=wxEmptyString) -> VersionInfo
__init__(self, String name, int major, int minor, int micro=0, String description=wxEmptyString, String copyright=wxEmptyString) -> VersionInfo
[ "__init__", "(", "self", "String", "name", "int", "major", "int", "minor", "int", "micro", "=", "0", "String", "description", "=", "wxEmptyString", "String", "copyright", "=", "wxEmptyString", ")", "-", ">", "VersionInfo" ]
def __init__(self, *args, **kwargs): """ __init__(self, String name, int major, int minor, int micro=0, String description=wxEmptyString, String copyright=wxEmptyString) -> VersionInfo """ _core_.VersionInfo_swiginit(self,_core_.new_VersionInfo(*args, **kwargs))
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_core_", ".", "VersionInfo_swiginit", "(", "self", ",", "_core_", ".", "new_VersionInfo", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L16559-L16564
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2.py
python
keepBlanksDefault
(val)
return ret
Set and return the previous value for default blanks text nodes support. The 1.x version of the parser used an heuristic to try to detect ignorable white spaces. As a result the SAX callback was generating xmlSAX2IgnorableWhitespace() callbacks instead of characters() one, and when using the DOM output text nodes containing those blanks were not generated. The 2.x and later version will switch to the XML standard way and ignorableWhitespace() are only generated when running the parser in validating mode and when the current element doesn't allow CDATA or mixed content. This function is provided as a way to force the standard behavior on 1.X libs and to switch back to the old mode for compatibility when running 1.X client code on 2.X . Upgrade of 1.X code should be done by using xmlIsBlankNode() commodity function to detect the "empty" nodes generated. This value also affect autogeneration of indentation when saving code if blanks sections are kept, indentation is not generated.
Set and return the previous value for default blanks text nodes support. The 1.x version of the parser used an heuristic to try to detect ignorable white spaces. As a result the SAX callback was generating xmlSAX2IgnorableWhitespace() callbacks instead of characters() one, and when using the DOM output text nodes containing those blanks were not generated. The 2.x and later version will switch to the XML standard way and ignorableWhitespace() are only generated when running the parser in validating mode and when the current element doesn't allow CDATA or mixed content. This function is provided as a way to force the standard behavior on 1.X libs and to switch back to the old mode for compatibility when running 1.X client code on 2.X . Upgrade of 1.X code should be done by using xmlIsBlankNode() commodity function to detect the "empty" nodes generated. This value also affect autogeneration of indentation when saving code if blanks sections are kept, indentation is not generated.
[ "Set", "and", "return", "the", "previous", "value", "for", "default", "blanks", "text", "nodes", "support", ".", "The", "1", ".", "x", "version", "of", "the", "parser", "used", "an", "heuristic", "to", "try", "to", "detect", "ignorable", "white", "spaces", ".", "As", "a", "result", "the", "SAX", "callback", "was", "generating", "xmlSAX2IgnorableWhitespace", "()", "callbacks", "instead", "of", "characters", "()", "one", "and", "when", "using", "the", "DOM", "output", "text", "nodes", "containing", "those", "blanks", "were", "not", "generated", ".", "The", "2", ".", "x", "and", "later", "version", "will", "switch", "to", "the", "XML", "standard", "way", "and", "ignorableWhitespace", "()", "are", "only", "generated", "when", "running", "the", "parser", "in", "validating", "mode", "and", "when", "the", "current", "element", "doesn", "t", "allow", "CDATA", "or", "mixed", "content", ".", "This", "function", "is", "provided", "as", "a", "way", "to", "force", "the", "standard", "behavior", "on", "1", ".", "X", "libs", "and", "to", "switch", "back", "to", "the", "old", "mode", "for", "compatibility", "when", "running", "1", ".", "X", "client", "code", "on", "2", ".", "X", ".", "Upgrade", "of", "1", ".", "X", "code", "should", "be", "done", "by", "using", "xmlIsBlankNode", "()", "commodity", "function", "to", "detect", "the", "empty", "nodes", "generated", ".", "This", "value", "also", "affect", "autogeneration", "of", "indentation", "when", "saving", "code", "if", "blanks", "sections", "are", "kept", "indentation", "is", "not", "generated", "." ]
def keepBlanksDefault(val): """Set and return the previous value for default blanks text nodes support. The 1.x version of the parser used an heuristic to try to detect ignorable white spaces. As a result the SAX callback was generating xmlSAX2IgnorableWhitespace() callbacks instead of characters() one, and when using the DOM output text nodes containing those blanks were not generated. The 2.x and later version will switch to the XML standard way and ignorableWhitespace() are only generated when running the parser in validating mode and when the current element doesn't allow CDATA or mixed content. This function is provided as a way to force the standard behavior on 1.X libs and to switch back to the old mode for compatibility when running 1.X client code on 2.X . Upgrade of 1.X code should be done by using xmlIsBlankNode() commodity function to detect the "empty" nodes generated. This value also affect autogeneration of indentation when saving code if blanks sections are kept, indentation is not generated. """ ret = libxml2mod.xmlKeepBlanksDefault(val) return ret
[ "def", "keepBlanksDefault", "(", "val", ")", ":", "ret", "=", "libxml2mod", ".", "xmlKeepBlanksDefault", "(", "val", ")", "return", "ret" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L1281-L1301
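A usage sketch: turn blank-text-node generation off around a parse, then restore the previous setting from the returned value:

import libxml2

old = libxml2.keepBlanksDefault(0)   # stop generating ignorable-whitespace text nodes
doc = libxml2.parseDoc("<root>\n  <a/>\n</root>")
root = doc.getRootElement()
print(root.children.name)            # 'a' rather than a whitespace 'text' node
doc.freeDoc()
libxml2.keepBlanksDefault(old)       # restore the prior global behavior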
chromiumembedded/cef
80caf947f3fe2210e5344713c5281d8af9bdc295
tools/yapf/yapf/yapflib/format_token.py
python
FormatToken.subtypes
(self)
return [Subtype.NONE] if value is None else value
Extra type information for directing formatting.
Extra type information for directing formatting.
[ "Extra", "type", "information", "for", "directing", "formatting", "." ]
def subtypes(self): """Extra type information for directing formatting.""" value = pytree_utils.GetNodeAnnotation(self.node, pytree_utils.Annotation.SUBTYPE) return [Subtype.NONE] if value is None else value
[ "def", "subtypes", "(", "self", ")", ":", "value", "=", "pytree_utils", ".", "GetNodeAnnotation", "(", "self", ".", "node", ",", "pytree_utils", ".", "Annotation", ".", "SUBTYPE", ")", "return", "[", "Subtype", ".", "NONE", "]", "if", "value", "is", "None", "else", "value" ]
https://github.com/chromiumembedded/cef/blob/80caf947f3fe2210e5344713c5281d8af9bdc295/tools/yapf/yapf/yapflib/format_token.py#L219-L223
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
tools/telemetry/telemetry/page/page_set.py
python
PageSet.ReorderPageSet
(self, results_file)
return pages
Reorders this page set based on the results of a past run.
Reorders this page set based on the results of a past run.
[ "Reorders", "this", "page", "set", "based", "on", "the", "results", "of", "a", "past", "run", "." ]
def ReorderPageSet(self, results_file): """Reorders this page set based on the results of a past run.""" page_set_dict = {} for page in self.pages: page_set_dict[page.url] = page pages = [] with open(results_file, 'rb') as csv_file: csv_reader = csv.reader(csv_file) csv_header = csv_reader.next() if 'url' not in csv_header: raise Exception('Unusable results_file.') url_index = csv_header.index('url') for csv_row in csv_reader: if csv_row[url_index] in page_set_dict: pages.append(page_set_dict[csv_row[url_index]]) else: raise Exception('Unusable results_file.') return pages
[ "def", "ReorderPageSet", "(", "self", ",", "results_file", ")", ":", "page_set_dict", "=", "{", "}", "for", "page", "in", "self", ".", "pages", ":", "page_set_dict", "[", "page", ".", "url", "]", "=", "page", "pages", "=", "[", "]", "with", "open", "(", "results_file", ",", "'rb'", ")", "as", "csv_file", ":", "csv_reader", "=", "csv", ".", "reader", "(", "csv_file", ")", "csv_header", "=", "csv_reader", ".", "next", "(", ")", "if", "'url'", "not", "in", "csv_header", ":", "raise", "Exception", "(", "'Unusable results_file.'", ")", "url_index", "=", "csv_header", ".", "index", "(", "'url'", ")", "for", "csv_row", "in", "csv_reader", ":", "if", "csv_row", "[", "url_index", "]", "in", "page_set_dict", ":", "pages", ".", "append", "(", "page_set_dict", "[", "csv_row", "[", "url_index", "]", "]", ")", "else", ":", "raise", "Exception", "(", "'Unusable results_file.'", ")", "return", "pages" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/telemetry/telemetry/page/page_set.py#L111-L133
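The results file is simply a CSV from a past run whose header contains a url column; a sketch (page_set is assumed to be an already-constructed PageSet whose pages cover the listed URLs):

import csv

with open("last_run.csv", "wb") as f:            # 'wb' to match this py2 module's 'rb' reader
    writer = csv.writer(f)
    writer.writerow(["url", "load_time_ms"])
    writer.writerow(["http://example.com/b", "340"])
    writer.writerow(["http://example.com/a", "120"])

pages = page_set.ReorderPageSet("last_run.csv")  # pages now ordered b, a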
BlzFans/wke
b0fa21158312e40c5fbd84682d643022b6c34a93
cygwin/lib/python2.6/pdb.py
python
Pdb.displayhook
(self, obj)
Custom displayhook for the exec in default(), which prevents assignment of the _ variable in the builtins.
Custom displayhook for the exec in default(), which prevents assignment of the _ variable in the builtins.
[ "Custom", "displayhook", "for", "the", "exec", "in", "default", "()", "which", "prevents", "assignment", "of", "the", "_", "variable", "in", "the", "builtins", "." ]
def displayhook(self, obj): """Custom displayhook for the exec in default(), which prevents assignment of the _ variable in the builtins. """ # reproduce the behavior of the standard displayhook, not printing None if obj is not None: print repr(obj)
[ "def", "displayhook", "(", "self", ",", "obj", ")", ":", "# reproduce the behavior of the standard displayhook, not printing None", "if", "obj", "is", "not", "None", ":", "print", "repr", "(", "obj", ")" ]
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/pdb.py#L197-L203
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
Sizer.PrependWindow
(self, *args, **kw)
return self.Prepend(*args, **kw)
Compatibility alias for `Prepend`.
Compatibility alias for `Prepend`.
[ "Compatibility", "alias", "for", "Prepend", "." ]
def PrependWindow(self, *args, **kw): """Compatibility alias for `Prepend`.""" return self.Prepend(*args, **kw)
[ "def", "PrependWindow", "(", "self", ",", "*", "args", ",", "*", "*", "kw", ")", ":", "return", "self", ".", "Prepend", "(", "*", "args", ",", "*", "*", "kw", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L14724-L14726
GoSSIP-SJTU/TripleDoggy
03648d6b19c812504b14e8b98c8c7b3f443f4e54
tools/clang/bindings/python/clang/cindex.py
python
TranslationUnit.reparse
(self, unsaved_files=None, options=0)
Reparse an already parsed translation unit. In-memory contents for files can be provided by passing a list of pairs as unsaved_files, the first items should be the filenames to be mapped and the second should be the contents to be substituted for the file. The contents may be passed as strings or file objects.
Reparse an already parsed translation unit.
[ "Reparse", "an", "already", "parsed", "translation", "unit", "." ]
def reparse(self, unsaved_files=None, options=0): """ Reparse an already parsed translation unit. In-memory contents for files can be provided by passing a list of pairs as unsaved_files, the first items should be the filenames to be mapped and the second should be the contents to be substituted for the file. The contents may be passed as strings or file objects. """ if unsaved_files is None: unsaved_files = [] unsaved_files_array = 0 if len(unsaved_files): unsaved_files_array = (_CXUnsavedFile * len(unsaved_files))() for i,(name,value) in enumerate(unsaved_files): if not isinstance(value, str): # FIXME: It would be great to support an efficient version # of this, one day. value = value.read() print(value) if not isinstance(value, str): raise TypeError('Unexpected unsaved file contents.') unsaved_files_array[i].name = name unsaved_files_array[i].contents = value unsaved_files_array[i].length = len(value) ptr = conf.lib.clang_reparseTranslationUnit(self, len(unsaved_files), unsaved_files_array, options)
[ "def", "reparse", "(", "self", ",", "unsaved_files", "=", "None", ",", "options", "=", "0", ")", ":", "if", "unsaved_files", "is", "None", ":", "unsaved_files", "=", "[", "]", "unsaved_files_array", "=", "0", "if", "len", "(", "unsaved_files", ")", ":", "unsaved_files_array", "=", "(", "_CXUnsavedFile", "*", "len", "(", "unsaved_files", ")", ")", "(", ")", "for", "i", ",", "(", "name", ",", "value", ")", "in", "enumerate", "(", "unsaved_files", ")", ":", "if", "not", "isinstance", "(", "value", ",", "str", ")", ":", "# FIXME: It would be great to support an efficient version", "# of this, one day.", "value", "=", "value", ".", "read", "(", ")", "print", "(", "value", ")", "if", "not", "isinstance", "(", "value", ",", "str", ")", ":", "raise", "TypeError", "(", "'Unexpected unsaved file contents.'", ")", "unsaved_files_array", "[", "i", "]", ".", "name", "=", "name", "unsaved_files_array", "[", "i", "]", ".", "contents", "=", "value", "unsaved_files_array", "[", "i", "]", ".", "length", "=", "len", "(", "value", ")", "ptr", "=", "conf", ".", "lib", ".", "clang_reparseTranslationUnit", "(", "self", ",", "len", "(", "unsaved_files", ")", ",", "unsaved_files_array", ",", "options", ")" ]
https://github.com/GoSSIP-SJTU/TripleDoggy/blob/03648d6b19c812504b14e8b98c8c7b3f443f4e54/tools/clang/bindings/python/clang/cindex.py#L2951-L2978
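A usage sketch through the public cindex entry points; the (filename, contents) pairs shadow whatever is on disk, so a buffer can be edited and reparsed without touching the filesystem:

from clang.cindex import Index

index = Index.create()
src = "int main(void) { return 0; }"
tu = index.parse("demo.c", unsaved_files=[("demo.c", src)])
# Edit the in-memory buffer and reparse the same translation unit.
tu.reparse(unsaved_files=[("demo.c", src.replace("return 0", "return 1"))])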
SeisSol/SeisSol
955fbeb8c5d40d3363a2da0edc611259aebe1653
preprocessing/science/kinematic_models/FaultPlane.py
python
FaultPlane.upsample_fault
(self, spatial_order, spatial_zoom, temporal_zoom, proj, use_Yoffe=False, time_smoothing_kernel_as_dt_fraction=0.5)
return pf
increase spatial and temporal resolution of kinematic model by interpolation
increase spatial and temporal resolution of kinematic model by interpolation
[ "increase", "spatial", "and", "temporal", "resolution", "of", "kinematic", "model", "by", "interpolation" ]
def upsample_fault(self, spatial_order, spatial_zoom, temporal_zoom, proj, use_Yoffe=False, time_smoothing_kernel_as_dt_fraction=0.5): "increase spatial and temporal resolution of kinematic model by interpolation" # time vector ndt2 = (self.ndt - 1) * temporal_zoom + 1 ny2, nx2 = self.ny * spatial_zoom, self.nx * spatial_zoom # resampled source pf = FaultPlane() pf.init_spatial_arrays(nx2, ny2) pf.ndt = ndt2 pf.init_aSR() pf.dt = self.dt / temporal_zoom pf.compute_time_array() # upsample spatially geometry (bilinear interpolation) allarr = np.array([self.x, self.y, self.depth]) pf.x, pf.y, pf.depth = upsample_quantities(allarr, spatial_order=1, spatial_zoom=spatial_zoom, padding="extrapolate") # upsample other quantities allarr = np.array([self.t0, self.strike, self.dip, self.rake]) pf.t0, pf.strike, pf.dip, pf.rake = upsample_quantities(allarr, spatial_order, spatial_zoom, padding="edge") # the interpolation may generate some acausality that we here prevent pf.t0 = np.maximum(pf.t0, np.amin(self.t0)) allarr = np.array([self.slip1]) (pf.slip1,) = upsample_quantities(allarr, spatial_order, spatial_zoom, padding="constant", minimize_block_average_variations=True) pf.compute_latlon_from_xy(proj) pf.PSarea_cm2 = self.PSarea_cm2 / spatial_zoom ** 2 ratio_potency = np.sum(pf.slip1) * pf.PSarea_cm2 / (np.sum(self.slip1) * self.PSarea_cm2) print(f"seismic potency ratio (upscaled over initial): {ratio_potency}") if use_Yoffe: allarr = np.array([self.rise_time, self.tacc]) pf.rise_time, pf.tacc = upsample_quantities(allarr, spatial_order, spatial_zoom, padding="edge") pf.rise_time = np.maximum(pf.rise_time, np.amin(self.rise_time)) pf.tacc = np.maximum(pf.tacc, np.amin(self.tacc)) # see comment above explaining why the 1.27 factor print("using ts = tacc / 1.27 to compute the regularized Yoffe") ts = pf.tacc / 1.27 tr = pf.rise_time - 2.0 * ts tr = np.maximum(tr, ts) for j in range(pf.ny): for i in range(pf.nx): for k, tk in enumerate(pf.myt): pf.aSR[j, i, k] = pf.slip1[j, i] * regularizedYoffe(tk, ts[j, i], tr[j, i]) else: aSRa = np.zeros((pf.ny, pf.nx, self.ndt)) for k in range(self.ndt): aSRa[:, :, k] = upsample_quantities(np.array([self.aSR[:, :, k]]), spatial_order, spatial_zoom, padding="constant") # interpolate temporally the AST for j in range(pf.ny): for i in range(pf.nx): # 1. upsample with linear interpolation # 2. apply a gauss kernel to smooth out sharp edges # 3. tapper the signal smoothly to 0 at both time ends # 4. rescale SR to ensure integral (SR) = slip f = interpolate.interp1d(self.myt, aSRa[j, i, :], kind="linear") pf.aSR[j, i, :] = f(pf.myt) tapper = cosine_taper(pf.ndt, self.dt / (pf.ndt * pf.dt)) pf.aSR[j, i, :] = tapper * ndimage.gaussian_filter1d(pf.aSR[j, i, :], time_smoothing_kernel_as_dt_fraction * self.dt / pf.dt, mode="constant") # With a cubic interpolation, the interpolated slip1 may be negative which does not make sense. if pf.slip1[j, i] < 0: pf.aSR[j, i, :] = 0 continue # should be the SR integral_STF = np.trapz(np.abs(pf.aSR[j, i, :]), dx=pf.dt) if abs(integral_STF) > 0: pf.aSR[j, i, :] = pf.slip1[j, i] * pf.aSR[j, i, :] / integral_STF return pf
[ "def", "upsample_fault", "(", "self", ",", "spatial_order", ",", "spatial_zoom", ",", "temporal_zoom", ",", "proj", ",", "use_Yoffe", "=", "False", ",", "time_smoothing_kernel_as_dt_fraction", "=", "0.5", ")", ":", "# time vector", "ndt2", "=", "(", "self", ".", "ndt", "-", "1", ")", "*", "temporal_zoom", "+", "1", "ny2", ",", "nx2", "=", "self", ".", "ny", "*", "spatial_zoom", ",", "self", ".", "nx", "*", "spatial_zoom", "# resampled source", "pf", "=", "FaultPlane", "(", ")", "pf", ".", "init_spatial_arrays", "(", "nx2", ",", "ny2", ")", "pf", ".", "ndt", "=", "ndt2", "pf", ".", "init_aSR", "(", ")", "pf", ".", "dt", "=", "self", ".", "dt", "/", "temporal_zoom", "pf", ".", "compute_time_array", "(", ")", "# upsample spatially geometry (bilinear interpolation)", "allarr", "=", "np", ".", "array", "(", "[", "self", ".", "x", ",", "self", ".", "y", ",", "self", ".", "depth", "]", ")", "pf", ".", "x", ",", "pf", ".", "y", ",", "pf", ".", "depth", "=", "upsample_quantities", "(", "allarr", ",", "spatial_order", "=", "1", ",", "spatial_zoom", "=", "spatial_zoom", ",", "padding", "=", "\"extrapolate\"", ")", "# upsample other quantities", "allarr", "=", "np", ".", "array", "(", "[", "self", ".", "t0", ",", "self", ".", "strike", ",", "self", ".", "dip", ",", "self", ".", "rake", "]", ")", "pf", ".", "t0", ",", "pf", ".", "strike", ",", "pf", ".", "dip", ",", "pf", ".", "rake", "=", "upsample_quantities", "(", "allarr", ",", "spatial_order", ",", "spatial_zoom", ",", "padding", "=", "\"edge\"", ")", "# the interpolation may generate some acausality that we here prevent", "pf", ".", "t0", "=", "np", ".", "maximum", "(", "pf", ".", "t0", ",", "np", ".", "amin", "(", "self", ".", "t0", ")", ")", "allarr", "=", "np", ".", "array", "(", "[", "self", ".", "slip1", "]", ")", "(", "pf", ".", "slip1", ",", ")", "=", "upsample_quantities", "(", "allarr", ",", "spatial_order", ",", "spatial_zoom", ",", "padding", "=", "\"constant\"", ",", "minimize_block_average_variations", "=", "True", ")", "pf", ".", "compute_latlon_from_xy", "(", "proj", ")", "pf", ".", "PSarea_cm2", "=", "self", ".", "PSarea_cm2", "/", "spatial_zoom", "**", "2", "ratio_potency", "=", "np", ".", "sum", "(", "pf", ".", "slip1", ")", "*", "pf", ".", "PSarea_cm2", "/", "(", "np", ".", "sum", "(", "self", ".", "slip1", ")", "*", "self", ".", "PSarea_cm2", ")", "print", "(", "f\"seismic potency ratio (upscaled over initial): {ratio_potency}\"", ")", "if", "use_Yoffe", ":", "allarr", "=", "np", ".", "array", "(", "[", "self", ".", "rise_time", ",", "self", ".", "tacc", "]", ")", "pf", ".", "rise_time", ",", "pf", ".", "tacc", "=", "upsample_quantities", "(", "allarr", ",", "spatial_order", ",", "spatial_zoom", ",", "padding", "=", "\"edge\"", ")", "pf", ".", "rise_time", "=", "np", ".", "maximum", "(", "pf", ".", "rise_time", ",", "np", ".", "amin", "(", "self", ".", "rise_time", ")", ")", "pf", ".", "tacc", "=", "np", ".", "maximum", "(", "pf", ".", "tacc", ",", "np", ".", "amin", "(", "self", ".", "tacc", ")", ")", "# see comment above explaining why the 1.27 factor", "print", "(", "\"using ts = tacc / 1.27 to compute the regularized Yoffe\"", ")", "ts", "=", "pf", ".", "tacc", "/", "1.27", "tr", "=", "pf", ".", "rise_time", "-", "2.0", "*", "ts", "tr", "=", "np", ".", "maximum", "(", "tr", ",", "ts", ")", "for", "j", "in", "range", "(", "pf", ".", "ny", ")", ":", "for", "i", "in", "range", "(", "pf", ".", "nx", ")", ":", "for", "k", ",", "tk", "in", "enumerate", "(", "pf", ".", "myt", ")", ":", "pf", ".", "aSR", "[", "j", ",", "i", ",", 
"k", "]", "=", "pf", ".", "slip1", "[", "j", ",", "i", "]", "*", "regularizedYoffe", "(", "tk", ",", "ts", "[", "j", ",", "i", "]", ",", "tr", "[", "j", ",", "i", "]", ")", "else", ":", "aSRa", "=", "np", ".", "zeros", "(", "(", "pf", ".", "ny", ",", "pf", ".", "nx", ",", "self", ".", "ndt", ")", ")", "for", "k", "in", "range", "(", "self", ".", "ndt", ")", ":", "aSRa", "[", ":", ",", ":", ",", "k", "]", "=", "upsample_quantities", "(", "np", ".", "array", "(", "[", "self", ".", "aSR", "[", ":", ",", ":", ",", "k", "]", "]", ")", ",", "spatial_order", ",", "spatial_zoom", ",", "padding", "=", "\"constant\"", ")", "# interpolate temporally the AST", "for", "j", "in", "range", "(", "pf", ".", "ny", ")", ":", "for", "i", "in", "range", "(", "pf", ".", "nx", ")", ":", "# 1. upsample with linear interpolation", "# 2. apply a gauss kernel to smooth out sharp edges", "# 3. tapper the signal smoothly to 0 at both time ends", "# 4. rescale SR to ensure integral (SR) = slip", "f", "=", "interpolate", ".", "interp1d", "(", "self", ".", "myt", ",", "aSRa", "[", "j", ",", "i", ",", ":", "]", ",", "kind", "=", "\"linear\"", ")", "pf", ".", "aSR", "[", "j", ",", "i", ",", ":", "]", "=", "f", "(", "pf", ".", "myt", ")", "tapper", "=", "cosine_taper", "(", "pf", ".", "ndt", ",", "self", ".", "dt", "/", "(", "pf", ".", "ndt", "*", "pf", ".", "dt", ")", ")", "pf", ".", "aSR", "[", "j", ",", "i", ",", ":", "]", "=", "tapper", "*", "ndimage", ".", "gaussian_filter1d", "(", "pf", ".", "aSR", "[", "j", ",", "i", ",", ":", "]", ",", "time_smoothing_kernel_as_dt_fraction", "*", "self", ".", "dt", "/", "pf", ".", "dt", ",", "mode", "=", "\"constant\"", ")", "# With a cubic interpolation, the interpolated slip1 may be negative which does not make sense.", "if", "pf", ".", "slip1", "[", "j", ",", "i", "]", "<", "0", ":", "pf", ".", "aSR", "[", "j", ",", "i", ",", ":", "]", "=", "0", "continue", "# should be the SR", "integral_STF", "=", "np", ".", "trapz", "(", "np", ".", "abs", "(", "pf", ".", "aSR", "[", "j", ",", "i", ",", ":", "]", ")", ",", "dx", "=", "pf", ".", "dt", ")", "if", "abs", "(", "integral_STF", ")", ">", "0", ":", "pf", ".", "aSR", "[", "j", ",", "i", ",", ":", "]", "=", "pf", ".", "slip1", "[", "j", ",", "i", "]", "*", "pf", ".", "aSR", "[", "j", ",", "i", ",", ":", "]", "/", "integral_STF", "return", "pf" ]
https://github.com/SeisSol/SeisSol/blob/955fbeb8c5d40d3363a2da0edc611259aebe1653/preprocessing/science/kinematic_models/FaultPlane.py#L429-L498
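Two numerical details are worth isolating: ts is taken as tacc / 1.27 (the regularized-Yoffe convention the code's comment refers to), and each interpolated source-time function is rescaled so that its time integral equals the cell's slip. A stand-alone sketch of that rescaling step:

import numpy as np

dt = 0.01
t = np.arange(0.0, 2.0, dt)
sr = np.maximum(0.0, np.sin(np.pi * t / 2.0))   # stand-in slip-rate shape
slip = 0.35                                      # target slip for this cell
sr *= slip / np.trapz(np.abs(sr), dx=dt)         # rescale, as in upsample_fault
assert np.isclose(np.trapz(sr, dx=dt), slip)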
thalium/icebox
99d147d5b9269222225443ce171b4fd46d8985d4
third_party/virtualbox/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py
python
SimpleParser.parseTagOpDisEnum
(self, sTag, aasSections, iTagLine, iEndLine)
return True
Tag: \@opdisenum Value: OP_XXXX This is for select a specific (legacy) disassembler enum value for the instruction.
Tag: \@opdisenum Value: OP_XXXX
[ "Tag", ":", "\\", "@opdisenum", "Value", ":", "OP_XXXX" ]
def parseTagOpDisEnum(self, sTag, aasSections, iTagLine, iEndLine): """ Tag: \@opdisenum Value: OP_XXXX This is for select a specific (legacy) disassembler enum value for the instruction. """ oInstr = self.ensureInstructionForOpTag(iTagLine); # Flatten and split. asWords = self.flattenAllSections(aasSections).split(); if len(asWords) != 1: self.errorComment(iTagLine, '%s: expected exactly one value: %s' % (sTag, asWords,)); if not asWords: return False; sDisEnum = asWords[0]; if not self.oReDisEnum.match(sDisEnum): return self.errorComment(iTagLine, '%s: invalid disassembler OP_XXXX enum: %s (pattern: %s)' % (sTag, sDisEnum, self.oReDisEnum.pattern)); # Set it. if oInstr.sDisEnum is not None: return self.errorComment(iTagLine, '%s: attempting to overwrite "%s" with "%s"' % (sTag, oInstr.sDisEnum, sDisEnum,)); oInstr.sDisEnum = sDisEnum; _ = iEndLine; return True;
[ "def", "parseTagOpDisEnum", "(", "self", ",", "sTag", ",", "aasSections", ",", "iTagLine", ",", "iEndLine", ")", ":", "oInstr", "=", "self", ".", "ensureInstructionForOpTag", "(", "iTagLine", ")", "# Flatten and split.", "asWords", "=", "self", ".", "flattenAllSections", "(", "aasSections", ")", ".", "split", "(", ")", "if", "len", "(", "asWords", ")", "!=", "1", ":", "self", ".", "errorComment", "(", "iTagLine", ",", "'%s: expected exactly one value: %s'", "%", "(", "sTag", ",", "asWords", ",", ")", ")", "if", "not", "asWords", ":", "return", "False", "sDisEnum", "=", "asWords", "[", "0", "]", "if", "not", "self", ".", "oReDisEnum", ".", "match", "(", "sDisEnum", ")", ":", "return", "self", ".", "errorComment", "(", "iTagLine", ",", "'%s: invalid disassembler OP_XXXX enum: %s (pattern: %s)'", "%", "(", "sTag", ",", "sDisEnum", ",", "self", ".", "oReDisEnum", ".", "pattern", ")", ")", "# Set it.", "if", "oInstr", ".", "sDisEnum", "is", "not", "None", ":", "return", "self", ".", "errorComment", "(", "iTagLine", ",", "'%s: attempting to overwrite \"%s\" with \"%s\"'", "%", "(", "sTag", ",", "oInstr", ".", "sDisEnum", ",", "sDisEnum", ",", ")", ")", "oInstr", ".", "sDisEnum", "=", "sDisEnum", "_", "=", "iEndLine", "return", "True" ]
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py#L2310-L2337
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Path/PathScripts/PathOpTools.py
python
offsetWire
(wire, base, offset, forward, Side=None)
return orientWire(Part.Wire(edges), None)
offsetWire(wire, base, offset, forward) ... offsets the wire away from base and orients the wire accordingly. The function tries to avoid most of the pitfalls of Part.makeOffset2D which is possible because all offsetting happens in the XY plane.
offsetWire(wire, base, offset, forward) ... offsets the wire away from base and orients the wire accordingly. The function tries to avoid most of the pitfalls of Part.makeOffset2D which is possible because all offsetting happens in the XY plane.
[ "offsetWire", "(", "wire", "base", "offset", "forward", ")", "...", "offsets", "the", "wire", "away", "from", "base", "and", "orients", "the", "wire", "accordingly", ".", "The", "function", "tries", "to", "avoid", "most", "of", "the", "pitfalls", "of", "Part", ".", "makeOffset2D", "which", "is", "possible", "because", "all", "offsetting", "happens", "in", "the", "XY", "plane", "." ]
def offsetWire(wire, base, offset, forward, Side=None):
    """offsetWire(wire, base, offset, forward) ... offsets the wire away from base
    and orients the wire accordingly. The function tries to avoid most of the pitfalls
    of Part.makeOffset2D which is possible because all offsetting happens in the XY plane.
    """
    PathLog.track("offsetWire")

    if 1 == len(wire.Edges):
        edge = wire.Edges[0]
        curve = edge.Curve
        if Part.Circle == type(curve) and wire.isClosed():
            # it's a full circle and there are some problems with that, see
            # https://www.freecadweb.org/wiki/Part%20Offset2D
            # it's easy to construct them manually though
            z = -1 if forward else 1
            new_edge = Part.makeCircle(
                curve.Radius + offset, curve.Center, FreeCAD.Vector(0, 0, z)
            )
            if base.isInside(new_edge.Vertexes[0].Point, offset / 2, True):
                if offset > curve.Radius or PathGeom.isRoughly(offset, curve.Radius):
                    # offsetting a hole by its own radius (or more) makes the hole vanish
                    return None
                if Side:
                    Side[0] = "Inside"
                    print("inside")
                new_edge = Part.makeCircle(
                    curve.Radius - offset, curve.Center, FreeCAD.Vector(0, 0, -z)
                )
            return Part.Wire([new_edge])

        if Part.Circle == type(curve) and not wire.isClosed():
            # Process arc segment
            z = -1 if forward else 1
            l1 = math.sqrt(
                (edge.Vertexes[0].Point.x - curve.Center.x) ** 2
                + (edge.Vertexes[0].Point.y - curve.Center.y) ** 2
            )
            l2 = math.sqrt(
                (edge.Vertexes[1].Point.x - curve.Center.x) ** 2
                + (edge.Vertexes[1].Point.y - curve.Center.y) ** 2
            )
            # Calculate angles based on x-axis (0 - PI/2)
            start_angle = math.acos((edge.Vertexes[0].Point.x - curve.Center.x) / l1)
            end_angle = math.acos((edge.Vertexes[1].Point.x - curve.Center.x) / l2)
            # Angles are based on x-axis (Mirrored on x-axis) -> negative y value means negative angle
            if edge.Vertexes[0].Point.y < curve.Center.y:
                start_angle *= -1
            if edge.Vertexes[1].Point.y < curve.Center.y:
                end_angle *= -1
            if (
                edge.Vertexes[0].Point.x > curve.Center.x
                or edge.Vertexes[1].Point.x > curve.Center.x
            ) and curve.AngleXU < 0:
                tmp = start_angle
                start_angle = end_angle
                end_angle = tmp
            # Inside / Outside
            if base.isInside(edge.Vertexes[0].Point, offset / 2, True):
                offset *= -1
                if Side:
                    Side[0] = "Inside"
            # Create new arc
            if curve.AngleXU > 0:
                edge = Part.ArcOfCircle(
                    Part.Circle(
                        curve.Center, FreeCAD.Vector(0, 0, 1), curve.Radius + offset
                    ),
                    start_angle,
                    end_angle,
                ).toShape()
            else:
                edge = Part.ArcOfCircle(
                    Part.Circle(
                        curve.Center, FreeCAD.Vector(0, 0, 1), curve.Radius - offset
                    ),
                    start_angle,
                    end_angle,
                ).toShape()
            return Part.Wire([edge])

        if Part.Line == type(curve) or Part.LineSegment == type(curve):
            # offsetting a single edge doesn't work because there are infinitely many
            # possible planes into which the edge could be offset
            # luckily, the plane here must be the XY-plane ...
            p0 = edge.Vertexes[0].Point
            v0 = edge.Vertexes[1].Point - p0
            n = v0.cross(FreeCAD.Vector(0, 0, 1))
            o = n.normalize() * offset
            edge.translate(o)

            # offset edge the other way if the result is inside
            if base.isInside(
                edge.valueAt((edge.FirstParameter + edge.LastParameter) / 2),
                offset / 2,
                True,
            ):
                edge.translate(-2 * o)

            # flip the edge if it's not on the right side of the original edge
            if forward is not None:
                v1 = edge.Vertexes[1].Point - p0
                left = PathGeom.Side.Left == PathGeom.Side.of(v0, v1)
                if left != forward:
                    edge = PathGeom.flipEdge(edge)
            return Part.Wire([edge])
        # if we get to this point the assumption is that makeOffset2D can deal with the edge
        pass  # pylint: disable=unnecessary-pass

    owire = orientWire(wire.makeOffset2D(offset), True)
    debugWire("makeOffset2D_%d" % len(wire.Edges), owire)

    if wire.isClosed():
        if not base.isInside(owire.Edges[0].Vertexes[0].Point, offset / 2, True):
            PathLog.track("closed - outside")
            if Side:
                Side[0] = "Outside"
            return orientWire(owire, forward)
        PathLog.track("closed - inside")
        if Side:
            Side[0] = "Inside"
        try:
            owire = wire.makeOffset2D(-offset)
        except Exception:  # pylint: disable=broad-except
            # most likely offsetting didn't work because the wire is a hole
            # and the offset is too big - making the hole vanish
            return None
        # For negative offsets (holes) 'forward' is the other way
        if forward is None:
            return orientWire(owire, None)
        return orientWire(owire, not forward)

    # An edge is considered to be inside of shape if the mid point is inside
    # Of the remaining edges we take the longest wire to be the engraving side
    # Looking for a circle with the start vertex as center marks and end
    # starting from there follow the edges until a circle with the end vertex as center is found
    # if the traversed edges include any of the remaining from above, all those edges are remaining
    # this is to also include edges which might partially be inside shape
    # if they need to be discarded, split, that should happen in a post process
    # Depending on the Axis of the circle, and which side remains we know if the wire needs to be flipped

    # first, let's make sure all edges are oriented the proper way
    edges = _orientEdges(wire.Edges)

    # determine the start and end point
    start = edges[0].firstVertex().Point
    end = edges[-1].lastVertex().Point

    debugWire("wire", wire)
    debugWire("wedges", Part.Wire(edges))

    # find edges that are not inside the shape
    common = base.common(owire)
    insideEndpoints = [e.lastVertex().Point for e in common.Edges]
    insideEndpoints.append(common.Edges[0].firstVertex().Point)

    def isInside(edge):
        p0 = edge.firstVertex().Point
        p1 = edge.lastVertex().Point
        for p in insideEndpoints:
            if PathGeom.pointsCoincide(p, p0, 0.01) or PathGeom.pointsCoincide(
                p, p1, 0.01
            ):
                return True
        return False

    outside = [e for e in owire.Edges if not isInside(e)]

    # discard all edges that are not part of the longest wire
    longestWire = None
    for w in [Part.Wire(el) for el in Part.sortEdges(outside)]:
        if not longestWire or longestWire.Length < w.Length:
            longestWire = w

    debugWire("outside", Part.Wire(outside))
    debugWire("longest", longestWire)

    def isCircleAt(edge, center):
        """isCircleAt(edge, center) ... helper function returns True if edge is a circle at the given center."""
        if Part.Circle == type(edge.Curve) or Part.ArcOfCircle == type(edge.Curve):
            return PathGeom.pointsCoincide(edge.Curve.Center, center)
        return False

    # split offset wire into edges to the left side and edges to the right side
    collectLeft = False
    collectRight = False
    leftSideEdges = []
    rightSideEdges = []

    # traverse through all edges in order and start collecting them when we encounter
    # an end point (circle centered at one of the end points of the original wire).
    # should we come to an end point and determine that we've already collected the
    # next side, we're done
    for e in owire.Edges + owire.Edges:
        if isCircleAt(e, start):
            if PathGeom.pointsCoincide(e.Curve.Axis, FreeCAD.Vector(0, 0, 1)):
                if not collectLeft and leftSideEdges:
                    break
                collectLeft = True
                collectRight = False
            else:
                if not collectRight and rightSideEdges:
                    break
                collectLeft = False
                collectRight = True
        elif isCircleAt(e, end):
            if PathGeom.pointsCoincide(e.Curve.Axis, FreeCAD.Vector(0, 0, 1)):
                if not collectRight and rightSideEdges:
                    break
                collectLeft = False
                collectRight = True
            else:
                if not collectLeft and leftSideEdges:
                    break
                collectLeft = True
                collectRight = False
        elif collectLeft:
            leftSideEdges.append(e)
        elif collectRight:
            rightSideEdges.append(e)

    debugWire("left", Part.Wire(leftSideEdges))
    debugWire("right", Part.Wire(rightSideEdges))

    # figure out if all the left sided edges or the right sided edges are the ones
    # that are 'outside'. However, we return the full side.
    edges = leftSideEdges
    for e in longestWire.Edges:
        for e0 in rightSideEdges:
            if PathGeom.edgesMatch(e, e0):
                edges = rightSideEdges
                PathLog.debug("#use right side edges")
                if not forward:
                    PathLog.debug("#reverse")
                    edges.reverse()
                return orientWire(Part.Wire(edges), None)

    # at this point we have the correct edges and they are in the order for forward
    # traversal (climb milling). If that's not what we want just reverse the order,
    # orientWire takes care of orienting the edges appropriately.
    PathLog.debug("#use left side edges")
    if not forward:
        PathLog.debug("#reverse")
        edges.reverse()
    return orientWire(Part.Wire(edges), None)
[ "def", "offsetWire", "(", "wire", ",", "base", ",", "offset", ",", "forward", ",", "Side", "=", "None", ")", ":", "PathLog", ".", "track", "(", "\"offsetWire\"", ")", "if", "1", "==", "len", "(", "wire", ".", "Edges", ")", ":", "edge", "=", "wire", ".", "Edges", "[", "0", "]", "curve", "=", "edge", ".", "Curve", "if", "Part", ".", "Circle", "==", "type", "(", "curve", ")", "and", "wire", ".", "isClosed", "(", ")", ":", "# it's a full circle and there are some problems with that, see", "# https://www.freecadweb.org/wiki/Part%20Offset2D", "# it's easy to construct them manually though", "z", "=", "-", "1", "if", "forward", "else", "1", "new_edge", "=", "Part", ".", "makeCircle", "(", "curve", ".", "Radius", "+", "offset", ",", "curve", ".", "Center", ",", "FreeCAD", ".", "Vector", "(", "0", ",", "0", ",", "z", ")", ")", "if", "base", ".", "isInside", "(", "new_edge", ".", "Vertexes", "[", "0", "]", ".", "Point", ",", "offset", "/", "2", ",", "True", ")", ":", "if", "offset", ">", "curve", ".", "Radius", "or", "PathGeom", ".", "isRoughly", "(", "offset", ",", "curve", ".", "Radius", ")", ":", "# offsetting a hole by its own radius (or more) makes the hole vanish", "return", "None", "if", "Side", ":", "Side", "[", "0", "]", "=", "\"Inside\"", "print", "(", "\"inside\"", ")", "new_edge", "=", "Part", ".", "makeCircle", "(", "curve", ".", "Radius", "-", "offset", ",", "curve", ".", "Center", ",", "FreeCAD", ".", "Vector", "(", "0", ",", "0", ",", "-", "z", ")", ")", "return", "Part", ".", "Wire", "(", "[", "new_edge", "]", ")", "if", "Part", ".", "Circle", "==", "type", "(", "curve", ")", "and", "not", "wire", ".", "isClosed", "(", ")", ":", "# Process arc segment", "z", "=", "-", "1", "if", "forward", "else", "1", "l1", "=", "math", ".", "sqrt", "(", "(", "edge", ".", "Vertexes", "[", "0", "]", ".", "Point", ".", "x", "-", "curve", ".", "Center", ".", "x", ")", "**", "2", "+", "(", "edge", ".", "Vertexes", "[", "0", "]", ".", "Point", ".", "y", "-", "curve", ".", "Center", ".", "y", ")", "**", "2", ")", "l2", "=", "math", ".", "sqrt", "(", "(", "edge", ".", "Vertexes", "[", "1", "]", ".", "Point", ".", "x", "-", "curve", ".", "Center", ".", "x", ")", "**", "2", "+", "(", "edge", ".", "Vertexes", "[", "1", "]", ".", "Point", ".", "y", "-", "curve", ".", "Center", ".", "y", ")", "**", "2", ")", "# Calculate angles based on x-axis (0 - PI/2)", "start_angle", "=", "math", ".", "acos", "(", "(", "edge", ".", "Vertexes", "[", "0", "]", ".", "Point", ".", "x", "-", "curve", ".", "Center", ".", "x", ")", "/", "l1", ")", "end_angle", "=", "math", ".", "acos", "(", "(", "edge", ".", "Vertexes", "[", "1", "]", ".", "Point", ".", "x", "-", "curve", ".", "Center", ".", "x", ")", "/", "l2", ")", "# Angles are based on x-axis (Mirrored on x-axis) -> negative y value means negative angle", "if", "edge", ".", "Vertexes", "[", "0", "]", ".", "Point", ".", "y", "<", "curve", ".", "Center", ".", "y", ":", "start_angle", "*=", "-", "1", "if", "edge", ".", "Vertexes", "[", "1", "]", ".", "Point", ".", "y", "<", "curve", ".", "Center", ".", "y", ":", "end_angle", "*=", "-", "1", "if", "(", "edge", ".", "Vertexes", "[", "0", "]", ".", "Point", ".", "x", ">", "curve", ".", "Center", ".", "x", "or", "edge", ".", "Vertexes", "[", "1", "]", ".", "Point", ".", "x", ">", "curve", ".", "Center", ".", "x", ")", "and", "curve", ".", "AngleXU", "<", "0", ":", "tmp", "=", "start_angle", "start_angle", "=", "end_angle", "end_angle", "=", "tmp", "# Inside / Outside", "if", "base", ".", "isInside", "(", "edge", ".", "Vertexes", 
"[", "0", "]", ".", "Point", ",", "offset", "/", "2", ",", "True", ")", ":", "offset", "*=", "-", "1", "if", "Side", ":", "Side", "[", "0", "]", "=", "\"Inside\"", "# Create new arc", "if", "curve", ".", "AngleXU", ">", "0", ":", "edge", "=", "Part", ".", "ArcOfCircle", "(", "Part", ".", "Circle", "(", "curve", ".", "Center", ",", "FreeCAD", ".", "Vector", "(", "0", ",", "0", ",", "1", ")", ",", "curve", ".", "Radius", "+", "offset", ")", ",", "start_angle", ",", "end_angle", ",", ")", ".", "toShape", "(", ")", "else", ":", "edge", "=", "Part", ".", "ArcOfCircle", "(", "Part", ".", "Circle", "(", "curve", ".", "Center", ",", "FreeCAD", ".", "Vector", "(", "0", ",", "0", ",", "1", ")", ",", "curve", ".", "Radius", "-", "offset", ")", ",", "start_angle", ",", "end_angle", ",", ")", ".", "toShape", "(", ")", "return", "Part", ".", "Wire", "(", "[", "edge", "]", ")", "if", "Part", ".", "Line", "==", "type", "(", "curve", ")", "or", "Part", ".", "LineSegment", "==", "type", "(", "curve", ")", ":", "# offsetting a single edge doesn't work because there is an infinite", "# possible planes into which the edge could be offset", "# luckily, the plane here must be the XY-plane ...", "p0", "=", "edge", ".", "Vertexes", "[", "0", "]", ".", "Point", "v0", "=", "edge", ".", "Vertexes", "[", "1", "]", ".", "Point", "-", "p0", "n", "=", "v0", ".", "cross", "(", "FreeCAD", ".", "Vector", "(", "0", ",", "0", ",", "1", ")", ")", "o", "=", "n", ".", "normalize", "(", ")", "*", "offset", "edge", ".", "translate", "(", "o", ")", "# offset edde the other way if the result is inside", "if", "base", ".", "isInside", "(", "edge", ".", "valueAt", "(", "(", "edge", ".", "FirstParameter", "+", "edge", ".", "LastParameter", ")", "/", "2", ")", ",", "offset", "/", "2", ",", "True", ",", ")", ":", "edge", ".", "translate", "(", "-", "2", "*", "o", ")", "# flip the edge if it's not on the right side of the original edge", "if", "forward", "is", "not", "None", ":", "v1", "=", "edge", ".", "Vertexes", "[", "1", "]", ".", "Point", "-", "p0", "left", "=", "PathGeom", ".", "Side", ".", "Left", "==", "PathGeom", ".", "Side", ".", "of", "(", "v0", ",", "v1", ")", "if", "left", "!=", "forward", ":", "edge", "=", "PathGeom", ".", "flipEdge", "(", "edge", ")", "return", "Part", ".", "Wire", "(", "[", "edge", "]", ")", "# if we get to this point the assumption is that makeOffset2D can deal with the edge", "pass", "# pylint: disable=unnecessary-pass", "owire", "=", "orientWire", "(", "wire", ".", "makeOffset2D", "(", "offset", ")", ",", "True", ")", "debugWire", "(", "\"makeOffset2D_%d\"", "%", "len", "(", "wire", ".", "Edges", ")", ",", "owire", ")", "if", "wire", ".", "isClosed", "(", ")", ":", "if", "not", "base", ".", "isInside", "(", "owire", ".", "Edges", "[", "0", "]", ".", "Vertexes", "[", "0", "]", ".", "Point", ",", "offset", "/", "2", ",", "True", ")", ":", "PathLog", ".", "track", "(", "\"closed - outside\"", ")", "if", "Side", ":", "Side", "[", "0", "]", "=", "\"Outside\"", "return", "orientWire", "(", "owire", ",", "forward", ")", "PathLog", ".", "track", "(", "\"closed - inside\"", ")", "if", "Side", ":", "Side", "[", "0", "]", "=", "\"Inside\"", "try", ":", "owire", "=", "wire", ".", "makeOffset2D", "(", "-", "offset", ")", "except", "Exception", ":", "# pylint: disable=broad-except", "# most likely offsetting didn't work because the wire is a hole", "# and the offset is too big - making the hole vanish", "return", "None", "# For negative offsets (holes) 'forward' is the other way", "if", "forward", "is", "None", ":", 
"return", "orientWire", "(", "owire", ",", "None", ")", "return", "orientWire", "(", "owire", ",", "not", "forward", ")", "# An edge is considered to be inside of shape if the mid point is inside", "# Of the remaining edges we take the longest wire to be the engraving side", "# Looking for a circle with the start vertex as center marks and end", "# starting from there follow the edges until a circle with the end vertex as center is found", "# if the traversed edges include any of the remaining from above, all those edges are remaining", "# this is to also include edges which might partially be inside shape", "# if they need to be discarded, split, that should happen in a post process", "# Depending on the Axis of the circle, and which side remains we know if the wire needs to be flipped", "# first, let's make sure all edges are oriented the proper way", "edges", "=", "_orientEdges", "(", "wire", ".", "Edges", ")", "# determine the start and end point", "start", "=", "edges", "[", "0", "]", ".", "firstVertex", "(", ")", ".", "Point", "end", "=", "edges", "[", "-", "1", "]", ".", "lastVertex", "(", ")", ".", "Point", "debugWire", "(", "\"wire\"", ",", "wire", ")", "debugWire", "(", "\"wedges\"", ",", "Part", ".", "Wire", "(", "edges", ")", ")", "# find edges that are not inside the shape", "common", "=", "base", ".", "common", "(", "owire", ")", "insideEndpoints", "=", "[", "e", ".", "lastVertex", "(", ")", ".", "Point", "for", "e", "in", "common", ".", "Edges", "]", "insideEndpoints", ".", "append", "(", "common", ".", "Edges", "[", "0", "]", ".", "firstVertex", "(", ")", ".", "Point", ")", "def", "isInside", "(", "edge", ")", ":", "p0", "=", "edge", ".", "firstVertex", "(", ")", ".", "Point", "p1", "=", "edge", ".", "lastVertex", "(", ")", ".", "Point", "for", "p", "in", "insideEndpoints", ":", "if", "PathGeom", ".", "pointsCoincide", "(", "p", ",", "p0", ",", "0.01", ")", "or", "PathGeom", ".", "pointsCoincide", "(", "p", ",", "p1", ",", "0.01", ")", ":", "return", "True", "return", "False", "outside", "=", "[", "e", "for", "e", "in", "owire", ".", "Edges", "if", "not", "isInside", "(", "e", ")", "]", "# discard all edges that are not part of the longest wire", "longestWire", "=", "None", "for", "w", "in", "[", "Part", ".", "Wire", "(", "el", ")", "for", "el", "in", "Part", ".", "sortEdges", "(", "outside", ")", "]", ":", "if", "not", "longestWire", "or", "longestWire", ".", "Length", "<", "w", ".", "Length", ":", "longestWire", "=", "w", "debugWire", "(", "\"outside\"", ",", "Part", ".", "Wire", "(", "outside", ")", ")", "debugWire", "(", "\"longest\"", ",", "longestWire", ")", "def", "isCircleAt", "(", "edge", ",", "center", ")", ":", "\"\"\"isCircleAt(edge, center) ... 
helper function returns True if edge is a circle at the given center.\"\"\"", "if", "Part", ".", "Circle", "==", "type", "(", "edge", ".", "Curve", ")", "or", "Part", ".", "ArcOfCircle", "==", "type", "(", "edge", ".", "Curve", ")", ":", "return", "PathGeom", ".", "pointsCoincide", "(", "edge", ".", "Curve", ".", "Center", ",", "center", ")", "return", "False", "# split offset wire into edges to the left side and edges to the right side", "collectLeft", "=", "False", "collectRight", "=", "False", "leftSideEdges", "=", "[", "]", "rightSideEdges", "=", "[", "]", "# traverse through all edges in order and start collecting them when we encounter", "# an end point (circle centered at one of the end points of the original wire).", "# should we come to an end point and determine that we've already collected the", "# next side, we're done", "for", "e", "in", "owire", ".", "Edges", "+", "owire", ".", "Edges", ":", "if", "isCircleAt", "(", "e", ",", "start", ")", ":", "if", "PathGeom", ".", "pointsCoincide", "(", "e", ".", "Curve", ".", "Axis", ",", "FreeCAD", ".", "Vector", "(", "0", ",", "0", ",", "1", ")", ")", ":", "if", "not", "collectLeft", "and", "leftSideEdges", ":", "break", "collectLeft", "=", "True", "collectRight", "=", "False", "else", ":", "if", "not", "collectRight", "and", "rightSideEdges", ":", "break", "collectLeft", "=", "False", "collectRight", "=", "True", "elif", "isCircleAt", "(", "e", ",", "end", ")", ":", "if", "PathGeom", ".", "pointsCoincide", "(", "e", ".", "Curve", ".", "Axis", ",", "FreeCAD", ".", "Vector", "(", "0", ",", "0", ",", "1", ")", ")", ":", "if", "not", "collectRight", "and", "rightSideEdges", ":", "break", "collectLeft", "=", "False", "collectRight", "=", "True", "else", ":", "if", "not", "collectLeft", "and", "leftSideEdges", ":", "break", "collectLeft", "=", "True", "collectRight", "=", "False", "elif", "collectLeft", ":", "leftSideEdges", ".", "append", "(", "e", ")", "elif", "collectRight", ":", "rightSideEdges", ".", "append", "(", "e", ")", "debugWire", "(", "\"left\"", ",", "Part", ".", "Wire", "(", "leftSideEdges", ")", ")", "debugWire", "(", "\"right\"", ",", "Part", ".", "Wire", "(", "rightSideEdges", ")", ")", "# figure out if all the left sided edges or the right sided edges are the ones", "# that are 'outside'. However, we return the full side.", "edges", "=", "leftSideEdges", "for", "e", "in", "longestWire", ".", "Edges", ":", "for", "e0", "in", "rightSideEdges", ":", "if", "PathGeom", ".", "edgesMatch", "(", "e", ",", "e0", ")", ":", "edges", "=", "rightSideEdges", "PathLog", ".", "debug", "(", "\"#use right side edges\"", ")", "if", "not", "forward", ":", "PathLog", ".", "debug", "(", "\"#reverse\"", ")", "edges", ".", "reverse", "(", ")", "return", "orientWire", "(", "Part", ".", "Wire", "(", "edges", ")", ",", "None", ")", "# at this point we have the correct edges and they are in the order for forward", "# traversal (climb milling). If that's not what we want just reverse the order,", "# orientWire takes care of orienting the edges appropriately.", "PathLog", ".", "debug", "(", "\"#use left side edges\"", ")", "if", "not", "forward", ":", "PathLog", ".", "debug", "(", "\"#reverse\"", ")", "edges", ".", "reverse", "(", ")", "return", "orientWire", "(", "Part", ".", "Wire", "(", "edges", ")", ",", "None", ")" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Path/PathScripts/PathOpTools.py#L173-L423
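The single-segment branch above rests on one plane-geometry fact: crossing the segment direction with the Z axis yields an in-plane perpendicular, which is normalized and scaled to the offset distance. A minimal FreeCAD-free sketch of just that step, using plain (x, y) tuples in place of FreeCAD.Vector (the helper name is made up for illustration):

import math

def offset_segment(p0, p1, offset):
    """Offset a 2D segment (given as (x, y) tuples) perpendicular to itself."""
    dx, dy = p1[0] - p0[0], p1[1] - p0[1]
    # cross((dx, dy, 0), (0, 0, 1)) = (dy, -dx, 0): the in-plane normal
    nx, ny = dy, -dx
    length = math.hypot(nx, ny)
    ox, oy = nx / length * offset, ny / length * offset
    return (p0[0] + ox, p0[1] + oy), (p1[0] + ox, p1[1] + oy)

print(offset_segment((0.0, 0.0), (10.0, 0.0), 2.0))  # ((0.0, -2.0), (10.0, -2.0))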
hunterlew/mstar_deeplearning_project
3761624dcbd7d44af257200542d13d1444dc634a
classification/caffe/build/Release/pycaffe/caffe/coord_map.py
python
conv_params
(fn)
return (axis, np.array(params.get('stride', 1), ndmin=1), (ks - 1) * dilation + 1, np.array(params.get('pad', 0), ndmin=1))
Extract the spatial parameters that determine the coordinate mapping: kernel size, stride, padding, and dilation. Implementation detail: Convolution, Deconvolution, and Im2col layers define these in the convolution_param message, while Pooling has its own fields in pooling_param. This method deals with these details to extract canonical parameters.
Extract the spatial parameters that determine the coordinate mapping: kernel size, stride, padding, and dilation.
[ "Extract", "the", "spatial", "parameters", "that", "determine", "the", "coordinate", "mapping", ":", "kernel", "size", "stride", "padding", "and", "dilation", "." ]
def conv_params(fn):
    """
    Extract the spatial parameters that determine the coordinate mapping:
    kernel size, stride, padding, and dilation.

    Implementation detail: Convolution, Deconvolution, and Im2col layers
    define these in the convolution_param message, while Pooling has its
    own fields in pooling_param. This method deals with these details to
    extract canonical parameters.
    """
    params = fn.params.get('convolution_param', fn.params)
    axis = params.get('axis', 1)
    ks = np.array(params['kernel_size'], ndmin=1)
    dilation = np.array(params.get('dilation', 1), ndmin=1)
    assert len({'pad_h', 'pad_w', 'kernel_h', 'kernel_w', 'stride_h',
                'stride_w'} & set(fn.params)) == 0, \
        'cropping does not support legacy _h/_w params'
    return (axis, np.array(params.get('stride', 1), ndmin=1),
            (ks - 1) * dilation + 1,
            np.array(params.get('pad', 0), ndmin=1))
[ "def", "conv_params", "(", "fn", ")", ":", "params", "=", "fn", ".", "params", ".", "get", "(", "'convolution_param'", ",", "fn", ".", "params", ")", "axis", "=", "params", ".", "get", "(", "'axis'", ",", "1", ")", "ks", "=", "np", ".", "array", "(", "params", "[", "'kernel_size'", "]", ",", "ndmin", "=", "1", ")", "dilation", "=", "np", ".", "array", "(", "params", ".", "get", "(", "'dilation'", ",", "1", ")", ",", "ndmin", "=", "1", ")", "assert", "len", "(", "{", "'pad_h'", ",", "'pad_w'", ",", "'kernel_h'", ",", "'kernel_w'", ",", "'stride_h'", ",", "'stride_w'", "}", "&", "set", "(", "fn", ".", "params", ")", ")", "==", "0", ",", "'cropping does not support legacy _h/_w params'", "return", "(", "axis", ",", "np", ".", "array", "(", "params", ".", "get", "(", "'stride'", ",", "1", ")", ",", "ndmin", "=", "1", ")", ",", "(", "ks", "-", "1", ")", "*", "dilation", "+", "1", ",", "np", ".", "array", "(", "params", ".", "get", "(", "'pad'", ",", "0", ")", ",", "ndmin", "=", "1", ")", ")" ]
https://github.com/hunterlew/mstar_deeplearning_project/blob/3761624dcbd7d44af257200542d13d1444dc634a/classification/caffe/build/Release/pycaffe/caffe/coord_map.py#L18-L37
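The returned kernel extent (ks - 1) * dilation + 1 is the receptive-field size of a dilated kernel. A small runnable check of that arithmetic, with a hypothetical params dict standing in for a parsed convolution_param message:

import numpy as np

# Hypothetical parameters: a 3-wide kernel with dilation 2.
params = {'kernel_size': [3], 'dilation': [2], 'stride': [1], 'pad': [1]}

ks = np.array(params['kernel_size'], ndmin=1)
dilation = np.array(params.get('dilation', 1), ndmin=1)
effective = (ks - 1) * dilation + 1  # a dilated 3-tap kernel spans 5 input positions
print(effective)  # [5]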
mapnik/mapnik
f3da900c355e1d15059c4a91b00203dcc9d9f0ef
scons/scons-local-4.1.0/SCons/Tool/MSCommon/sdk.py
python
get_default_sdk
()
return InstalledSDKList[0]
Set up the default Platform/Windows SDK.
Set up the default Platform/Windows SDK.
[ "Set", "up", "the", "default", "Platform", "/", "Windows", "SDK", "." ]
def get_default_sdk():
    """Set up the default Platform/Windows SDK."""
    get_installed_sdks()
    if not InstalledSDKList:
        return None
    return InstalledSDKList[0]
[ "def", "get_default_sdk", "(", ")", ":", "get_installed_sdks", "(", ")", "if", "not", "InstalledSDKList", ":", "return", "None", "return", "InstalledSDKList", "[", "0", "]" ]
https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Tool/MSCommon/sdk.py#L339-L344
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/rosdep2/model.py
python
RosdepDatabase.get_view_names
(self)
return self._rosdep_db.keys()
:returns: list of view names that are loaded into this database.
:returns: list of view names that are loaded into this database.
[ ":", "returns", ":", "list", "of", "view", "names", "that", "are", "loaded", "into", "this", "database", "." ]
def get_view_names(self):
    """
    :returns: list of view names that are loaded into this database.
    """
    return self._rosdep_db.keys()
[ "def", "get_view_names", "(", "self", ")", ":", "return", "self", ".", "_rosdep_db", ".", "keys", "(", ")" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/rosdep2/model.py#L92-L96
miyosuda/TensorFlowAndroidMNIST
7b5a4603d2780a8a2834575706e9001977524007
jni-build/jni/include/tensorflow/python/ops/array_ops.py
python
_FillShape
(op)
return [tensor_util.constant_value_as_shape(op.inputs[0])]
Shape function for the Fill op.

This op takes a vector of dimensions and a scalar, and produces a tensor
with the given dimensions.

Args:
  op: A Fill Operation.

Returns:
  A single-element list containing the shape of the output.

Raises:
  ValueError: If the shapes or arguments are known to be invalid.
Shape function for the Fill op.
[ "Shape", "function", "for", "the", "Fill", "op", "." ]
def _FillShape(op):
  """Shape function for the Fill op.

  This op takes a vector of dimensions and a scalar, and produces a tensor
  with the given dimensions.

  Args:
    op: A Fill Operation.

  Returns:
    A single-element list containing the shape of the output.

  Raises:
    ValueError: If the shapes or arguments are known to be invalid.
  """
  op.inputs[0].get_shape().assert_has_rank(1)
  op.inputs[1].get_shape().assert_has_rank(0)
  fill_dims = tensor_util.constant_value(op.inputs[0])
  if fill_dims is not None and any(d < 0 for d in fill_dims):
    raise ValueError("Fill dimensions must be >= 0")
  return [tensor_util.constant_value_as_shape(op.inputs[0])]
[ "def", "_FillShape", "(", "op", ")", ":", "op", ".", "inputs", "[", "0", "]", ".", "get_shape", "(", ")", ".", "assert_has_rank", "(", "1", ")", "op", ".", "inputs", "[", "1", "]", ".", "get_shape", "(", ")", ".", "assert_has_rank", "(", "0", ")", "fill_dims", "=", "tensor_util", ".", "constant_value", "(", "op", ".", "inputs", "[", "0", "]", ")", "if", "fill_dims", "is", "not", "None", "and", "any", "(", "d", "<", "0", "for", "d", "in", "fill_dims", ")", ":", "raise", "ValueError", "(", "\"Fill dimensions must be >= 0\"", ")", "return", "[", "tensor_util", ".", "constant_value_as_shape", "(", "op", ".", "inputs", "[", "0", "]", ")", "]" ]
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/ops/array_ops.py#L1887-L1907
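The shape rule itself is independent of TensorFlow: when the dims vector is statically known it becomes the output shape, after negative entries are rejected. A loose stand-in with plain lists instead of tensors (TensorFlow's real version goes through tensor_util.constant_value and constant_value_as_shape):

def fill_output_shape(fill_dims):
    """Return the output shape for a Fill-like op, validating non-negativity."""
    if fill_dims is not None and any(d < 0 for d in fill_dims):
        raise ValueError("Fill dimensions must be >= 0")
    # None models a dims vector that is not statically known.
    return [tuple(fill_dims)] if fill_dims is not None else [None]

print(fill_output_shape([2, 3]))  # [(2, 3)]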
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/tools/Editra/src/ed_bookmark.py
python
BookmarkWindow.GotoBookmark
(self, mark)
Goto the bookmark in the editor @param mark: BookMark
Goto the bookmark in the editor @param mark: BookMark
[ "Goto", "the", "bookmark", "in", "the", "editor", "@param", "mark", ":", "BookMark" ]
def GotoBookmark(self, mark):
    """Goto the bookmark in the editor
    @param mark: BookMark
    """
    app = wx.GetApp()
    mw = app.GetActiveWindow()
    if mw:
        nb = mw.GetNotebook()
        buf = nb.FindBuffer(mark.Filename)
        use_handle = True
        if not buf:
            nb.OpenPage(ebmlib.GetPathName(mark.Filename),
                        ebmlib.GetFileName(mark.Filename))
            buf = nb.GetCurrentPage()
            use_handle = False  # Handle is invalid so use line number

        if buf:
            # Ensure the tab is the current one
            nb.GotoPage(mark.Filename)
            # Jump to the bookmark line
            if use_handle:
                lnum = buf.MarkerLineFromHandle(mark.Handle)
            else:
                lnum = mark.Line
            buf.GotoLine(lnum)
    else:
        util.Log("[ed_bookmark][err] Failed to locate mainwindow")
[ "def", "GotoBookmark", "(", "self", ",", "mark", ")", ":", "app", "=", "wx", ".", "GetApp", "(", ")", "mw", "=", "app", ".", "GetActiveWindow", "(", ")", "if", "mw", ":", "nb", "=", "mw", ".", "GetNotebook", "(", ")", "buf", "=", "nb", ".", "FindBuffer", "(", "mark", ".", "Filename", ")", "use_handle", "=", "True", "if", "not", "buf", ":", "nb", ".", "OpenPage", "(", "ebmlib", ".", "GetPathName", "(", "mark", ".", "Filename", ")", ",", "ebmlib", ".", "GetFileName", "(", "mark", ".", "Filename", ")", ")", "buf", "=", "nb", ".", "GetCurrentPage", "(", ")", "use_handle", "=", "False", "# Handle is invalid so use line number", "if", "buf", ":", "# Ensure the tab is the current one", "nb", ".", "GotoPage", "(", "mark", ".", "Filename", ")", "# Jump to the bookmark line", "if", "use_handle", ":", "lnum", "=", "buf", ".", "MarkerLineFromHandle", "(", "mark", ".", "Handle", ")", "else", ":", "lnum", "=", "mark", ".", "Line", "buf", ".", "GotoLine", "(", "lnum", ")", "else", ":", "util", ".", "Log", "(", "\"[ed_bookmark][err] Failed to locate mainwindow\"", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/ed_bookmark.py#L227-L254
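The interesting pattern here is the fallback: prefer a stable marker handle when the buffer was already open (handles follow the marked line as text is edited), and fall back to the stored line number for a freshly opened buffer. A self-contained sketch; the Buffer class is a hypothetical stand-in for the Editra text buffer:

class Buffer:
    def __init__(self, handle_to_line):
        self._handles = handle_to_line

    def MarkerLineFromHandle(self, handle):
        # Handles keep tracking the marker even after lines move around.
        return self._handles.get(handle, -1)

def resolve_bookmark_line(buf, mark, buffer_was_open):
    if buffer_was_open:
        return buf.MarkerLineFromHandle(mark['handle'])
    return mark['line']

buf = Buffer({7: 42})
print(resolve_bookmark_line(buf, {'handle': 7, 'line': 40}, True))   # 42
print(resolve_bookmark_line(buf, {'handle': 7, 'line': 40}, False))  # 40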
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/symtable.py
python
Symbol.get_namespaces
(self)
return self.__namespaces
Return a list of namespaces bound to this name
Return a list of namespaces bound to this name
[ "Return", "a", "list", "of", "namespaces", "bound", "to", "this", "name" ]
def get_namespaces(self):
    """Return a list of namespaces bound to this name"""
    return self.__namespaces
[ "def", "get_namespaces", "(", "self", ")", ":", "return", "self", ".", "__namespaces" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/symtable.py#L223-L225
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/ops/_op_impl/_custom_op/_basic.py
python
_get_bias
(shape_bias)
return shb
_get_bias
_get_bias
[ "_get_bias" ]
def _get_bias(shape_bias):
    """_get_bias"""
    bias_length = shape_bias[0]
    if bias_length % 16 == 0:
        shb = shape_bias
    else:
        bias_length = (bias_length // 16) * 16 + 16
        shape_bias = []
        shape_bias.append(bias_length)
        shb = shape_bias
    return shb
[ "def", "_get_bias", "(", "shape_bias", ")", ":", "bias_length", "=", "shape_bias", "[", "0", "]", "if", "bias_length", "%", "16", "==", "0", ":", "shb", "=", "shape_bias", "else", ":", "bias_length", "=", "(", "bias_length", "//", "16", ")", "*", "16", "+", "16", "shape_bias", "=", "[", "]", "shape_bias", ".", "append", "(", "bias_length", ")", "shb", "=", "shape_bias", "return", "shb" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/_op_impl/_custom_op/_basic.py#L130-L140
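The branchy version above is equivalent to rounding the bias length up to the next multiple of 16 (a common hardware alignment requirement). A compact sketch of the same rule, verifiable against the original:

def align16(n):
    # Round n up to the nearest multiple of 16; multiples pass through unchanged.
    return ((n + 15) // 16) * 16

assert align16(16) == 16  # already aligned, unchanged
assert align16(17) == 32  # rounded up to the next multiple
assert align16(1) == 16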
snap-stanford/snap-python
d53c51b0a26aa7e3e7400b014cdf728948fde80a
setup/snap.py
python
TCh_GetUc
(*args)
return _snap.TCh_GetUc(*args)
TCh_GetUc(char const & Ch) -> char Parameters: Ch: char const &
TCh_GetUc(char const & Ch) -> char
[ "TCh_GetUc", "(", "char", "const", "&", "Ch", ")", "-", ">", "char" ]
def TCh_GetUc(*args):
    """
    TCh_GetUc(char const & Ch) -> char

    Parameters:
        Ch: char const &
    """
    return _snap.TCh_GetUc(*args)
[ "def", "TCh_GetUc", "(", "*", "args", ")", ":", "return", "_snap", ".", "TCh_GetUc", "(", "*", "args", ")" ]
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L12707-L12715
ROCmSoftwarePlatform/hipCaffe
4ec5d482515cce532348553b6db6d00d015675d5
scripts/cpp_lint.py
python
ResetNolintSuppressions
()
Resets the set of NOLINT suppressions to empty.
Resets the set of NOLINT suppressions to empty.
[ "Resets", "the", "set", "of", "NOLINT", "suppressions", "to", "empty", "." ]
def ResetNolintSuppressions():
  "Resets the set of NOLINT suppressions to empty."
  _error_suppressions.clear()
[ "def", "ResetNolintSuppressions", "(", ")", ":", "_error_suppressions", ".", "clear", "(", ")" ]
https://github.com/ROCmSoftwarePlatform/hipCaffe/blob/4ec5d482515cce532348553b6db6d00d015675d5/scripts/cpp_lint.py#L495-L497
tkn-tub/ns3-gym
19bfe0a583e641142609939a090a09dfc63a095f
src/visualizer/visualizer/core.py
python
Visualizer.__init__
(self)
! Initializer function. @param self: class object. @return none
! Initializer function.
[ "!", "Initializer", "function", "." ]
def __init__(self):
    """!
    Initializer function.

    @param self: class object.
    @return none
    """
    assert Visualizer.INSTANCE is None
    Visualizer.INSTANCE = self
    super(Visualizer, self).__init__()
    self.nodes = {}  # node index -> Node
    self.channels = {}  # id(ns3.Channel) -> Channel
    self.window = None  # toplevel window
    self.canvas = None  # GooCanvas.Canvas
    self.time_label = None  # Gtk.Label
    self.play_button = None  # Gtk.ToggleButton
    self.zoom = None  # Gtk.Adjustment
    self._scrolled_window = None  # Gtk.ScrolledWindow
    self.links_group = GooCanvas.CanvasGroup()
    self.channels_group = GooCanvas.CanvasGroup()
    self.nodes_group = GooCanvas.CanvasGroup()
    self._update_timeout_id = None
    self.simulation = SimulationThread(self)
    self.selected_node = None  # node currently selected
    self.speed = 1.0
    self.information_windows = []
    self._transmission_arrows = []
    self._last_transmissions = []
    self._drop_arrows = []
    self._last_drops = []
    self._show_transmissions_mode = None
    self.set_show_transmissions_mode(ShowTransmissionsMode.ALL)
    self._panning_state = None
    self.node_size_adjustment = None
    self.transmissions_smoothing_adjustment = None
    self.sample_period = SAMPLE_PERIOD
    self.node_drag_state = None
    self.follow_node = None
    self.shell_window = None

    self.create_gui()

    for plugin in plugins:
        plugin(self)
[ "def", "__init__", "(", "self", ")", ":", "assert", "Visualizer", ".", "INSTANCE", "is", "None", "Visualizer", ".", "INSTANCE", "=", "self", "super", "(", "Visualizer", ",", "self", ")", ".", "__init__", "(", ")", "self", ".", "nodes", "=", "{", "}", "# node index -> Node", "self", ".", "channels", "=", "{", "}", "# id(ns3.Channel) -> Channel", "self", ".", "window", "=", "None", "# toplevel window", "self", ".", "canvas", "=", "None", "# GooCanvas.Canvas", "self", ".", "time_label", "=", "None", "# Gtk.Label", "self", ".", "play_button", "=", "None", "# Gtk.ToggleButton", "self", ".", "zoom", "=", "None", "# Gtk.Adjustment", "self", ".", "_scrolled_window", "=", "None", "# Gtk.ScrolledWindow", "self", ".", "links_group", "=", "GooCanvas", ".", "CanvasGroup", "(", ")", "self", ".", "channels_group", "=", "GooCanvas", ".", "CanvasGroup", "(", ")", "self", ".", "nodes_group", "=", "GooCanvas", ".", "CanvasGroup", "(", ")", "self", ".", "_update_timeout_id", "=", "None", "self", ".", "simulation", "=", "SimulationThread", "(", "self", ")", "self", ".", "selected_node", "=", "None", "# node currently selected", "self", ".", "speed", "=", "1.0", "self", ".", "information_windows", "=", "[", "]", "self", ".", "_transmission_arrows", "=", "[", "]", "self", ".", "_last_transmissions", "=", "[", "]", "self", ".", "_drop_arrows", "=", "[", "]", "self", ".", "_last_drops", "=", "[", "]", "self", ".", "_show_transmissions_mode", "=", "None", "self", ".", "set_show_transmissions_mode", "(", "ShowTransmissionsMode", ".", "ALL", ")", "self", ".", "_panning_state", "=", "None", "self", ".", "node_size_adjustment", "=", "None", "self", ".", "transmissions_smoothing_adjustment", "=", "None", "self", ".", "sample_period", "=", "SAMPLE_PERIOD", "self", ".", "node_drag_state", "=", "None", "self", ".", "follow_node", "=", "None", "self", ".", "shell_window", "=", "None", "self", ".", "create_gui", "(", ")", "for", "plugin", "in", "plugins", ":", "plugin", "(", "self", ")" ]
https://github.com/tkn-tub/ns3-gym/blob/19bfe0a583e641142609939a090a09dfc63a095f/src/visualizer/visualizer/core.py#L718-L763
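The first two lines establish the singleton convention used throughout this visualizer: the class records its only instance in a class attribute and asserts on re-creation. A stripped-down, runnable sketch of just that convention:

class Singleton(object):
    INSTANCE = None

    def __init__(self):
        # Creating a second instance trips the assertion, as in Visualizer.
        assert Singleton.INSTANCE is None, "only one instance may exist"
        Singleton.INSTANCE = self

first = Singleton()
try:
    Singleton()
except AssertionError as err:
    print(err)  # only one instance may exist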
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/telemetry/third_party/web-page-replay/platformsettings.py
python
_WindowsPlatformSettings.get_system_logging_handler
(self)
return DebugViewHandler()
Return a handler for the logging module (optional). For Windows, output can be viewed with DebugView. http://technet.microsoft.com/en-us/sysinternals/bb896647.aspx
Return a handler for the logging module (optional).
[ "Return", "a", "handler", "for", "the", "logging", "module", "(", "optional", ")", "." ]
def get_system_logging_handler(self):
    """Return a handler for the logging module (optional).

    For Windows, output can be viewed with DebugView.
    http://technet.microsoft.com/en-us/sysinternals/bb896647.aspx
    """
    import ctypes
    output_debug_string = ctypes.windll.kernel32.OutputDebugStringA
    output_debug_string.argtypes = [ctypes.c_char_p]

    class DebugViewHandler(logging.Handler):
        def emit(self, record):
            output_debug_string('[wpr] ' + self.format(record))

    return DebugViewHandler()
[ "def", "get_system_logging_handler", "(", "self", ")", ":", "import", "ctypes", "output_debug_string", "=", "ctypes", ".", "windll", ".", "kernel32", ".", "OutputDebugStringA", "output_debug_string", ".", "argtypes", "=", "[", "ctypes", ".", "c_char_p", "]", "class", "DebugViewHandler", "(", "logging", ".", "Handler", ")", ":", "def", "emit", "(", "self", ",", "record", ")", ":", "output_debug_string", "(", "'[wpr] '", "+", "self", ".", "format", "(", "record", ")", ")", "return", "DebugViewHandler", "(", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/third_party/web-page-replay/platformsettings.py#L601-L613
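The portable part of this pattern is subclassing logging.Handler and overriding emit() to forward formatted records to an arbitrary sink. A platform-neutral sketch that swaps Windows' OutputDebugStringA for a plain list, so it runs anywhere:

import logging

class CallbackHandler(logging.Handler):
    def __init__(self, sink):
        super(CallbackHandler, self).__init__()
        self._sink = sink

    def emit(self, record):
        # Same '[wpr] ' prefix convention as the handler above.
        self._sink.append('[wpr] ' + self.format(record))

messages = []
log = logging.getLogger('demo')
log.addHandler(CallbackHandler(messages))
log.warning('hello')
print(messages)  # ['[wpr] hello']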
BitMEX/api-connectors
37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812
auto-generated/python/swagger_client/api/chat_api.py
python
ChatApi.chat_get
(self, **kwargs)
Get chat messages.  # noqa: E501

This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.chat_get(async_req=True)
>>> result = thread.get()

:param async_req bool
:param float count: Number of results to fetch.
:param float start: Starting ID for results.
:param bool reverse: If true, will sort results newest first.
:param float channel_id: Channel id. GET /chat/channels for ids. Leave blank for all.
:return: list[Chat]
         If the method is called asynchronously,
         returns the request thread.
Get chat messages. # noqa: E501
[ "Get", "chat", "messages", ".", "#", "noqa", ":", "E501" ]
def chat_get(self, **kwargs):  # noqa: E501
    """Get chat messages.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.chat_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float count: Number of results to fetch.
    :param float start: Starting ID for results.
    :param bool reverse: If true, will sort results newest first.
    :param float channel_id: Channel id. GET /chat/channels for ids. Leave blank for all.
    :return: list[Chat]
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.chat_get_with_http_info(**kwargs)  # noqa: E501
    else:
        (data) = self.chat_get_with_http_info(**kwargs)  # noqa: E501
        return data
[ "def", "chat_get", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "chat_get_with_http_info", "(", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "chat_get_with_http_info", "(", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
https://github.com/BitMEX/api-connectors/blob/37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812/auto-generated/python/swagger_client/api/chat_api.py#L36-L58
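A rough sketch of the swagger-codegen sync/async convention this method follows: async_req=True returns an AsyncResult-style handle whose .get() blocks for the value, while the default path returns the value directly. The _call_api helper is a hypothetical stand-in for the real HTTP round trip:

from multiprocessing.pool import ThreadPool

_pool = ThreadPool(1)

def _call_api(count=100):
    return ['msg'] * int(count)  # placeholder for the HTTP request

def chat_get(**kwargs):
    if kwargs.pop('async_req', False):
        # Dispatch to a worker thread; caller gets a handle with .get().
        return _pool.apply_async(_call_api, kwds=kwargs)
    return _call_api(**kwargs)

thread = chat_get(async_req=True, count=2)
print(thread.get())       # ['msg', 'msg']
print(chat_get(count=1))  # ['msg']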
RamadhanAmizudin/malware
2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1
Fuzzbunch/fuzzbunch/redirection.py
python
RedirectionManager.config_redirect
(self, plugin, do_redir)
return id
Configure whether the plug-in should perform redirection plugin - An instance of a plugin do_redir - Should we do redirection? (True or False)
Configure whether the plug-in should perform redirection plugin - An instance of a plugin do_redir - Should we do redirection? (True or False)
[ "Configure", "whether", "the", "plug", "-", "in", "should", "perform", "redirection", "plugin", "-", "An", "instance", "of", "a", "plugin", "do_redir", "-", "Should", "we", "do", "redirection?", "(", "True", "or", "False", ")" ]
def config_redirect(self, plugin, do_redir):
    """Configure whether the plug-in should perform redirection
    plugin   - An instance of a plugin
    do_redir - Should we do redirection? (True or False)"""
    redir = plugin.getRedirection()

    # Make a new session dictionary here
    session_data = {
        'params' : {},  #
        'remote' : [],  #
        'local'  : []   #
    }

    if do_redir:
        id = uuid.uuid4()
    else:
        id = 0

    try:
        self.io.newline()
        self.io.print_success("Configure Plugin Local Tunnels")
        for l in redir['local']:
            if do_redir:
                self.redirect_local(l, plugin, session_data)
            else:
                self.straight_local(l, plugin)

        self.io.newline()
        self.io.print_success("Configure Plugin Remote Tunnels")
        for r in redir['remote']:
            if do_redir:
                self.redirect_remote(r, plugin, session_data)
            else:
                self.straight_remote(r, plugin)
    except exception.PromptErr:
        for key, val in session_data['params'].items():
            plugin.set(key, val)
        raise

    self.io.newline()

    # Store info into the cache so that we can restore it in post_exec
    if id:
        self.session_cache[id] = session_data
    return id
[ "def", "config_redirect", "(", "self", ",", "plugin", ",", "do_redir", ")", ":", "redir", "=", "plugin", ".", "getRedirection", "(", ")", "# Make a new session dictionary here", "session_data", "=", "{", "'params'", ":", "{", "}", ",", "# ", "'remote'", ":", "[", "]", ",", "# ", "'local'", ":", "[", "]", "# ", "}", "if", "do_redir", ":", "id", "=", "uuid", ".", "uuid4", "(", ")", "else", ":", "id", "=", "0", "try", ":", "self", ".", "io", ".", "newline", "(", ")", "self", ".", "io", ".", "print_success", "(", "\"Configure Plugin Local Tunnels\"", ")", "for", "l", "in", "redir", "[", "'local'", "]", ":", "if", "do_redir", ":", "self", ".", "redirect_local", "(", "l", ",", "plugin", ",", "session_data", ")", "else", ":", "self", ".", "straight_local", "(", "l", ",", "plugin", ")", "self", ".", "io", ".", "newline", "(", ")", "self", ".", "io", ".", "print_success", "(", "\"Configure Plugin Remote Tunnels\"", ")", "for", "r", "in", "redir", "[", "'remote'", "]", ":", "if", "do_redir", ":", "self", ".", "redirect_remote", "(", "r", ",", "plugin", ",", "session_data", ")", "else", ":", "self", ".", "straight_remote", "(", "r", ",", "plugin", ")", "except", "exception", ".", "PromptErr", ":", "for", "key", ",", "val", "in", "session_data", "[", "'params'", "]", ".", "items", "(", ")", ":", "plugin", ".", "set", "(", "key", ",", "val", ")", "raise", "self", ".", "io", ".", "newline", "(", ")", "# Store info into the cache so that we can restore it in post_exec", "if", "id", ":", "self", ".", "session_cache", "[", "id", "]", "=", "session_data", "return", "id" ]
https://github.com/RamadhanAmizudin/malware/blob/2c6c53c8b0d556f5d8078d6ca0fc4448f4697cf1/Fuzzbunch/fuzzbunch/redirection.py#L466-L508
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/tf_asymmetry_fitting/tf_asymmetry_fitting_model.py
python
TFAsymmetryFittingModel._get_all_fit_function_parameter_values_for_tf_single_function
(self, tf_single_function: IFunction)
return [self._get_normalisation_from_tf_fit_function(tf_single_function)] + parameter_values
Returns the required parameters values including normalisation from a TF asymmetry single function.
Returns the required parameters values including normalisation from a TF asymmetry single function.
[ "Returns", "the", "required", "parameters", "values", "including", "normalisation", "from", "a", "TF", "asymmetry", "single", "function", "." ]
def _get_all_fit_function_parameter_values_for_tf_single_function(self, tf_single_function: IFunction) -> list:
    """Returns the required parameters values including normalisation from a TF asymmetry single function."""
    normal_single_function = self._get_normal_fit_function_from(tf_single_function)
    parameter_values, _ = self.get_fit_function_parameter_values(normal_single_function)
    return [self._get_normalisation_from_tf_fit_function(tf_single_function)] + parameter_values
[ "def", "_get_all_fit_function_parameter_values_for_tf_single_function", "(", "self", ",", "tf_single_function", ":", "IFunction", ")", "->", "list", ":", "normal_single_function", "=", "self", ".", "_get_normal_fit_function_from", "(", "tf_single_function", ")", "parameter_values", ",", "_", "=", "self", ".", "get_fit_function_parameter_values", "(", "normal_single_function", ")", "return", "[", "self", ".", "_get_normalisation_from_tf_fit_function", "(", "tf_single_function", ")", "]", "+", "parameter_values" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/tf_asymmetry_fitting/tf_asymmetry_fitting_model.py#L691-L695
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/distutils/fcompiler/__init__.py
python
load_all_fcompiler_classes
()
Cache all the FCompiler classes found in modules in the numpy.distutils.fcompiler package.
Cache all the FCompiler classes found in modules in the numpy.distutils.fcompiler package.
[ "Cache", "all", "the", "FCompiler", "classes", "found", "in", "modules", "in", "the", "numpy", ".", "distutils", ".", "fcompiler", "package", "." ]
def load_all_fcompiler_classes():
    """Cache all the FCompiler classes found in modules in the
    numpy.distutils.fcompiler package.
    """
    from glob import glob
    global fcompiler_class, fcompiler_aliases
    if fcompiler_class is not None:
        return
    pys = os.path.join(os.path.dirname(__file__), '*.py')
    fcompiler_class = {}
    fcompiler_aliases = {}
    for fname in glob(pys):
        module_name, ext = os.path.splitext(os.path.basename(fname))
        module_name = 'numpy.distutils.fcompiler.' + module_name
        __import__(module_name)
        module = sys.modules[module_name]
        if hasattr(module, 'compilers'):
            for cname in module.compilers:
                klass = getattr(module, cname)
                desc = (klass.compiler_type, klass, klass.description)
                fcompiler_class[klass.compiler_type] = desc
                for alias in klass.compiler_aliases:
                    if alias in fcompiler_aliases:
                        raise ValueError("alias %r defined for both %s and %s"
                                         % (alias, klass.__name__,
                                            fcompiler_aliases[alias][1].__name__))
                    fcompiler_aliases[alias] = desc
[ "def", "load_all_fcompiler_classes", "(", ")", ":", "from", "glob", "import", "glob", "global", "fcompiler_class", ",", "fcompiler_aliases", "if", "fcompiler_class", "is", "not", "None", ":", "return", "pys", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'*.py'", ")", "fcompiler_class", "=", "{", "}", "fcompiler_aliases", "=", "{", "}", "for", "fname", "in", "glob", "(", "pys", ")", ":", "module_name", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "fname", ")", ")", "module_name", "=", "'numpy.distutils.fcompiler.'", "+", "module_name", "__import__", "(", "module_name", ")", "module", "=", "sys", ".", "modules", "[", "module_name", "]", "if", "hasattr", "(", "module", ",", "'compilers'", ")", ":", "for", "cname", "in", "module", ".", "compilers", ":", "klass", "=", "getattr", "(", "module", ",", "cname", ")", "desc", "=", "(", "klass", ".", "compiler_type", ",", "klass", ",", "klass", ".", "description", ")", "fcompiler_class", "[", "klass", ".", "compiler_type", "]", "=", "desc", "for", "alias", "in", "klass", ".", "compiler_aliases", ":", "if", "alias", "in", "fcompiler_aliases", ":", "raise", "ValueError", "(", "\"alias %r defined for both %s and %s\"", "%", "(", "alias", ",", "klass", ".", "__name__", ",", "fcompiler_aliases", "[", "alias", "]", "[", "1", "]", ".", "__name__", ")", ")", "fcompiler_aliases", "[", "alias", "]", "=", "desc" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/distutils/fcompiler/__init__.py#L726-L752
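The discovery idea here (glob the package directory, import each module, register whatever declares a compilers list) can be expressed more compactly with pkgutil. A condensed sketch under the same assumptions; the package layout and the `compilers` / `compiler_type` attribute names are taken from the function above, everything else is illustrative:

import importlib
import pkgutil

def load_plugin_classes(package):
    registry = {}
    for info in pkgutil.iter_modules(package.__path__):
        # Import each submodule and collect its declared plugin classes.
        module = importlib.import_module(package.__name__ + '.' + info.name)
        for cname in getattr(module, 'compilers', []):
            klass = getattr(module, cname)
            registry[klass.compiler_type] = klass
    return registry

# e.g. load_plugin_classes(importlib.import_module('numpy.distutils.fcompiler'))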
BVLC/caffe
9b891540183ddc834a02b2bd81b31afae71b2153
scripts/cpp_lint.py
python
CheckForNewlineAtEOF
(filename, lines, error)
Logs an error if there is no newline char at the end of the file.

Args:
  filename: The name of the current file.
  lines: An array of strings, each representing a line of the file.
  error: The function to call with any errors found.
Logs an error if there is no newline char at the end of the file.
[ "Logs", "an", "error", "if", "there", "is", "no", "newline", "char", "at", "the", "end", "of", "the", "file", "." ]
def CheckForNewlineAtEOF(filename, lines, error):
  """Logs an error if there is no newline char at the end of the file.

  Args:
    filename: The name of the current file.
    lines: An array of strings, each representing a line of the file.
    error: The function to call with any errors found.
  """

  # The array lines() was created by adding two newlines to the
  # original file (go figure), then splitting on \n.
  # To verify that the file ends in \n, we just have to make sure the
  # last-but-two element of lines() exists and is empty.
  if len(lines) < 3 or lines[-2]:
    error(filename, len(lines) - 2, 'whitespace/ending_newline', 5,
          'Could not find a newline character at the end of the file.')
[ "def", "CheckForNewlineAtEOF", "(", "filename", ",", "lines", ",", "error", ")", ":", "# The array lines() was created by adding two newlines to the", "# original file (go figure), then splitting on \\n.", "# To verify that the file ends in \\n, we just have to make sure the", "# last-but-two element of lines() exists and is empty.", "if", "len", "(", "lines", ")", "<", "3", "or", "lines", "[", "-", "2", "]", ":", "error", "(", "filename", ",", "len", "(", "lines", ")", "-", "2", ",", "'whitespace/ending_newline'", ",", "5", ",", "'Could not find a newline character at the end of the file.'", ")" ]
https://github.com/BVLC/caffe/blob/9b891540183ddc834a02b2bd81b31afae71b2153/scripts/cpp_lint.py#L1512-L1527
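The lines[-2] check leans on a str.split invariant: splitting on '\n' yields a trailing empty string exactly when the text ends with a newline, and with a sentinel line appended after the file's lines (as the linter's file reader does) that empty string lands at index -2. A tiny runnable demonstration of the invariant, independent of cpplint:

def ends_with_newline(text):
    lines = text.split('\n') + ['// sentinel']
    return len(lines) >= 3 and not lines[-2]

print(ends_with_newline('a\nb\n'))  # True:  ['a', 'b', '', sentinel]
print(ends_with_newline('a\nb'))    # False: ['a', 'b', sentinel]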
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/curses_ui.py
python
CursesUI._scroll_output
(self, direction, line_index=None)
Scroll the output pad.

Args:
  direction: _SCROLL_REFRESH, _SCROLL_UP, _SCROLL_DOWN, _SCROLL_UP_A_LINE,
    _SCROLL_DOWN_A_LINE, _SCROLL_HOME, _SCROLL_END, _SCROLL_TO_LINE_INDEX
  line_index: (int) Specifies the zero-based line index to scroll to.
    Applicable only if direction is _SCROLL_TO_LINE_INDEX.

Raises:
  ValueError: On invalid scroll direction.
  TypeError: If line_index is not int and direction is
    _SCROLL_TO_LINE_INDEX.
Scroll the output pad.
[ "Scroll", "the", "output", "pad", "." ]
def _scroll_output(self, direction, line_index=None):
  """Scroll the output pad.

  Args:
    direction: _SCROLL_REFRESH, _SCROLL_UP, _SCROLL_DOWN, _SCROLL_UP_A_LINE,
      _SCROLL_DOWN_A_LINE, _SCROLL_HOME, _SCROLL_END, _SCROLL_TO_LINE_INDEX
    line_index: (int) Specifies the zero-based line index to scroll to.
      Applicable only if direction is _SCROLL_TO_LINE_INDEX.

  Raises:
    ValueError: On invalid scroll direction.
    TypeError: If line_index is not int and direction is
      _SCROLL_TO_LINE_INDEX.
  """
  if not self._output_pad:
    # No output pad is present. Do nothing.
    return

  if direction == _SCROLL_REFRESH:
    pass
  elif direction == _SCROLL_UP:
    # Scroll up.
    self._output_pad_row -= int(self._output_num_rows / 3)
    if self._output_pad_row < 0:
      self._output_pad_row = 0
  elif direction == _SCROLL_DOWN:
    # Scroll down.
    self._output_pad_row += int(self._output_num_rows / 3)
    if (self._output_pad_row >
        self._output_pad_height - self._output_pad_screen_height - 1):
      self._output_pad_row = (
          self._output_pad_height - self._output_pad_screen_height - 1)
  elif direction == _SCROLL_UP_A_LINE:
    # Scroll up a line
    if self._output_pad_row - 1 >= 0:
      self._output_pad_row -= 1
  elif direction == _SCROLL_DOWN_A_LINE:
    # Scroll down a line
    if self._output_pad_row + 1 < (
        self._output_pad_height - self._output_pad_screen_height):
      self._output_pad_row += 1
  elif direction == _SCROLL_HOME:
    # Scroll to top
    self._output_pad_row = 0
  elif direction == _SCROLL_END:
    # Scroll to bottom
    self._output_pad_row = (
        self._output_pad_height - self._output_pad_screen_height - 1)
  elif direction == _SCROLL_TO_LINE_INDEX:
    if not isinstance(line_index, int):
      raise TypeError("Invalid line_index type (%s) under mode %s" %
                      (type(line_index), _SCROLL_TO_LINE_INDEX))
    self._output_pad_row = line_index
  else:
    raise ValueError("Unsupported scroll mode: %s" % direction)

  self._nav_history.update_scroll_position(self._output_pad_row)

  # Actually scroll the output pad: refresh with new location.
  output_pad_top = self._output_pad_screen_location.top
  if self._main_menu_pad:
    output_pad_top += 1
  self._screen_scroll_output_pad(self._output_pad, self._output_pad_row, 0,
                                 output_pad_top,
                                 self._output_pad_screen_location.left,
                                 self._output_pad_screen_location.bottom,
                                 self._output_pad_screen_location.right)
  self._screen_render_nav_bar()
  self._screen_render_menu_pad()

  self._scroll_info = self._compile_ui_status_summary()
  self._screen_draw_text_line(
      self._output_scroll_row, self._scroll_info,
      color=self._STATUS_BAR_COLOR_PAIR)
[ "def", "_scroll_output", "(", "self", ",", "direction", ",", "line_index", "=", "None", ")", ":", "if", "not", "self", ".", "_output_pad", ":", "# No output pad is present. Do nothing.", "return", "if", "direction", "==", "_SCROLL_REFRESH", ":", "pass", "elif", "direction", "==", "_SCROLL_UP", ":", "# Scroll up.", "self", ".", "_output_pad_row", "-=", "int", "(", "self", ".", "_output_num_rows", "/", "3", ")", "if", "self", ".", "_output_pad_row", "<", "0", ":", "self", ".", "_output_pad_row", "=", "0", "elif", "direction", "==", "_SCROLL_DOWN", ":", "# Scroll down.", "self", ".", "_output_pad_row", "+=", "int", "(", "self", ".", "_output_num_rows", "/", "3", ")", "if", "(", "self", ".", "_output_pad_row", ">", "self", ".", "_output_pad_height", "-", "self", ".", "_output_pad_screen_height", "-", "1", ")", ":", "self", ".", "_output_pad_row", "=", "(", "self", ".", "_output_pad_height", "-", "self", ".", "_output_pad_screen_height", "-", "1", ")", "elif", "direction", "==", "_SCROLL_UP_A_LINE", ":", "# Scroll up a line", "if", "self", ".", "_output_pad_row", "-", "1", ">=", "0", ":", "self", ".", "_output_pad_row", "-=", "1", "elif", "direction", "==", "_SCROLL_DOWN_A_LINE", ":", "# Scroll down a line", "if", "self", ".", "_output_pad_row", "+", "1", "<", "(", "self", ".", "_output_pad_height", "-", "self", ".", "_output_pad_screen_height", ")", ":", "self", ".", "_output_pad_row", "+=", "1", "elif", "direction", "==", "_SCROLL_HOME", ":", "# Scroll to top", "self", ".", "_output_pad_row", "=", "0", "elif", "direction", "==", "_SCROLL_END", ":", "# Scroll to bottom", "self", ".", "_output_pad_row", "=", "(", "self", ".", "_output_pad_height", "-", "self", ".", "_output_pad_screen_height", "-", "1", ")", "elif", "direction", "==", "_SCROLL_TO_LINE_INDEX", ":", "if", "not", "isinstance", "(", "line_index", ",", "int", ")", ":", "raise", "TypeError", "(", "\"Invalid line_index type (%s) under mode %s\"", "%", "(", "type", "(", "line_index", ")", ",", "_SCROLL_TO_LINE_INDEX", ")", ")", "self", ".", "_output_pad_row", "=", "line_index", "else", ":", "raise", "ValueError", "(", "\"Unsupported scroll mode: %s\"", "%", "direction", ")", "self", ".", "_nav_history", ".", "update_scroll_position", "(", "self", ".", "_output_pad_row", ")", "# Actually scroll the output pad: refresh with new location.", "output_pad_top", "=", "self", ".", "_output_pad_screen_location", ".", "top", "if", "self", ".", "_main_menu_pad", ":", "output_pad_top", "+=", "1", "self", ".", "_screen_scroll_output_pad", "(", "self", ".", "_output_pad", ",", "self", ".", "_output_pad_row", ",", "0", ",", "output_pad_top", ",", "self", ".", "_output_pad_screen_location", ".", "left", ",", "self", ".", "_output_pad_screen_location", ".", "bottom", ",", "self", ".", "_output_pad_screen_location", ".", "right", ")", "self", ".", "_screen_render_nav_bar", "(", ")", "self", ".", "_screen_render_menu_pad", "(", ")", "self", ".", "_scroll_info", "=", "self", ".", "_compile_ui_status_summary", "(", ")", "self", ".", "_screen_draw_text_line", "(", "self", ".", "_output_scroll_row", ",", "self", ".", "_scroll_info", ",", "color", "=", "self", ".", "_STATUS_BAR_COLOR_PAIR", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/cli/curses_ui.py#L1316-L1391
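Net of the per-direction branches, the scrolling arithmetic reduces to clamping the pad row into [0, pad_height - screen_height - 1]. A standalone sketch of that clamp, with illustrative names:

def clamp_scroll(row, delta, pad_height, screen_height):
    row += delta
    # Pin between the top of the pad and the last row that still fills the screen.
    return max(0, min(row, pad_height - screen_height - 1))

print(clamp_scroll(0, -10, 100, 20))  # 0  (can't scroll above the top)
print(clamp_scroll(70, 30, 100, 20))  # 79 (pinned to the last full screen)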
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
media/webrtc/trunk/webrtc/tools/compare_videos.py
python
_ParseArgs
()
return options
Registers the command-line options.
Registers the command-line options.
[ "Registers", "the", "command", "-", "line", "options", "." ]
def _ParseArgs():
  """Registers the command-line options."""
  usage = 'usage: %prog [options]'
  parser = optparse.OptionParser(usage=usage)

  parser.add_option('--label', type='string', default='MY_TEST',
                    help=('Label of the test, used to identify different '
                          'tests. Default: %default'))
  parser.add_option('--ref_video', type='string',
                    help='Reference video to compare with (YUV).')
  parser.add_option('--test_video', type='string',
                    help=('Test video to be compared with the reference '
                          'video (YUV).'))
  parser.add_option('--frame_analyzer', type='string',
                    help='Path to the frame analyzer executable.')
  parser.add_option('--barcode_decoder', type='string',
                    help=('Path to the barcode decoder script. By default, we '
                          'will assume we can find it in barcode_tools/'
                          'relative to this directory.'))
  parser.add_option('--ffmpeg_path', type='string',
                    help=('The path to where the ffmpeg executable is located. '
                          'If omitted, it will be assumed to be present in the '
                          'PATH with the name ffmpeg[.exe].'))
  parser.add_option('--zxing_path', type='string',
                    help=('The path to where the zxing executable is located. '
                          'If omitted, it will be assumed to be present in the '
                          'PATH with the name zxing[.exe].'))
  parser.add_option('--stats_file', type='string', default='stats.txt',
                    help=('Path to the temporary stats file to be created and '
                          'used. Default: %default'))
  parser.add_option('--yuv_frame_width', type='int', default=640,
                    help='Width of the YUV file\'s frames. Default: %default')
  parser.add_option('--yuv_frame_height', type='int', default=480,
                    help='Height of the YUV file\'s frames. Default: %default')
  options, _args = parser.parse_args()

  if not options.ref_video:
    parser.error('You must provide a path to the reference video!')
  if not os.path.exists(options.ref_video):
    parser.error('Cannot find the reference video at %s' % options.ref_video)

  if not options.test_video:
    parser.error('You must provide a path to the test video!')
  if not os.path.exists(options.test_video):
    parser.error('Cannot find the test video at %s' % options.test_video)

  if not options.frame_analyzer:
    parser.error('You must provide the path to the frame analyzer executable!')
  if not os.path.exists(options.frame_analyzer):
    parser.error('Cannot find frame analyzer executable at %s!' %
                 options.frame_analyzer)
  return options
[ "def", "_ParseArgs", "(", ")", ":", "usage", "=", "'usage: %prog [options]'", "parser", "=", "optparse", ".", "OptionParser", "(", "usage", "=", "usage", ")", "parser", ".", "add_option", "(", "'--label'", ",", "type", "=", "'string'", ",", "default", "=", "'MY_TEST'", ",", "help", "=", "(", "'Label of the test, used to identify different '", "'tests. Default: %default'", ")", ")", "parser", ".", "add_option", "(", "'--ref_video'", ",", "type", "=", "'string'", ",", "help", "=", "'Reference video to compare with (YUV).'", ")", "parser", ".", "add_option", "(", "'--test_video'", ",", "type", "=", "'string'", ",", "help", "=", "(", "'Test video to be compared with the reference '", "'video (YUV).'", ")", ")", "parser", ".", "add_option", "(", "'--frame_analyzer'", ",", "type", "=", "'string'", ",", "help", "=", "'Path to the frame analyzer executable.'", ")", "parser", ".", "add_option", "(", "'--barcode_decoder'", ",", "type", "=", "'string'", ",", "help", "=", "(", "'Path to the barcode decoder script. By default, we '", "'will assume we can find it in barcode_tools/'", "'relative to this directory.'", ")", ")", "parser", ".", "add_option", "(", "'--ffmpeg_path'", ",", "type", "=", "'string'", ",", "help", "=", "(", "'The path to where the ffmpeg executable is located. '", "'If omitted, it will be assumed to be present in the '", "'PATH with the name ffmpeg[.exe].'", ")", ")", "parser", ".", "add_option", "(", "'--zxing_path'", ",", "type", "=", "'string'", ",", "help", "=", "(", "'The path to where the zxing executable is located. '", "'If omitted, it will be assumed to be present in the '", "'PATH with the name zxing[.exe].'", ")", ")", "parser", ".", "add_option", "(", "'--stats_file'", ",", "type", "=", "'string'", ",", "default", "=", "'stats.txt'", ",", "help", "=", "(", "'Path to the temporary stats file to be created and '", "'used. Default: %default'", ")", ")", "parser", ".", "add_option", "(", "'--yuv_frame_width'", ",", "type", "=", "'int'", ",", "default", "=", "640", ",", "help", "=", "'Width of the YUV file\\'s frames. Default: %default'", ")", "parser", ".", "add_option", "(", "'--yuv_frame_height'", ",", "type", "=", "'int'", ",", "default", "=", "480", ",", "help", "=", "'Height of the YUV file\\'s frames. Default: %default'", ")", "options", ",", "_args", "=", "parser", ".", "parse_args", "(", ")", "if", "not", "options", ".", "ref_video", ":", "parser", ".", "error", "(", "'You must provide a path to the reference video!'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "options", ".", "ref_video", ")", ":", "parser", ".", "error", "(", "'Cannot find the reference video at %s'", "%", "options", ".", "ref_video", ")", "if", "not", "options", ".", "test_video", ":", "parser", ".", "error", "(", "'You must provide a path to the test video!'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "options", ".", "test_video", ")", ":", "parser", ".", "error", "(", "'Cannot find the test video at %s'", "%", "options", ".", "test_video", ")", "if", "not", "options", ".", "frame_analyzer", ":", "parser", ".", "error", "(", "'You must provide the path to the frame analyzer executable!'", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "options", ".", "frame_analyzer", ")", ":", "parser", ".", "error", "(", "'Cannot find frame analyzer executable at %s!'", "%", "options", ".", "frame_analyzer", ")", "return", "options" ]
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/media/webrtc/trunk/webrtc/tools/compare_videos.py#L24-L75
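A minimal invocation sketch for the parser above; the flag names come from the record itself, while every path is a placeholder (the script validates that --ref_video/--test_video/--frame_analyzer exist before returning options):
import subprocess
# Placeholder paths; only the flag names are taken from _ParseArgs() above.
subprocess.call(['python', 'compare_videos.py',
                 '--label', 'MY_TEST',
                 '--ref_video', 'ref.yuv',               # required, must exist
                 '--test_video', 'test.yuv',             # required, must exist
                 '--frame_analyzer', './frame_analyzer'  # required, must exist
                 ])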
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/results_tab_widget/results_tab_presenter.py
python
ResultsTabPresenter.on_new_fit_performed
(self, fit_info=None)
React to a new fit created in the fitting tab
React to a new fit created in the fitting tab
[ "React", "to", "a", "new", "fit", "created", "in", "the", "fitting", "tab" ]
def on_new_fit_performed(self, fit_info=None): """React to a new fit created in the fitting tab""" # It's possible that this call can come in on a thread that # is different to the one that the view lives on. # In order to update the GUI we use invokeMethod with the assumption # that 'self' lives on the same thread as the view and Qt forces # the call to the chose method to be done on the thread the # view lives on. This avoids errors from painting on non-gui threads. new_fit_name = ";" if fit_info: new_fit_list = fit_info.output_workspace_names() if new_fit_list and len(new_fit_list)>0: new_fit_name = new_fit_list[0] QMetaObject.invokeMethod(self, "_on_new_fit_performed_impl", Q_ARG(str, new_fit_name))
[ "def", "on_new_fit_performed", "(", "self", ",", "fit_info", "=", "None", ")", ":", "# It's possible that this call can come in on a thread that", "# is different to the one that the view lives on.", "# In order to update the GUI we use invokeMethod with the assumption", "# that 'self' lives on the same thread as the view and Qt forces", "# the call to the chose method to be done on the thread the", "# view lives on. This avoids errors from painting on non-gui threads.", "new_fit_name", "=", "\";\"", "if", "fit_info", ":", "new_fit_list", "=", "fit_info", ".", "output_workspace_names", "(", ")", "if", "new_fit_list", "and", "len", "(", "new_fit_list", ")", ">", "0", ":", "new_fit_name", "=", "new_fit_list", "[", "0", "]", "QMetaObject", ".", "invokeMethod", "(", "self", ",", "\"_on_new_fit_performed_impl\"", ",", "Q_ARG", "(", "str", ",", "new_fit_name", ")", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/results_tab_widget/results_tab_presenter.py#L48-L62
SpenceKonde/megaTinyCore
1c4a70b18a149fe6bcb551dfa6db11ca50b8997b
megaavr/tools/libs/pyedbglib/protocols/avr8protocol.py
python
Avr8Protocol.run
(self)
Resumes core execution
Resumes core execution
[ "Resumes", "core", "execution" ]
def run(self): """Resumes core execution""" self.logger.debug("AVR core resume") self.check_response(self.jtagice3_command_response(bytearray([self.CMD_AVR8_RUN, self.CMD_VERSION0])))
[ "def", "run", "(", "self", ")", ":", "self", ".", "logger", ".", "debug", "(", "\"AVR core resume\"", ")", "self", ".", "check_response", "(", "self", ".", "jtagice3_command_response", "(", "bytearray", "(", "[", "self", ".", "CMD_AVR8_RUN", ",", "self", ".", "CMD_VERSION0", "]", ")", ")", ")" ]
https://github.com/SpenceKonde/megaTinyCore/blob/1c4a70b18a149fe6bcb551dfa6db11ca50b8997b/megaavr/tools/libs/pyedbglib/protocols/avr8protocol.py#L394-L397
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/keras/legacy_tf_layers/core.py
python
dense
( inputs, units, activation=None, use_bias=True, kernel_initializer=None, bias_initializer=init_ops.zeros_initializer(), kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, trainable=True, name=None, reuse=None)
return layer.apply(inputs)
Functional interface for the densely-connected layer. This layer implements the operation: `outputs = activation(inputs * kernel + bias)` where `activation` is the activation function passed as the `activation` argument (if not `None`), `kernel` is a weights matrix created by the layer, and `bias` is a bias vector created by the layer (only if `use_bias` is `True`). Args: inputs: Tensor input. units: Integer or Long, dimensionality of the output space. activation: Activation function (callable). Set it to None to maintain a linear activation. use_bias: Boolean, whether the layer uses a bias. kernel_initializer: Initializer function for the weight matrix. If `None` (default), weights are initialized using the default initializer used by `tf.compat.v1.get_variable`. bias_initializer: Initializer function for the bias. kernel_regularizer: Regularizer function for the weight matrix. bias_regularizer: Regularizer function for the bias. activity_regularizer: Regularizer function for the output. kernel_constraint: An optional projection function to be applied to the kernel after being updated by an `Optimizer` (e.g. used to implement norm constraints or value constraints for layer weights). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. bias_constraint: An optional projection function to be applied to the bias after being updated by an `Optimizer`. trainable: Boolean, if `True` also add variables to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`). name: String, the name of the layer. reuse: Boolean, whether to reuse the weights of a previous layer by the same name. Returns: Output tensor the same shape as `inputs` except the last dimension is of size `units`. Raises: ValueError: if eager execution is enabled.
Functional interface for the densely-connected layer.
[ "Functional", "interface", "for", "the", "densely", "-", "connected", "layer", "." ]
def dense( inputs, units, activation=None, use_bias=True, kernel_initializer=None, bias_initializer=init_ops.zeros_initializer(), kernel_regularizer=None, bias_regularizer=None, activity_regularizer=None, kernel_constraint=None, bias_constraint=None, trainable=True, name=None, reuse=None): """Functional interface for the densely-connected layer. This layer implements the operation: `outputs = activation(inputs * kernel + bias)` where `activation` is the activation function passed as the `activation` argument (if not `None`), `kernel` is a weights matrix created by the layer, and `bias` is a bias vector created by the layer (only if `use_bias` is `True`). Args: inputs: Tensor input. units: Integer or Long, dimensionality of the output space. activation: Activation function (callable). Set it to None to maintain a linear activation. use_bias: Boolean, whether the layer uses a bias. kernel_initializer: Initializer function for the weight matrix. If `None` (default), weights are initialized using the default initializer used by `tf.compat.v1.get_variable`. bias_initializer: Initializer function for the bias. kernel_regularizer: Regularizer function for the weight matrix. bias_regularizer: Regularizer function for the bias. activity_regularizer: Regularizer function for the output. kernel_constraint: An optional projection function to be applied to the kernel after being updated by an `Optimizer` (e.g. used to implement norm constraints or value constraints for layer weights). The function must take as input the unprojected variable and must return the projected variable (which must have the same shape). Constraints are not safe to use when doing asynchronous distributed training. bias_constraint: An optional projection function to be applied to the bias after being updated by an `Optimizer`. trainable: Boolean, if `True` also add variables to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`). name: String, the name of the layer. reuse: Boolean, whether to reuse the weights of a previous layer by the same name. Returns: Output tensor the same shape as `inputs` except the last dimension is of size `units`. Raises: ValueError: if eager execution is enabled. """ warnings.warn('`tf.layers.dense` is deprecated and ' 'will be removed in a future version. ' 'Please use `tf.keras.layers.Dense` instead.') layer = Dense(units, activation=activation, use_bias=use_bias, kernel_initializer=kernel_initializer, bias_initializer=bias_initializer, kernel_regularizer=kernel_regularizer, bias_regularizer=bias_regularizer, activity_regularizer=activity_regularizer, kernel_constraint=kernel_constraint, bias_constraint=bias_constraint, trainable=trainable, name=name, _scope=name, _reuse=reuse) return layer.apply(inputs)
[ "def", "dense", "(", "inputs", ",", "units", ",", "activation", "=", "None", ",", "use_bias", "=", "True", ",", "kernel_initializer", "=", "None", ",", "bias_initializer", "=", "init_ops", ".", "zeros_initializer", "(", ")", ",", "kernel_regularizer", "=", "None", ",", "bias_regularizer", "=", "None", ",", "activity_regularizer", "=", "None", ",", "kernel_constraint", "=", "None", ",", "bias_constraint", "=", "None", ",", "trainable", "=", "True", ",", "name", "=", "None", ",", "reuse", "=", "None", ")", ":", "warnings", ".", "warn", "(", "'`tf.layers.dense` is deprecated and '", "'will be removed in a future version. '", "'Please use `tf.keras.layers.Dense` instead.'", ")", "layer", "=", "Dense", "(", "units", ",", "activation", "=", "activation", ",", "use_bias", "=", "use_bias", ",", "kernel_initializer", "=", "kernel_initializer", ",", "bias_initializer", "=", "bias_initializer", ",", "kernel_regularizer", "=", "kernel_regularizer", ",", "bias_regularizer", "=", "bias_regularizer", ",", "activity_regularizer", "=", "activity_regularizer", ",", "kernel_constraint", "=", "kernel_constraint", ",", "bias_constraint", "=", "bias_constraint", ",", "trainable", "=", "trainable", ",", "name", "=", "name", ",", "_scope", "=", "name", ",", "_reuse", "=", "reuse", ")", "return", "layer", ".", "apply", "(", "inputs", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/keras/legacy_tf_layers/core.py#L113-L187
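A minimal graph-mode sketch of the functional layer above; the shapes and the 'fc1' name are illustrative, and eager execution is disabled explicitly because dense() raises under it:
import tensorflow.compat.v1 as tf
tf.disable_eager_execution()                      # dense() raises ValueError in eager mode
x = tf.placeholder(tf.float32, shape=(None, 8))   # illustrative input
y = tf.layers.dense(x, units=4, activation=tf.nn.relu, name='fc1')
print(y.shape)                                    # same as x except the last dim is now 4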
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/distutils/ccompiler.py
python
CCompiler.add_include_dir
(self, dir)
Add 'dir' to the list of directories that will be searched for header files. The compiler is instructed to search directories in the order in which they are supplied by successive calls to 'add_include_dir()'.
Add 'dir' to the list of directories that will be searched for header files. The compiler is instructed to search directories in the order in which they are supplied by successive calls to 'add_include_dir()'.
[ "Add", "dir", "to", "the", "list", "of", "directories", "that", "will", "be", "searched", "for", "header", "files", ".", "The", "compiler", "is", "instructed", "to", "search", "directories", "in", "the", "order", "in", "which", "they", "are", "supplied", "by", "successive", "calls", "to", "add_include_dir", "()", "." ]
def add_include_dir(self, dir): """Add 'dir' to the list of directories that will be searched for header files. The compiler is instructed to search directories in the order in which they are supplied by successive calls to 'add_include_dir()'. """ self.include_dirs.append(dir)
[ "def", "add_include_dir", "(", "self", ",", "dir", ")", ":", "self", ".", "include_dirs", ".", "append", "(", "dir", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/distutils/ccompiler.py#L217-L223
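A short sketch, assuming distutils can autodetect a host compiler; both directory paths are placeholders:
from distutils.ccompiler import new_compiler
cc = new_compiler()
cc.add_include_dir('/opt/mylib/include')   # searched first: search order follows call order
cc.add_include_dir('/usr/local/include')
print(cc.include_dirs)                     # ['/opt/mylib/include', '/usr/local/include']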
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/client/session.py
python
BaseSession.close
(self)
Closes this session. Calling this method frees all resources associated with the session. Raises: tf.errors.OpError: Or one of its subclasses if an error occurs while closing the TensorFlow session.
Closes this session.
[ "Closes", "this", "session", "." ]
def close(self): """Closes this session. Calling this method frees all resources associated with the session. Raises: tf.errors.OpError: Or one of its subclasses if an error occurs while closing the TensorFlow session. """ if self._session and not self._closed: self._closed = True tf_session.TF_CloseSession(self._session)
[ "def", "close", "(", "self", ")", ":", "if", "self", ".", "_session", "and", "not", "self", ".", "_closed", ":", "self", ".", "_closed", "=", "True", "tf_session", ".", "TF_CloseSession", "(", "self", ".", "_session", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/client/session.py#L742-L753
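A minimal TF1-style sketch; the _closed guard above makes a second close() a harmless no-op:
import tensorflow.compat.v1 as tf
tf.disable_eager_execution()
sess = tf.Session()
try:
    print(sess.run(tf.constant(41) + 1))   # 42
finally:
    sess.close()   # frees all resources held by the session
    sess.close()   # no-op: _closed is already True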
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/SocketServer.py
python
ThreadingMixIn.process_request
(self, request, client_address)
Start a new thread to process the request.
Start a new thread to process the request.
[ "Start", "a", "new", "thread", "to", "process", "the", "request", "." ]
def process_request(self, request, client_address): """Start a new thread to process the request.""" t = threading.Thread(target = self.process_request_thread, args = (request, client_address)) t.daemon = self.daemon_threads t.start()
[ "def", "process_request", "(", "self", ",", "request", ",", "client_address", ")", ":", "t", "=", "threading", ".", "Thread", "(", "target", "=", "self", ".", "process_request_thread", ",", "args", "=", "(", "request", ",", "client_address", ")", ")", "t", ".", "daemon", "=", "self", ".", "daemon_threads", "t", ".", "start", "(", ")" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/SocketServer.py#L599-L604
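A minimal echo-server sketch against the Python 2 SocketServer module shown above (port 0 asks the OS for a free port; on Python 3 the module is spelled socketserver):
import SocketServer

class EchoHandler(SocketServer.BaseRequestHandler):
    def handle(self):
        self.request.sendall(self.request.recv(1024))

class ThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer):
    daemon_threads = True   # copied onto each handler thread (t.daemon above)

server = ThreadedTCPServer(('127.0.0.1', 0), EchoHandler)
server.serve_forever()      # each request is handled in its own thread via process_request()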
quantOS-org/DataCore
e2ef9bd2c22ee9e2845675b6435a14fa607f3551
mdlink/deps/windows/protobuf-2.5.0/python/google/protobuf/internal/cpp_message.py
python
_AddMessageMethods
(message_descriptor, cls)
Adds the methods to a protocol message class.
Adds the methods to a protocol message class.
[ "Adds", "the", "methods", "to", "a", "protocol", "message", "class", "." ]
def _AddMessageMethods(message_descriptor, cls): """Adds the methods to a protocol message class.""" if message_descriptor.is_extendable: def ClearExtension(self, extension): self.Extensions.ClearExtension(extension) def HasExtension(self, extension): return self.Extensions.HasExtension(extension) def HasField(self, field_name): return self._cmsg.HasField(field_name) def ClearField(self, field_name): child_cmessage = None if field_name in self._composite_fields: child_field = self._composite_fields[field_name] del self._composite_fields[field_name] child_cdescriptor = self.__descriptors[field_name] # TODO(anuraag): Support clearing repeated message fields as well. if (child_cdescriptor.label != _LABEL_REPEATED and child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE): child_field._owner = None child_cmessage = child_field._cmsg if child_cmessage is not None: self._cmsg.ClearField(field_name, child_cmessage) else: self._cmsg.ClearField(field_name) def Clear(self): cmessages_to_release = [] for field_name, child_field in self._composite_fields.iteritems(): child_cdescriptor = self.__descriptors[field_name] # TODO(anuraag): Support clearing repeated message fields as well. if (child_cdescriptor.label != _LABEL_REPEATED and child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE): child_field._owner = None cmessages_to_release.append((child_cdescriptor, child_field._cmsg)) self._composite_fields.clear() self._cmsg.Clear(cmessages_to_release) def IsInitialized(self, errors=None): if self._cmsg.IsInitialized(): return True if errors is not None: errors.extend(self.FindInitializationErrors()); return False def SerializeToString(self): if not self.IsInitialized(): raise message.EncodeError( 'Message %s is missing required fields: %s' % ( self._cmsg.full_name, ','.join(self.FindInitializationErrors()))) return self._cmsg.SerializeToString() def SerializePartialToString(self): return self._cmsg.SerializePartialToString() def ParseFromString(self, serialized): self.Clear() self.MergeFromString(serialized) def MergeFromString(self, serialized): byte_size = self._cmsg.MergeFromString(serialized) if byte_size < 0: raise message.DecodeError('Unable to merge from string.') return byte_size def MergeFrom(self, msg): if not isinstance(msg, cls): raise TypeError( "Parameter to MergeFrom() must be instance of same class: " "expected %s got %s." % (cls.__name__, type(msg).__name__)) self._cmsg.MergeFrom(msg._cmsg) def CopyFrom(self, msg): self._cmsg.CopyFrom(msg._cmsg) def ByteSize(self): return self._cmsg.ByteSize() def SetInParent(self): return self._cmsg.SetInParent() def ListFields(self): all_fields = [] field_list = self._cmsg.ListFields() fields_by_name = cls.DESCRIPTOR.fields_by_name for is_extension, field_name in field_list: if is_extension: extension = cls._extensions_by_name[field_name] all_fields.append((extension, self.Extensions[extension])) else: field_descriptor = fields_by_name[field_name] all_fields.append( (field_descriptor, getattr(self, field_name))) all_fields.sort(key=lambda item: item[0].number) return all_fields def FindInitializationErrors(self): return self._cmsg.FindInitializationErrors() def __str__(self): return self._cmsg.DebugString() def __eq__(self, other): if self is other: return True if not isinstance(other, self.__class__): return False return self.ListFields() == other.ListFields() def __ne__(self, other): return not self == other def __hash__(self): raise TypeError('unhashable object') def __unicode__(self): # Lazy import to prevent circular import when text_format imports this file. from google.protobuf import text_format return text_format.MessageToString(self, as_utf8=True).decode('utf-8') # Attach the local methods to the message class. for key, value in locals().copy().iteritems(): if key not in ('key', 'value', '__builtins__', '__name__', '__doc__'): setattr(cls, key, value) # Static methods: def RegisterExtension(extension_handle): extension_handle.containing_type = cls.DESCRIPTOR cls._extensions_by_name[extension_handle.full_name] = extension_handle if _IsMessageSetExtension(extension_handle): # MessageSet extension. Also register under type name. cls._extensions_by_name[ extension_handle.message_type.full_name] = extension_handle cls.RegisterExtension = staticmethod(RegisterExtension) def FromString(string): msg = cls() msg.MergeFromString(string) return msg cls.FromString = staticmethod(FromString)
[ "def", "_AddMessageMethods", "(", "message_descriptor", ",", "cls", ")", ":", "if", "message_descriptor", ".", "is_extendable", ":", "def", "ClearExtension", "(", "self", ",", "extension", ")", ":", "self", ".", "Extensions", ".", "ClearExtension", "(", "extension", ")", "def", "HasExtension", "(", "self", ",", "extension", ")", ":", "return", "self", ".", "Extensions", ".", "HasExtension", "(", "extension", ")", "def", "HasField", "(", "self", ",", "field_name", ")", ":", "return", "self", ".", "_cmsg", ".", "HasField", "(", "field_name", ")", "def", "ClearField", "(", "self", ",", "field_name", ")", ":", "child_cmessage", "=", "None", "if", "field_name", "in", "self", ".", "_composite_fields", ":", "child_field", "=", "self", ".", "_composite_fields", "[", "field_name", "]", "del", "self", ".", "_composite_fields", "[", "field_name", "]", "child_cdescriptor", "=", "self", ".", "__descriptors", "[", "field_name", "]", "# TODO(anuraag): Support clearing repeated message fields as well.", "if", "(", "child_cdescriptor", ".", "label", "!=", "_LABEL_REPEATED", "and", "child_cdescriptor", ".", "cpp_type", "==", "_CPPTYPE_MESSAGE", ")", ":", "child_field", ".", "_owner", "=", "None", "child_cmessage", "=", "child_field", ".", "_cmsg", "if", "child_cmessage", "is", "not", "None", ":", "self", ".", "_cmsg", ".", "ClearField", "(", "field_name", ",", "child_cmessage", ")", "else", ":", "self", ".", "_cmsg", ".", "ClearField", "(", "field_name", ")", "def", "Clear", "(", "self", ")", ":", "cmessages_to_release", "=", "[", "]", "for", "field_name", ",", "child_field", "in", "self", ".", "_composite_fields", ".", "iteritems", "(", ")", ":", "child_cdescriptor", "=", "self", ".", "__descriptors", "[", "field_name", "]", "# TODO(anuraag): Support clearing repeated message fields as well.", "if", "(", "child_cdescriptor", ".", "label", "!=", "_LABEL_REPEATED", "and", "child_cdescriptor", ".", "cpp_type", "==", "_CPPTYPE_MESSAGE", ")", ":", "child_field", ".", "_owner", "=", "None", "cmessages_to_release", ".", "append", "(", "(", "child_cdescriptor", ",", "child_field", ".", "_cmsg", ")", ")", "self", ".", "_composite_fields", ".", "clear", "(", ")", "self", ".", "_cmsg", ".", "Clear", "(", "cmessages_to_release", ")", "def", "IsInitialized", "(", "self", ",", "errors", "=", "None", ")", ":", "if", "self", ".", "_cmsg", ".", "IsInitialized", "(", ")", ":", "return", "True", "if", "errors", "is", "not", "None", ":", "errors", ".", "extend", "(", "self", ".", "FindInitializationErrors", "(", ")", ")", "return", "False", "def", "SerializeToString", "(", "self", ")", ":", "if", "not", "self", ".", "IsInitialized", "(", ")", ":", "raise", "message", ".", "EncodeError", "(", "'Message %s is missing required fields: %s'", "%", "(", "self", ".", "_cmsg", ".", "full_name", ",", "','", ".", "join", "(", "self", ".", "FindInitializationErrors", "(", ")", ")", ")", ")", "return", "self", ".", "_cmsg", ".", "SerializeToString", "(", ")", "def", "SerializePartialToString", "(", "self", ")", ":", "return", "self", ".", "_cmsg", ".", "SerializePartialToString", "(", ")", "def", "ParseFromString", "(", "self", ",", "serialized", ")", ":", "self", ".", "Clear", "(", ")", "self", ".", "MergeFromString", "(", "serialized", ")", "def", "MergeFromString", "(", "self", ",", "serialized", ")", ":", "byte_size", "=", "self", ".", "_cmsg", ".", "MergeFromString", "(", "serialized", ")", "if", "byte_size", "<", "0", ":", "raise", "message", ".", "DecodeError", "(", "'Unable to merge from string.'", ")", "return", "byte_size", 
"def", "MergeFrom", "(", "self", ",", "msg", ")", ":", "if", "not", "isinstance", "(", "msg", ",", "cls", ")", ":", "raise", "TypeError", "(", "\"Parameter to MergeFrom() must be instance of same class: \"", "\"expected %s got %s.\"", "%", "(", "cls", ".", "__name__", ",", "type", "(", "msg", ")", ".", "__name__", ")", ")", "self", ".", "_cmsg", ".", "MergeFrom", "(", "msg", ".", "_cmsg", ")", "def", "CopyFrom", "(", "self", ",", "msg", ")", ":", "self", ".", "_cmsg", ".", "CopyFrom", "(", "msg", ".", "_cmsg", ")", "def", "ByteSize", "(", "self", ")", ":", "return", "self", ".", "_cmsg", ".", "ByteSize", "(", ")", "def", "SetInParent", "(", "self", ")", ":", "return", "self", ".", "_cmsg", ".", "SetInParent", "(", ")", "def", "ListFields", "(", "self", ")", ":", "all_fields", "=", "[", "]", "field_list", "=", "self", ".", "_cmsg", ".", "ListFields", "(", ")", "fields_by_name", "=", "cls", ".", "DESCRIPTOR", ".", "fields_by_name", "for", "is_extension", ",", "field_name", "in", "field_list", ":", "if", "is_extension", ":", "extension", "=", "cls", ".", "_extensions_by_name", "[", "field_name", "]", "all_fields", ".", "append", "(", "(", "extension", ",", "self", ".", "Extensions", "[", "extension", "]", ")", ")", "else", ":", "field_descriptor", "=", "fields_by_name", "[", "field_name", "]", "all_fields", ".", "append", "(", "(", "field_descriptor", ",", "getattr", "(", "self", ",", "field_name", ")", ")", ")", "all_fields", ".", "sort", "(", "key", "=", "lambda", "item", ":", "item", "[", "0", "]", ".", "number", ")", "return", "all_fields", "def", "FindInitializationErrors", "(", "self", ")", ":", "return", "self", ".", "_cmsg", ".", "FindInitializationErrors", "(", ")", "def", "__str__", "(", "self", ")", ":", "return", "self", ".", "_cmsg", ".", "DebugString", "(", ")", "def", "__eq__", "(", "self", ",", "other", ")", ":", "if", "self", "is", "other", ":", "return", "True", "if", "not", "isinstance", "(", "other", ",", "self", ".", "__class__", ")", ":", "return", "False", "return", "self", ".", "ListFields", "(", ")", "==", "other", ".", "ListFields", "(", ")", "def", "__ne__", "(", "self", ",", "other", ")", ":", "return", "not", "self", "==", "other", "def", "__hash__", "(", "self", ")", ":", "raise", "TypeError", "(", "'unhashable object'", ")", "def", "__unicode__", "(", "self", ")", ":", "# Lazy import to prevent circular import when text_format imports this file.", "from", "google", ".", "protobuf", "import", "text_format", "return", "text_format", ".", "MessageToString", "(", "self", ",", "as_utf8", "=", "True", ")", ".", "decode", "(", "'utf-8'", ")", "# Attach the local methods to the message class.", "for", "key", ",", "value", "in", "locals", "(", ")", ".", "copy", "(", ")", ".", "iteritems", "(", ")", ":", "if", "key", "not", "in", "(", "'key'", ",", "'value'", ",", "'__builtins__'", ",", "'__name__'", ",", "'__doc__'", ")", ":", "setattr", "(", "cls", ",", "key", ",", "value", ")", "# Static methods:", "def", "RegisterExtension", "(", "extension_handle", ")", ":", "extension_handle", ".", "containing_type", "=", "cls", ".", "DESCRIPTOR", "cls", ".", "_extensions_by_name", "[", "extension_handle", ".", "full_name", "]", "=", "extension_handle", "if", "_IsMessageSetExtension", "(", "extension_handle", ")", ":", "# MessageSet extension. 
Also register under type name.", "cls", ".", "_extensions_by_name", "[", "extension_handle", ".", "message_type", ".", "full_name", "]", "=", "extension_handle", "cls", ".", "RegisterExtension", "=", "staticmethod", "(", "RegisterExtension", ")", "def", "FromString", "(", "string", ")", ":", "msg", "=", "cls", "(", ")", "msg", ".", "MergeFromString", "(", "string", ")", "return", "msg", "cls", ".", "FromString", "=", "staticmethod", "(", "FromString", ")" ]
https://github.com/quantOS-org/DataCore/blob/e2ef9bd2c22ee9e2845675b6435a14fa607f3551/mdlink/deps/windows/protobuf-2.5.0/python/google/protobuf/internal/cpp_message.py#L508-L654
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/numpy/py3/numpy/ma/core.py
python
_MaskedBinaryOperation.__init__
(self, mbfunc, fillx=0, filly=0)
abfunc(fillx, filly) must be defined. abfunc(x, filly) = x for all x to enable reduce.
abfunc(fillx, filly) must be defined.
[ "abfunc", "(", "fillx", "filly", ")", "must", "be", "defined", "." ]
def __init__(self, mbfunc, fillx=0, filly=0): """ abfunc(fillx, filly) must be defined. abfunc(x, filly) = x for all x to enable reduce. """ super().__init__(mbfunc) self.fillx = fillx self.filly = filly ufunc_domain[mbfunc] = None ufunc_fills[mbfunc] = (fillx, filly)
[ "def", "__init__", "(", "self", ",", "mbfunc", ",", "fillx", "=", "0", ",", "filly", "=", "0", ")", ":", "super", "(", ")", ".", "__init__", "(", "mbfunc", ")", "self", ".", "fillx", "=", "fillx", "self", ".", "filly", "=", "filly", "ufunc_domain", "[", "mbfunc", "]", "=", "None", "ufunc_fills", "[", "mbfunc", "]", "=", "(", "fillx", ",", "filly", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/ma/core.py#L992-L1003
snap-stanford/snap-python
d53c51b0a26aa7e3e7400b014cdf728948fde80a
setup/snap.py
python
TStr.GetPrimHashCd
(self)
return _snap.TStr_GetPrimHashCd(self)
GetPrimHashCd(TStr self) -> int Parameters: self: TStr const *
GetPrimHashCd(TStr self) -> int
[ "GetPrimHashCd", "(", "TStr", "self", ")", "-", ">", "int" ]
def GetPrimHashCd(self): """ GetPrimHashCd(TStr self) -> int Parameters: self: TStr const * """ return _snap.TStr_GetPrimHashCd(self)
[ "def", "GetPrimHashCd", "(", "self", ")", ":", "return", "_snap", ".", "TStr_GetPrimHashCd", "(", "self", ")" ]
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L10328-L10336
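A tiny sketch, assuming the SWIG-generated snap module is importable; the string is arbitrary:
import snap
s = snap.TStr("hello")
print(s.GetPrimHashCd())   # an int hash code computed by the underlying C++ TStr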
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/train/callback/_landscape.py
python
SummaryLandscape._check_device_ids
(device_ids)
Check device_ids type and value.
Check device_ids type and value.
[ "Check", "device_ids", "type", "and", "value", "." ]
def _check_device_ids(device_ids): """Check device_ids type and value.""" check_value_type('device_ids', device_ids, list) for i in device_ids: if not isinstance(i, int): raise TypeError(f'Landscape device_ids type should be int, ' f'but got the: {type(i)}.') #device_id should be between 0 and 7. if i < 0 or i > 7: raise ValueError(f'Landscape device_ids value should be between 0 and 7,but got {i}.')
[ "def", "_check_device_ids", "(", "device_ids", ")", ":", "check_value_type", "(", "'device_ids'", ",", "device_ids", ",", "list", ")", "for", "i", "in", "device_ids", ":", "if", "not", "isinstance", "(", "i", ",", "int", ")", ":", "raise", "TypeError", "(", "f'Landscape device_ids type should be int, '", "f'but got the: {type(i)}.'", ")", "#device_id should be between 0 and 7.", "if", "i", "<", "0", "or", "i", ">", "7", ":", "raise", "ValueError", "(", "f'Landscape device_ids value should be between 0 and 7,but got {i}.'", ")" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/train/callback/_landscape.py#L811-L820
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/io/pytables.py
python
Fixed.validate_version
(self, where=None)
return True
are we trying to operate on an old version?
are we trying to operate on an old version?
[ "are", "we", "trying", "to", "operate", "on", "an", "old", "version?" ]
def validate_version(self, where=None): """ are we trying to operate on an old version? """ return True
[ "def", "validate_version", "(", "self", ",", "where", "=", "None", ")", ":", "return", "True" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/io/pytables.py#L2596-L2598
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/package_index.py
python
interpret_distro_name
( location, basename, metadata, py_version=None, precedence=SOURCE_DIST, platform=None )
Generate alternative interpretations of a source distro name Note: if `location` is a filesystem filename, you should call ``pkg_resources.normalize_path()`` on it before passing it to this routine!
Generate alternative interpretations of a source distro name
[ "Generate", "alternative", "interpretations", "of", "a", "source", "distro", "name" ]
def interpret_distro_name( location, basename, metadata, py_version=None, precedence=SOURCE_DIST, platform=None ): """Generate alternative interpretations of a source distro name Note: if `location` is a filesystem filename, you should call ``pkg_resources.normalize_path()`` on it before passing it to this routine! """ # Generate alternative interpretations of a source distro name # Because some packages are ambiguous as to name/versions split # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, # the spurious interpretations should be ignored, because in the event # there's also an "adns" package, the spurious "python-1.1.0" version will # compare lower than any numeric version number, and is therefore unlikely # to match a request for it. It's still a potential problem, though, and # in the long run PyPI and the distutils should go for "safe" names and # versions in distribution archive names (sdist and bdist). parts = basename.split('-') if not py_version and any(re.match(r'py\d\.\d$', p) for p in parts[2:]): # it is a bdist_dumb, not an sdist -- bail out return for p in range(1, len(parts) + 1): yield Distribution( location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), py_version=py_version, precedence=precedence, platform=platform )
[ "def", "interpret_distro_name", "(", "location", ",", "basename", ",", "metadata", ",", "py_version", "=", "None", ",", "precedence", "=", "SOURCE_DIST", ",", "platform", "=", "None", ")", ":", "# Generate alternative interpretations of a source distro name", "# Because some packages are ambiguous as to name/versions split", "# e.g. \"adns-python-1.1.0\", \"egenix-mx-commercial\", etc.", "# So, we generate each possible interepretation (e.g. \"adns, python-1.1.0\"", "# \"adns-python, 1.1.0\", and \"adns-python-1.1.0, no version\"). In practice,", "# the spurious interpretations should be ignored, because in the event", "# there's also an \"adns\" package, the spurious \"python-1.1.0\" version will", "# compare lower than any numeric version number, and is therefore unlikely", "# to match a request for it. It's still a potential problem, though, and", "# in the long run PyPI and the distutils should go for \"safe\" names and", "# versions in distribution archive names (sdist and bdist).", "parts", "=", "basename", ".", "split", "(", "'-'", ")", "if", "not", "py_version", "and", "any", "(", "re", ".", "match", "(", "r'py\\d\\.\\d$'", ",", "p", ")", "for", "p", "in", "parts", "[", "2", ":", "]", ")", ":", "# it is a bdist_dumb, not an sdist -- bail out", "return", "for", "p", "in", "range", "(", "1", ",", "len", "(", "parts", ")", "+", "1", ")", ":", "yield", "Distribution", "(", "location", ",", "metadata", ",", "'-'", ".", "join", "(", "parts", "[", ":", "p", "]", ")", ",", "'-'", ".", "join", "(", "parts", "[", "p", ":", "]", ")", ",", "py_version", "=", "py_version", ",", "precedence", "=", "precedence", ",", "platform", "=", "platform", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/package_index.py#L151-L183
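A short sketch of the generator above on the ambiguous archive name from its own comments; printing each Distribution is safe even for the version-less interpretation:
from setuptools.package_index import interpret_distro_name
for dist in interpret_distro_name('/tmp', 'adns-python-1.1.0', metadata=None):
    print(dist)   # roughly: 'adns python-1.1.0', 'adns-python 1.1.0', 'adns-python-1.1.0 [unknown version]'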
apache/impala
8ddac48f3428c86f2cbd037ced89cfb903298b12
shell/ext-py/six-1.14.0/six.py
python
add_metaclass
(metaclass)
return wrapper
Class decorator for creating a class with a metaclass.
Class decorator for creating a class with a metaclass.
[ "Class", "decorator", "for", "creating", "a", "class", "with", "a", "metaclass", "." ]
def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) if hasattr(cls, '__qualname__'): orig_vars['__qualname__'] = cls.__qualname__ return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper
[ "def", "add_metaclass", "(", "metaclass", ")", ":", "def", "wrapper", "(", "cls", ")", ":", "orig_vars", "=", "cls", ".", "__dict__", ".", "copy", "(", ")", "slots", "=", "orig_vars", ".", "get", "(", "'__slots__'", ")", "if", "slots", "is", "not", "None", ":", "if", "isinstance", "(", "slots", ",", "str", ")", ":", "slots", "=", "[", "slots", "]", "for", "slots_var", "in", "slots", ":", "orig_vars", ".", "pop", "(", "slots_var", ")", "orig_vars", ".", "pop", "(", "'__dict__'", ",", "None", ")", "orig_vars", ".", "pop", "(", "'__weakref__'", ",", "None", ")", "if", "hasattr", "(", "cls", ",", "'__qualname__'", ")", ":", "orig_vars", "[", "'__qualname__'", "]", "=", "cls", ".", "__qualname__", "return", "metaclass", "(", "cls", ".", "__name__", ",", "cls", ".", "__bases__", ",", "orig_vars", ")", "return", "wrapper" ]
https://github.com/apache/impala/blob/8ddac48f3428c86f2cbd037ced89cfb903298b12/shell/ext-py/six-1.14.0/six.py#L864-L879
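The canonical usage of the decorator above: a Python 2/3-compatible way to attach a metaclass without the version-specific class-statement syntax:
import six

class Registry(type):
    classes = []
    def __init__(cls, name, bases, ns):
        super(Registry, cls).__init__(name, bases, ns)
        Registry.classes.append(name)

@six.add_metaclass(Registry)
class Plugin(object):
    pass

assert type(Plugin) is Registry and 'Plugin' in Registry.classes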
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/xml/sax/saxutils.py
python
prepare_input_source
(source, base = "")
return source
This function takes an InputSource and an optional base URL and returns a fully resolved InputSource object ready for reading.
This function takes an InputSource and an optional base URL and returns a fully resolved InputSource object ready for reading.
[ "This", "function", "takes", "an", "InputSource", "and", "an", "optional", "base", "URL", "and", "returns", "a", "fully", "resolved", "InputSource", "object", "ready", "for", "reading", "." ]
def prepare_input_source(source, base = ""): """This function takes an InputSource and an optional base URL and returns a fully resolved InputSource object ready for reading.""" if type(source) in _StringTypes: source = xmlreader.InputSource(source) elif hasattr(source, "read"): f = source source = xmlreader.InputSource() source.setByteStream(f) if hasattr(f, "name"): source.setSystemId(f.name) if source.getByteStream() is None: try: sysid = source.getSystemId() basehead = os.path.dirname(os.path.normpath(base)) encoding = sys.getfilesystemencoding() if isinstance(sysid, unicode): if not isinstance(basehead, unicode): try: basehead = basehead.decode(encoding) except UnicodeDecodeError: sysid = sysid.encode(encoding) else: if isinstance(basehead, unicode): try: sysid = sysid.decode(encoding) except UnicodeDecodeError: basehead = basehead.encode(encoding) sysidfilename = os.path.join(basehead, sysid) isfile = os.path.isfile(sysidfilename) except UnicodeError: isfile = False if isfile: source.setSystemId(sysidfilename) f = open(sysidfilename, "rb") else: source.setSystemId(urlparse.urljoin(base, source.getSystemId())) f = urllib.urlopen(source.getSystemId()) source.setByteStream(f) return source
[ "def", "prepare_input_source", "(", "source", ",", "base", "=", "\"\"", ")", ":", "if", "type", "(", "source", ")", "in", "_StringTypes", ":", "source", "=", "xmlreader", ".", "InputSource", "(", "source", ")", "elif", "hasattr", "(", "source", ",", "\"read\"", ")", ":", "f", "=", "source", "source", "=", "xmlreader", ".", "InputSource", "(", ")", "source", ".", "setByteStream", "(", "f", ")", "if", "hasattr", "(", "f", ",", "\"name\"", ")", ":", "source", ".", "setSystemId", "(", "f", ".", "name", ")", "if", "source", ".", "getByteStream", "(", ")", "is", "None", ":", "try", ":", "sysid", "=", "source", ".", "getSystemId", "(", ")", "basehead", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "normpath", "(", "base", ")", ")", "encoding", "=", "sys", ".", "getfilesystemencoding", "(", ")", "if", "isinstance", "(", "sysid", ",", "unicode", ")", ":", "if", "not", "isinstance", "(", "basehead", ",", "unicode", ")", ":", "try", ":", "basehead", "=", "basehead", ".", "decode", "(", "encoding", ")", "except", "UnicodeDecodeError", ":", "sysid", "=", "sysid", ".", "encode", "(", "encoding", ")", "else", ":", "if", "isinstance", "(", "basehead", ",", "unicode", ")", ":", "try", ":", "sysid", "=", "sysid", ".", "decode", "(", "encoding", ")", "except", "UnicodeDecodeError", ":", "basehead", "=", "basehead", ".", "encode", "(", "encoding", ")", "sysidfilename", "=", "os", ".", "path", ".", "join", "(", "basehead", ",", "sysid", ")", "isfile", "=", "os", ".", "path", ".", "isfile", "(", "sysidfilename", ")", "except", "UnicodeError", ":", "isfile", "=", "False", "if", "isfile", ":", "source", ".", "setSystemId", "(", "sysidfilename", ")", "f", "=", "open", "(", "sysidfilename", ",", "\"rb\"", ")", "else", ":", "source", ".", "setSystemId", "(", "urlparse", ".", "urljoin", "(", "base", ",", "source", ".", "getSystemId", "(", ")", ")", ")", "f", "=", "urllib", ".", "urlopen", "(", "source", ".", "getSystemId", "(", ")", ")", "source", ".", "setByteStream", "(", "f", ")", "return", "source" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/xml/sax/saxutils.py#L303-L346
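A small sketch against the Python 2 stdlib copy above; 'example.xml' is a placeholder that must exist on disk for the byte stream to be opened:
from xml.sax import saxutils, xmlreader
src = saxutils.prepare_input_source('example.xml')   # also accepts a stream or an InputSource
assert isinstance(src, xmlreader.InputSource)
print(src.getSystemId())      # the resolved system id
print(src.getByteStream())    # an open file-like object, ready for a parser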
google/syzygy
8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5
third_party/numpy/files/numpy/lib/recfunctions.py
python
append_fields
(base, names, data, dtypes=None, fill_value= -1, usemask=True, asrecarray=False)
return _fix_output(output, usemask=usemask, asrecarray=asrecarray)
Add new fields to an existing array. The names of the fields are given with the `names` arguments, the corresponding values with the `data` arguments. If a single field is appended, `names`, `data` and `dtypes` do not have to be lists but just values. Parameters ---------- base : array Input array to extend. names : string, sequence String or sequence of strings corresponding to the names of the new fields. data : array or sequence of arrays Array or sequence of arrays storing the fields to add to the base. dtypes : sequence of datatypes, optional Datatype or sequence of datatypes. If None, the datatypes are estimated from the `data`. fill_value : {float}, optional Filling value used to pad missing data on the shorter arrays. usemask : {False, True}, optional Whether to return a masked array or not. asrecarray : {False, True}, optional Whether to return a recarray (MaskedRecords) or not.
Add new fields to an existing array.
[ "Add", "new", "fields", "to", "an", "existing", "array", "." ]
def append_fields(base, names, data, dtypes=None, fill_value= -1, usemask=True, asrecarray=False): """ Add new fields to an existing array. The names of the fields are given with the `names` arguments, the corresponding values with the `data` arguments. If a single field is appended, `names`, `data` and `dtypes` do not have to be lists but just values. Parameters ---------- base : array Input array to extend. names : string, sequence String or sequence of strings corresponding to the names of the new fields. data : array or sequence of arrays Array or sequence of arrays storing the fields to add to the base. dtypes : sequence of datatypes, optional Datatype or sequence of datatypes. If None, the datatypes are estimated from the `data`. fill_value : {float}, optional Filling value used to pad missing data on the shorter arrays. usemask : {False, True}, optional Whether to return a masked array or not. asrecarray : {False, True}, optional Whether to return a recarray (MaskedRecords) or not. """ # Check the names if isinstance(names, (tuple, list)): if len(names) != len(data): msg = "The number of arrays does not match the number of names" raise ValueError(msg) elif isinstance(names, basestring): names = [names, ] data = [data, ] # if dtypes is None: data = [np.array(a, copy=False, subok=True) for a in data] data = [a.view([(name, a.dtype)]) for (name, a) in zip(names, data)] else : if not isinstance(dtypes, (tuple, list)): dtypes = [dtypes, ] if len(data) != len(dtypes): if len(dtypes) == 1: dtypes = dtypes * len(data) else: msg = "The dtypes argument must be None, a dtype, or a list." raise ValueError(msg) data = [np.array(a, copy=False, subok=True, dtype=d).view([(n, d)]) for (a, n, d) in zip(data, names, dtypes)] # base = merge_arrays(base, usemask=usemask, fill_value=fill_value) if len(data) > 1: data = merge_arrays(data, flatten=True, usemask=usemask, fill_value=fill_value) else: data = data.pop() # output = ma.masked_all(max(len(base), len(data)), dtype=base.dtype.descr + data.dtype.descr) output = recursive_fill_fields(base, output) output = recursive_fill_fields(data, output) # return _fix_output(output, usemask=usemask, asrecarray=asrecarray)
[ "def", "append_fields", "(", "base", ",", "names", ",", "data", ",", "dtypes", "=", "None", ",", "fill_value", "=", "-", "1", ",", "usemask", "=", "True", ",", "asrecarray", "=", "False", ")", ":", "# Check the names", "if", "isinstance", "(", "names", ",", "(", "tuple", ",", "list", ")", ")", ":", "if", "len", "(", "names", ")", "!=", "len", "(", "data", ")", ":", "msg", "=", "\"The number of arrays does not match the number of names\"", "raise", "ValueError", "(", "msg", ")", "elif", "isinstance", "(", "names", ",", "basestring", ")", ":", "names", "=", "[", "names", ",", "]", "data", "=", "[", "data", ",", "]", "#", "if", "dtypes", "is", "None", ":", "data", "=", "[", "np", ".", "array", "(", "a", ",", "copy", "=", "False", ",", "subok", "=", "True", ")", "for", "a", "in", "data", "]", "data", "=", "[", "a", ".", "view", "(", "[", "(", "name", ",", "a", ".", "dtype", ")", "]", ")", "for", "(", "name", ",", "a", ")", "in", "zip", "(", "names", ",", "data", ")", "]", "else", ":", "if", "not", "isinstance", "(", "dtypes", ",", "(", "tuple", ",", "list", ")", ")", ":", "dtypes", "=", "[", "dtypes", ",", "]", "if", "len", "(", "data", ")", "!=", "len", "(", "dtypes", ")", ":", "if", "len", "(", "dtypes", ")", "==", "1", ":", "dtypes", "=", "dtypes", "*", "len", "(", "data", ")", "else", ":", "msg", "=", "\"The dtypes argument must be None, a dtype, or a list.\"", "raise", "ValueError", "(", "msg", ")", "data", "=", "[", "np", ".", "array", "(", "a", ",", "copy", "=", "False", ",", "subok", "=", "True", ",", "dtype", "=", "d", ")", ".", "view", "(", "[", "(", "n", ",", "d", ")", "]", ")", "for", "(", "a", ",", "n", ",", "d", ")", "in", "zip", "(", "data", ",", "names", ",", "dtypes", ")", "]", "#", "base", "=", "merge_arrays", "(", "base", ",", "usemask", "=", "usemask", ",", "fill_value", "=", "fill_value", ")", "if", "len", "(", "data", ")", ">", "1", ":", "data", "=", "merge_arrays", "(", "data", ",", "flatten", "=", "True", ",", "usemask", "=", "usemask", ",", "fill_value", "=", "fill_value", ")", "else", ":", "data", "=", "data", ".", "pop", "(", ")", "#", "output", "=", "ma", ".", "masked_all", "(", "max", "(", "len", "(", "base", ")", ",", "len", "(", "data", ")", ")", ",", "dtype", "=", "base", ".", "dtype", ".", "descr", "+", "data", ".", "dtype", ".", "descr", ")", "output", "=", "recursive_fill_fields", "(", "base", ",", "output", ")", "output", "=", "recursive_fill_fields", "(", "data", ",", "output", ")", "#", "return", "_fix_output", "(", "output", ",", "usemask", "=", "usemask", ",", "asrecarray", "=", "asrecarray", ")" ]
https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/lib/recfunctions.py#L575-L641
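A minimal NumPy sketch of the record above; usemask=False returns a plain structured ndarray rather than a masked array:
import numpy as np
from numpy.lib import recfunctions as rfn

a = np.array([(1, 10.0), (2, 20.0)], dtype=[('id', int), ('x', float)])
b = rfn.append_fields(a, 'y', data=[100.0, 200.0], usemask=False)
print(b.dtype.names)   # ('id', 'x', 'y')
print(b['y'])          # [100. 200.]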
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/ipython/py3/IPython/core/interactiveshell.py
python
InteractiveShell.reset
(self, new_session=True, aggressive=False)
Clear all internal namespaces, and attempt to release references to user objects. If new_session is True, a new history session will be opened.
Clear all internal namespaces, and attempt to release references to user objects.
[ "Clear", "all", "internal", "namespaces", "and", "attempt", "to", "release", "references", "to", "user", "objects", "." ]
def reset(self, new_session=True, aggressive=False): """Clear all internal namespaces, and attempt to release references to user objects. If new_session is True, a new history session will be opened. """ # Clear histories self.history_manager.reset(new_session) # Reset counter used to index all histories if new_session: self.execution_count = 1 # Reset last execution result self.last_execution_succeeded = True self.last_execution_result = None # Flush cached output items if self.displayhook.do_full_cache: self.displayhook.flush() # The main execution namespaces must be cleared very carefully, # skipping the deletion of the builtin-related keys, because doing so # would cause errors in many object's __del__ methods. if self.user_ns is not self.user_global_ns: self.user_ns.clear() ns = self.user_global_ns drop_keys = set(ns.keys()) drop_keys.discard('__builtin__') drop_keys.discard('__builtins__') drop_keys.discard('__name__') for k in drop_keys: del ns[k] self.user_ns_hidden.clear() # Restore the user namespaces to minimal usability self.init_user_ns() if aggressive and not hasattr(self, "_sys_modules_keys"): print("Cannot restore sys.module, no snapshot") elif aggressive: print("culling sys module...") current_keys = set(sys.modules.keys()) for k in current_keys - self._sys_modules_keys: if k.startswith("multiprocessing"): continue del sys.modules[k] # Restore the default and user aliases self.alias_manager.clear_aliases() self.alias_manager.init_aliases() # Now define aliases that only make sense on the terminal, because they # need direct access to the console in a way that we can't emulate in # GUI or web frontend if os.name == 'posix': for cmd in ('clear', 'more', 'less', 'man'): if cmd not in self.magics_manager.magics['line']: self.alias_manager.soft_define_alias(cmd, cmd) # Flush the private list of module references kept for script # execution protection self.clear_main_mod_cache()
[ "def", "reset", "(", "self", ",", "new_session", "=", "True", ",", "aggressive", "=", "False", ")", ":", "# Clear histories", "self", ".", "history_manager", ".", "reset", "(", "new_session", ")", "# Reset counter used to index all histories", "if", "new_session", ":", "self", ".", "execution_count", "=", "1", "# Reset last execution result", "self", ".", "last_execution_succeeded", "=", "True", "self", ".", "last_execution_result", "=", "None", "# Flush cached output items", "if", "self", ".", "displayhook", ".", "do_full_cache", ":", "self", ".", "displayhook", ".", "flush", "(", ")", "# The main execution namespaces must be cleared very carefully,", "# skipping the deletion of the builtin-related keys, because doing so", "# would cause errors in many object's __del__ methods.", "if", "self", ".", "user_ns", "is", "not", "self", ".", "user_global_ns", ":", "self", ".", "user_ns", ".", "clear", "(", ")", "ns", "=", "self", ".", "user_global_ns", "drop_keys", "=", "set", "(", "ns", ".", "keys", "(", ")", ")", "drop_keys", ".", "discard", "(", "'__builtin__'", ")", "drop_keys", ".", "discard", "(", "'__builtins__'", ")", "drop_keys", ".", "discard", "(", "'__name__'", ")", "for", "k", "in", "drop_keys", ":", "del", "ns", "[", "k", "]", "self", ".", "user_ns_hidden", ".", "clear", "(", ")", "# Restore the user namespaces to minimal usability", "self", ".", "init_user_ns", "(", ")", "if", "aggressive", "and", "not", "hasattr", "(", "self", ",", "\"_sys_modules_keys\"", ")", ":", "print", "(", "\"Cannot restore sys.module, no snapshot\"", ")", "elif", "aggressive", ":", "print", "(", "\"culling sys module...\"", ")", "current_keys", "=", "set", "(", "sys", ".", "modules", ".", "keys", "(", ")", ")", "for", "k", "in", "current_keys", "-", "self", ".", "_sys_modules_keys", ":", "if", "k", ".", "startswith", "(", "\"multiprocessing\"", ")", ":", "continue", "del", "sys", ".", "modules", "[", "k", "]", "# Restore the default and user aliases", "self", ".", "alias_manager", ".", "clear_aliases", "(", ")", "self", ".", "alias_manager", ".", "init_aliases", "(", ")", "# Now define aliases that only make sense on the terminal, because they", "# need direct access to the console in a way that we can't emulate in", "# GUI or web frontend", "if", "os", ".", "name", "==", "'posix'", ":", "for", "cmd", "in", "(", "'clear'", ",", "'more'", ",", "'less'", ",", "'man'", ")", ":", "if", "cmd", "not", "in", "self", ".", "magics_manager", ".", "magics", "[", "'line'", "]", ":", "self", ".", "alias_manager", ".", "soft_define_alias", "(", "cmd", ",", "cmd", ")", "# Flush the private list of module references kept for script", "# execution protection", "self", ".", "clear_main_mod_cache", "(", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py3/IPython/core/interactiveshell.py#L1422-L1483
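A short sketch that only makes sense inside a running IPython session; this method is what the %reset magic ultimately calls:
ip = get_ipython()               # provided by IPython at runtime
ip.user_ns['tmp'] = 42
ip.reset(new_session=False)      # clear user namespaces, keep the history session
assert 'tmp' not in ip.user_ns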
KratosMultiphysics/Kratos
0000833054ed0503424eb28205d6508d9ca6cbbc
applications/ContactStructuralMechanicsApplication/python_scripts/adaptative_remeshing_contact_structural_mechanics_analysis.py
python
AdaptativeRemeshingContactStructuralMechanicsAnalysis.Initialize
(self)
Initializing the Analysis
Initializing the Analysis
[ "Initializing", "the", "Analysis" ]
def Initialize(self): """ Initializing the Analysis """ super(AdaptativeRemeshingContactStructuralMechanicsAnalysis, self).Initialize() computing_model_part = self._GetSolver().GetComputingModelPart() if not self.process_remesh: convergence_criteria = self._GetSolver()._GetConvergenceCriterion() convergence_criteria.Initialize(computing_model_part) # Ensuring to have conditions on the BC before remesh is_surface = False for elem in computing_model_part.Elements: geom = elem.GetGeometry() if geom.WorkingSpaceDimension() != geom.LocalSpaceDimension(): is_surface = True break if not is_surface: list_model_parts = [] # We need to detect the conditions in the boundary conditions if self.project_parameters.Has("constraints_process_list"): constraints_process_list = self.project_parameters["constraints_process_list"] for i in range(0,constraints_process_list.size()): item = constraints_process_list[i] list_model_parts.append(item["Parameters"]["model_part_name"].GetString()) skin_detection_parameters = KM.Parameters(""" { "list_model_parts_to_assign_conditions" : [] } """) for name_mp in list_model_parts: skin_detection_parameters["list_model_parts_to_assign_conditions"].Append(name_mp) if computing_model_part.ProcessInfo[KM.DOMAIN_SIZE] == 2: detect_skin = KM.SkinDetectionProcess2D(computing_model_part, skin_detection_parameters) else: detect_skin = KM.SkinDetectionProcess3D(computing_model_part, skin_detection_parameters) detect_skin.Execute() self._GetSolver().SetEchoLevel(self.echo_level)
[ "def", "Initialize", "(", "self", ")", ":", "super", "(", "AdaptativeRemeshingContactStructuralMechanicsAnalysis", ",", "self", ")", ".", "Initialize", "(", ")", "computing_model_part", "=", "self", ".", "_GetSolver", "(", ")", ".", "GetComputingModelPart", "(", ")", "if", "not", "self", ".", "process_remesh", ":", "convergence_criteria", "=", "self", ".", "_GetSolver", "(", ")", ".", "_GetConvergenceCriterion", "(", ")", "convergence_criteria", ".", "Initialize", "(", "computing_model_part", ")", "# Ensuring to have conditions on the BC before remesh", "is_surface", "=", "False", "for", "elem", "in", "computing_model_part", ".", "Elements", ":", "geom", "=", "elem", ".", "GetGeometry", "(", ")", "if", "geom", ".", "WorkingSpaceDimension", "(", ")", "!=", "geom", ".", "LocalSpaceDimension", "(", ")", ":", "is_surface", "=", "True", "break", "if", "not", "is_surface", ":", "list_model_parts", "=", "[", "]", "# We need to detect the conditions in the boundary conditions", "if", "self", ".", "project_parameters", ".", "Has", "(", "\"constraints_process_list\"", ")", ":", "constraints_process_list", "=", "self", ".", "project_parameters", "[", "\"constraints_process_list\"", "]", "for", "i", "in", "range", "(", "0", ",", "constraints_process_list", ".", "size", "(", ")", ")", ":", "item", "=", "constraints_process_list", "[", "i", "]", "list_model_parts", ".", "append", "(", "item", "[", "\"Parameters\"", "]", "[", "\"model_part_name\"", "]", ".", "GetString", "(", ")", ")", "skin_detection_parameters", "=", "KM", ".", "Parameters", "(", "\"\"\"\n {\n \"list_model_parts_to_assign_conditions\" : []\n }\n \"\"\"", ")", "for", "name_mp", "in", "list_model_parts", ":", "skin_detection_parameters", "[", "\"list_model_parts_to_assign_conditions\"", "]", ".", "Append", "(", "name_mp", ")", "if", "computing_model_part", ".", "ProcessInfo", "[", "KM", ".", "DOMAIN_SIZE", "]", "==", "2", ":", "detect_skin", "=", "KM", ".", "SkinDetectionProcess2D", "(", "computing_model_part", ",", "skin_detection_parameters", ")", "else", ":", "detect_skin", "=", "KM", ".", "SkinDetectionProcess3D", "(", "computing_model_part", ",", "skin_detection_parameters", ")", "detect_skin", ".", "Execute", "(", ")", "self", ".", "_GetSolver", "(", ")", ".", "SetEchoLevel", "(", "self", ".", "echo_level", ")" ]
https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/applications/ContactStructuralMechanicsApplication/python_scripts/adaptative_remeshing_contact_structural_mechanics_analysis.py#L57-L94
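The skin-detection setup above reduces to filling a KM.Parameters JSON array with the model parts that carry boundary conditions. A minimal sketch of that step, assuming KratosMultiphysics is installed; the model part names are hypothetical:

import KratosMultiphysics as KM

# Empty JSON array to be filled with the model parts carrying BCs
skin_detection_parameters = KM.Parameters("""
{
    "list_model_parts_to_assign_conditions" : []
}
""")
for name_mp in ["DISPLACEMENT_Fixed", "Contact_Part"]:  # hypothetical names
    skin_detection_parameters["list_model_parts_to_assign_conditions"].Append(name_mp)
print(skin_detection_parameters.PrettyPrintJsonString())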
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/tkinter/__init__.py
python
Text.dlineinfo
(self, index)
return self._getints(self.tk.call(self._w, 'dlineinfo', index))
Return tuple (x,y,width,height,baseline) giving the bounding box and baseline position of the visible part of the line containing the character at INDEX.
Return tuple (x,y,width,height,baseline) giving the bounding box and baseline position of the visible part of the line containing the character at INDEX.
[ "Return", "tuple", "(", "x", "y", "width", "height", "baseline", ")", "giving", "the", "bounding", "box", "and", "baseline", "position", "of", "the", "visible", "part", "of", "the", "line", "containing", "the", "character", "at", "INDEX", "." ]
def dlineinfo(self, index): """Return tuple (x,y,width,height,baseline) giving the bounding box and baseline position of the visible part of the line containing the character at INDEX.""" return self._getints(self.tk.call(self._w, 'dlineinfo', index))
[ "def", "dlineinfo", "(", "self", ",", "index", ")", ":", "return", "self", ".", "_getints", "(", "self", ".", "tk", ".", "call", "(", "self", ".", "_w", ",", "'dlineinfo'", ",", "index", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/tkinter/__init__.py#L3140-L3144
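A quick usage sketch for dlineinfo; it needs a realized widget (and therefore a display), since Tk only reports geometry for visible lines:

import tkinter as tk

root = tk.Tk()
text = tk.Text(root, width=40, height=5)
text.insert("1.0", "hello world\nsecond line")
text.pack()
root.update()  # geometry is only valid once the widget has been drawn
print(text.dlineinfo("1.0"))  # e.g. (x, y, width, height, baseline)
root.destroy()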
hfinkel/llvm-project-cxxjit
91084ef018240bbb8e24235ff5cd8c355a9c1a1e
clang/bindings/python/clang/cindex.py
python
Cursor.is_static_method
(self)
return conf.lib.clang_CXXMethod_isStatic(self)
Returns True if the cursor refers to a C++ member function or member function template that is declared 'static'.
Returns True if the cursor refers to a C++ member function or member function template that is declared 'static'.
[ "Returns", "True", "if", "the", "cursor", "refers", "to", "a", "C", "++", "member", "function", "or", "member", "function", "template", "that", "is", "declared", "static", "." ]
def is_static_method(self): """Returns True if the cursor refers to a C++ member function or member function template that is declared 'static'. """ return conf.lib.clang_CXXMethod_isStatic(self)
[ "def", "is_static_method", "(", "self", ")", ":", "return", "conf", ".", "lib", ".", "clang_CXXMethod_isStatic", "(", "self", ")" ]
https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/clang/bindings/python/clang/cindex.py#L1485-L1489
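A hedged usage example, assuming libclang is installed and discoverable; the C++ snippet is invented for the demo:

from clang.cindex import CursorKind, Index

source = """
struct S {
    static int f();
    int g();
};
"""
tu = Index.create().parse("demo.cpp", unsaved_files=[("demo.cpp", source)])
for cursor in tu.cursor.walk_preorder():
    if cursor.kind == CursorKind.CXX_METHOD:
        print(cursor.spelling, cursor.is_static_method())  # f True, g False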
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/nn/probability/distribution/distribution.py
python
Distribution._set_sd
(self)
Set standard deviation based on the availability of `_sd` and `_var`.
Set standard deviation based on the availability of `_sd` and `_var`.
[ "Set", "standard", "deviation", "based", "on", "the", "availability", "of", "_sd", "and", "_var", "." ]
def _set_sd(self): """ Set standard deviation based on the availability of `_sd` and `_var`. """ if hasattr(self, '_sd'): self._call_sd = self._sd elif hasattr(self, '_var'): self._call_sd = self._calc_sd_from_var else: self._call_sd = self._raise_not_implemented_error('sd')
[ "def", "_set_sd", "(", "self", ")", ":", "if", "hasattr", "(", "self", ",", "'_sd'", ")", ":", "self", ".", "_call_sd", "=", "self", ".", "_sd", "elif", "hasattr", "(", "self", ",", "'_var'", ")", ":", "self", ".", "_call_sd", "=", "self", ".", "_calc_sd_from_var", "else", ":", "self", ".", "_call_sd", "=", "self", ".", "_raise_not_implemented_error", "(", "'sd'", ")" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/nn/probability/distribution/distribution.py#L256-L265
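The method is one arm of a hasattr-based dispatch: prefer a native _sd, fall back to deriving it from _var, else raise on use. A standalone imitation of the pattern (the Dist class and its lambdas are invented for illustration):

import math

class Dist:
    def __init__(self, var=None, sd=None):
        if sd is not None:
            self._sd = lambda: sd
        elif var is not None:
            self._var = lambda: var
        self._set_sd()

    def _set_sd(self):
        if hasattr(self, '_sd'):
            self._call_sd = self._sd
        elif hasattr(self, '_var'):
            self._call_sd = lambda: math.sqrt(self._var())  # sd from variance
        else:
            def _raise():
                raise NotImplementedError('sd is not implemented')
            self._call_sd = _raise

print(Dist(var=4.0)._call_sd())  # 2.0, derived from the variance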
google/mysql-protobuf
467cda676afaa49e762c5c9164a43f6ad31a1fbf
protobuf/python/google/protobuf/internal/python_message.py
python
_AddPropertiesForField
(field, cls)
Adds a public property for a protocol message field. Clients can use this property to get and (in the case of non-repeated scalar fields) directly set the value of a protocol message field. Args: field: A FieldDescriptor for this field. cls: The class we're constructing.
Adds a public property for a protocol message field. Clients can use this property to get and (in the case of non-repeated scalar fields) directly set the value of a protocol message field.
[ "Adds", "a", "public", "property", "for", "a", "protocol", "message", "field", ".", "Clients", "can", "use", "this", "property", "to", "get", "and", "(", "in", "the", "case", "of", "non", "-", "repeated", "scalar", "fields", ")", "directly", "set", "the", "value", "of", "a", "protocol", "message", "field", "." ]
def _AddPropertiesForField(field, cls): """Adds a public property for a protocol message field. Clients can use this property to get and (in the case of non-repeated scalar fields) directly set the value of a protocol message field. Args: field: A FieldDescriptor for this field. cls: The class we're constructing. """ # Catch it if we add other types that we should # handle specially here. assert _FieldDescriptor.MAX_CPPTYPE == 10 constant_name = field.name.upper() + "_FIELD_NUMBER" setattr(cls, constant_name, field.number) if field.label == _FieldDescriptor.LABEL_REPEATED: _AddPropertiesForRepeatedField(field, cls) elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: _AddPropertiesForNonRepeatedCompositeField(field, cls) else: _AddPropertiesForNonRepeatedScalarField(field, cls)
[ "def", "_AddPropertiesForField", "(", "field", ",", "cls", ")", ":", "# Catch it if we add other types that we should", "# handle specially here.", "assert", "_FieldDescriptor", ".", "MAX_CPPTYPE", "==", "10", "constant_name", "=", "field", ".", "name", ".", "upper", "(", ")", "+", "\"_FIELD_NUMBER\"", "setattr", "(", "cls", ",", "constant_name", ",", "field", ".", "number", ")", "if", "field", ".", "label", "==", "_FieldDescriptor", ".", "LABEL_REPEATED", ":", "_AddPropertiesForRepeatedField", "(", "field", ",", "cls", ")", "elif", "field", ".", "cpp_type", "==", "_FieldDescriptor", ".", "CPPTYPE_MESSAGE", ":", "_AddPropertiesForNonRepeatedCompositeField", "(", "field", ",", "cls", ")", "else", ":", "_AddPropertiesForNonRepeatedScalarField", "(", "field", ",", "cls", ")" ]
https://github.com/google/mysql-protobuf/blob/467cda676afaa49e762c5c9164a43f6ad31a1fbf/protobuf/python/google/protobuf/internal/python_message.py#L496-L518
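A toy re-creation of the pattern: attach a <NAME>_FIELD_NUMBER constant and a property to the class under construction. Field and Msg are invented stand-ins, not protobuf internals:

class Field:
    def __init__(self, name, number):
        self.name, self.number = name, number

def add_properties_for_field(field, cls):
    # mirrors constant_name = field.name.upper() + "_FIELD_NUMBER"
    setattr(cls, field.name.upper() + "_FIELD_NUMBER", field.number)
    attr = "_" + field.name
    def getter(self):
        return getattr(self, attr, 0)
    def setter(self, value):
        setattr(self, attr, value)
    setattr(cls, field.name, property(getter, setter))

class Msg:
    pass

add_properties_for_field(Field("id", 1), Msg)
m = Msg()
m.id = 7
print(Msg.ID_FIELD_NUMBER, m.id)  # 1 7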
adobe/chromium
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
tools/code_coverage/croc_html.py
python
CrocHtml.AddSectionHeader
(self, table, caption, itemtype, is_file=False)
Adds a section header to the coverage table. Args: table: Table to add rows to. caption: Caption for section, if not None. itemtype: Type of items in this section, if not None. is_file: Are items in this section files?
Adds a section header to the coverage table.
[ "Adds", "a", "section", "header", "to", "the", "coverage", "table", "." ]
def AddSectionHeader(self, table, caption, itemtype, is_file=False): """Adds a section header to the coverage table. Args: table: Table to add rows to. caption: Caption for section, if not None. itemtype: Type of items in this section, if not None. is_file: Are items in this section files? """ if caption is not None: table.E('tr').E('td', e_class='secdesc', colspan=8).Text(caption) sec_hdr = table.E('tr') if itemtype is not None: sec_hdr.E('td', e_class='section').Text(itemtype) sec_hdr.E('td', e_class='section').Text('Coverage') sec_hdr.E('td', e_class='section', colspan=3).Text( 'Lines executed / instrumented / missing') graph = sec_hdr.E('td', e_class='section') graph.E('span', style='color:#00FF00').Text('exe') graph.Text(' / ') graph.E('span', style='color:#FFFF00').Text('inst') graph.Text(' / ') graph.E('span', style='color:#FF0000').Text('miss') if is_file: sec_hdr.E('td', e_class='section').Text('Language') sec_hdr.E('td', e_class='section').Text('Group') else: sec_hdr.E('td', e_class='section', colspan=2)
[ "def", "AddSectionHeader", "(", "self", ",", "table", ",", "caption", ",", "itemtype", ",", "is_file", "=", "False", ")", ":", "if", "caption", "is", "not", "None", ":", "table", ".", "E", "(", "'tr'", ")", ".", "E", "(", "'td'", ",", "e_class", "=", "'secdesc'", ",", "colspan", "=", "8", ")", ".", "Text", "(", "caption", ")", "sec_hdr", "=", "table", ".", "E", "(", "'tr'", ")", "if", "itemtype", "is", "not", "None", ":", "sec_hdr", ".", "E", "(", "'td'", ",", "e_class", "=", "'section'", ")", ".", "Text", "(", "itemtype", ")", "sec_hdr", ".", "E", "(", "'td'", ",", "e_class", "=", "'section'", ")", ".", "Text", "(", "'Coverage'", ")", "sec_hdr", ".", "E", "(", "'td'", ",", "e_class", "=", "'section'", ",", "colspan", "=", "3", ")", ".", "Text", "(", "'Lines executed / instrumented / missing'", ")", "graph", "=", "sec_hdr", ".", "E", "(", "'td'", ",", "e_class", "=", "'section'", ")", "graph", ".", "E", "(", "'span'", ",", "style", "=", "'color:#00FF00'", ")", ".", "Text", "(", "'exe'", ")", "graph", ".", "Text", "(", "' / '", ")", "graph", ".", "E", "(", "'span'", ",", "style", "=", "'color:#FFFF00'", ")", ".", "Text", "(", "'inst'", ")", "graph", ".", "Text", "(", "' / '", ")", "graph", ".", "E", "(", "'span'", ",", "style", "=", "'color:#FF0000'", ")", ".", "Text", "(", "'miss'", ")", "if", "is_file", ":", "sec_hdr", ".", "E", "(", "'td'", ",", "e_class", "=", "'section'", ")", ".", "Text", "(", "'Language'", ")", "sec_hdr", ".", "E", "(", "'td'", ",", "e_class", "=", "'section'", ")", ".", "Text", "(", "'Group'", ")", "else", ":", "sec_hdr", ".", "E", "(", "'td'", ",", "e_class", "=", "'section'", ",", "colspan", "=", "2", ")" ]
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/tools/code_coverage/croc_html.py#L186-L219
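The method leans on a small chainable element builder (E appends a child, Text appends content). This is a guessed, minimal stand-in for that builder, not the real croc implementation; attributes are stored but not rendered:

class El:
    def __init__(self, tag, **attrs):
        self.tag, self.attrs, self.children, self.text = tag, attrs, [], ""
    def E(self, tag, **attrs):
        child = El(tag, **attrs)
        self.children.append(child)
        return child  # returning the child is what makes .E().E() chain
    def Text(self, s):
        self.text += s
        return self
    def render(self):
        inner = self.text + "".join(c.render() for c in self.children)
        return f"<{self.tag}>{inner}</{self.tag}>"

table = El("table")
table.E("tr").E("td", e_class="secdesc", colspan=8).Text("Files")
print(table.render())  # <table><tr><td>Files</td></tr></table>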
natanielruiz/android-yolo
1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
jni-build/jni/include/tensorflow/contrib/distributions/python/ops/dirichlet_multinomial.py
python
DirichletMultinomial.variance
(self, name="variance")
Class variances for every batch member. The variance for each batch member is defined as the following: ``` Var(X_j) = n * alpha_j / alpha_0 * (1 - alpha_j / alpha_0) * (n + alpha_0) / (1 + alpha_0) ``` where `alpha_0 = sum_j alpha_j`. The covariance between elements in a batch is defined as: ``` Cov(X_i, X_j) = -n * alpha_i * alpha_j / alpha_0 ** 2 * (n + alpha_0) / (1 + alpha_0) ``` Args: name: The name for this op. Returns: A `Tensor` representing the variances for each batch member.
Class variances for every batch member.
[ "Class", "variances", "for", "every", "batch", "member", "." ]
def variance(self, name="variance"): """Class variances for every batch member. The variance for each batch member is defined as the following: ``` Var(X_j) = n * alpha_j / alpha_0 * (1 - alpha_j / alpha_0) * (n + alpha_0) / (1 + alpha_0) ``` where `alpha_0 = sum_j alpha_j`. The covariance between elements in a batch is defined as: ``` Cov(X_i, X_j) = -n * alpha_i * alpha_j / alpha_0 ** 2 * (n + alpha_0) / (1 + alpha_0) ``` Args: name: The name for this op. Returns: A `Tensor` representing the variances for each batch member. """ alpha = self._alpha alpha_sum = self._alpha_sum n = self._n with ops.name_scope(self.name): with ops.op_scope([alpha, alpha_sum, n], name): expanded_alpha_sum = array_ops.expand_dims(alpha_sum, -1) shared_factor = n * (expanded_alpha_sum + n) / ( expanded_alpha_sum + 1) * array_ops.ones_like(alpha) mean_no_n = alpha / expanded_alpha_sum expanded_mean_no_n = array_ops.expand_dims(mean_no_n, -1) variance = -math_ops.batch_matmul( expanded_mean_no_n, expanded_mean_no_n, adj_y=True) variance += array_ops.batch_matrix_diag(mean_no_n) variance *= array_ops.expand_dims(shared_factor, -1) return variance
[ "def", "variance", "(", "self", ",", "name", "=", "\"mean\"", ")", ":", "alpha", "=", "self", ".", "_alpha", "alpha_sum", "=", "self", ".", "_alpha_sum", "n", "=", "self", ".", "_n", "with", "ops", ".", "name_scope", "(", "self", ".", "name", ")", ":", "with", "ops", ".", "op_scope", "(", "[", "alpha", ",", "alpha_sum", ",", "n", "]", ",", "name", ")", ":", "expanded_alpha_sum", "=", "array_ops", ".", "expand_dims", "(", "alpha_sum", ",", "-", "1", ")", "shared_factor", "=", "n", "*", "(", "expanded_alpha_sum", "+", "n", ")", "/", "(", "expanded_alpha_sum", "+", "1", ")", "*", "array_ops", ".", "ones_like", "(", "alpha", ")", "mean_no_n", "=", "alpha", "/", "expanded_alpha_sum", "expanded_mean_no_n", "=", "array_ops", ".", "expand_dims", "(", "mean_no_n", ",", "-", "1", ")", "variance", "=", "-", "math_ops", ".", "batch_matmul", "(", "expanded_mean_no_n", ",", "expanded_mean_no_n", ",", "adj_y", "=", "True", ")", "variance", "+=", "array_ops", ".", "batch_matrix_diag", "(", "mean_no_n", ")", "variance", "*=", "array_ops", ".", "expand_dims", "(", "shared_factor", ",", "-", "1", ")", "return", "variance" ]
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/distributions/python/ops/dirichlet_multinomial.py#L210-L250
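A numeric sanity check of the docstring's formulas with plain NumPy (the TF contrib ops used above are long gone); alpha and n are arbitrary:

import numpy as np

alpha = np.array([1.0, 2.0, 3.0])
n = 5.0
a0 = alpha.sum()
p = alpha / a0
factor = n * (n + a0) / (1 + a0)

# Var(X_j) on the diagonal, Cov(X_i, X_j) off it
cov = factor * (np.diag(p) - np.outer(p, p))
print(np.diag(cov))  # n * p_j * (1 - p_j) * (n + a0) / (1 + a0)
print(cov[0, 1])     # -n * alpha_i * alpha_j / a0**2 * (n + a0) / (1 + a0)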
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/generic.py
python
NDFrame._construct_axes_dict_from
(self, axes, **kwargs)
return d
Return an axes dictionary for the passed axes.
Return an axes dictionary for the passed axes.
[ "Return", "an", "axes", "dictionary", "for", "the", "passed", "axes", "." ]
def _construct_axes_dict_from(self, axes, **kwargs): """Return an axes dictionary for the passed axes.""" d = {a: ax for a, ax in zip(self._AXIS_ORDERS, axes)} d.update(kwargs) return d
[ "def", "_construct_axes_dict_from", "(", "self", ",", "axes", ",", "*", "*", "kwargs", ")", ":", "d", "=", "{", "a", ":", "ax", "for", "a", ",", "ax", "in", "zip", "(", "self", ".", "_AXIS_ORDERS", ",", "axes", ")", "}", "d", ".", "update", "(", "kwargs", ")", "return", "d" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/generic.py#L350-L354
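The helper is just zip-into-dict plus a keyword overlay; the same idea stands alone (axis names and values are made up):

axis_orders = ["index", "columns"]
axes = [[0, 1, 2], ["a", "b"]]

d = {a: ax for a, ax in zip(axis_orders, axes)}
d.update(copy=True)  # extra keyword arguments ride along
print(d)  # {'index': [0, 1, 2], 'columns': ['a', 'b'], 'copy': True}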
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_core.py
python
Point2D.GetFloor
(*args, **kwargs)
return _core_.Point2D_GetFloor(*args, **kwargs)
GetFloor() -> (x,y) Convert to integer
GetFloor() -> (x,y)
[ "GetFloor", "()", "-", ">", "(", "x", "y", ")" ]
def GetFloor(*args, **kwargs): """ GetFloor() -> (x,y) Convert to integer """ return _core_.Point2D_GetFloor(*args, **kwargs)
[ "def", "GetFloor", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Point2D_GetFloor", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L1656-L1662
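A small usage sketch, assuming classic wxPython and a display (a wx.App must exist before GUI objects are created on some platforms):

import wx

app = wx.App(False)
pt = wx.Point2D(2.9, 3.1)
print(pt.GetFloor())  # (2, 3): the double coordinates floored to ints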
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/wx.py
python
inputhook_wx2
(context)
return 0
Run the wx event loop, polling for stdin. This version runs the wx event loop for an undetermined amount of time, during which it periodically checks to see if anything is ready on stdin. If anything is ready on stdin, the event loop exits. The argument to elr.Run controls how often the event loop looks at stdin. This determines the responsiveness at the keyboard. A setting of 1000 enables a user to type at most 1 char per second. I have found that a setting of 10 gives good keyboard response. We can shorten it further, but eventually performance would suffer from calling select/kbhit too often.
Run the wx event loop, polling for stdin.
[ "Run", "the", "wx", "event", "loop", "polling", "for", "stdin", "." ]
def inputhook_wx2(context): """Run the wx event loop, polling for stdin. This version runs the wx event loop for an undetermined amount of time, during which it periodically checks to see if anything is ready on stdin. If anything is ready on stdin, the event loop exits. The argument to elr.Run controls how often the event loop looks at stdin. This determines the responsiveness at the keyboard. A setting of 1000 enables a user to type at most 1 char per second. I have found that a setting of 10 gives good keyboard response. We can shorten it further, but eventually performance would suffer from calling select/kbhit too often. """ app = wx.GetApp() if app is not None: assert wx.Thread_IsMain() elr = EventLoopRunner() # As this time is made shorter, keyboard response improves, but idle # CPU load goes up. 10 ms seems like a good compromise. elr.Run(time=10, # CHANGE time here to control polling interval input_is_ready=context.input_is_ready) return 0
[ "def", "inputhook_wx2", "(", "context", ")", ":", "app", "=", "wx", ".", "GetApp", "(", ")", "if", "app", "is", "not", "None", ":", "assert", "wx", ".", "Thread_IsMain", "(", ")", "elr", "=", "EventLoopRunner", "(", ")", "# As this time is made shorter, keyboard response improves, but idle", "# CPU load goes up. 10 ms seems like a good compromise.", "elr", ".", "Run", "(", "time", "=", "10", ",", "# CHANGE time here to control polling interval", "input_is_ready", "=", "context", ".", "input_is_ready", ")", "return", "0" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/wx.py#L76-L98
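The hook alternates GUI work with cheap stdin checks. The same polling shape in plain Python, using select with a 10 ms timeout in place of the wx event loop (Unix only, since select on sys.stdin is not portable to Windows):

import select
import sys

# Block for at most 10 ms per pass, mirroring elr.Run(time=10, ...)
while True:
    ready, _, _ = select.select([sys.stdin], [], [], 0.01)
    if ready:
        break
    # a real input hook would run one slice of the GUI event loop here
print("got:", sys.stdin.readline().rstrip())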
weolar/miniblink49
1c4678db0594a4abde23d3ebbcc7cd13c3170777
v8_7_5/tools/grokdump.py
python
InspectionShell.do_known_oldspace
(self, address)
Teach V8 heap layout information to the inspector. Set the first old space page by passing any pointer into that page.
Teach V8 heap layout information to the inspector.
[ "Teach", "V8", "heap", "layout", "information", "to", "the", "inspector", "." ]
def do_known_oldspace(self, address): """ Teach V8 heap layout information to the inspector. Set the first old space page by passing any pointer into that page. """ address = self.ParseAddressExpr(address) page_address = address & ~self.heap.PageAlignmentMask() self.padawan.known_first_old_page = page_address
[ "def", "do_known_oldspace", "(", "self", ",", "address", ")", ":", "address", "=", "self", ".", "ParseAddressExpr", "(", "address", ")", "page_address", "=", "address", "&", "~", "self", ".", "heap", ".", "PageAlignmentMask", "(", ")", "self", ".", "padawan", ".", "known_first_old_page", "=", "page_address" ]
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/v8_7_5/tools/grokdump.py#L3646-L3654
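The page computation is plain alignment masking. A standalone illustration with a hypothetical 256 KiB page (the real mask comes from the heap object and varies by V8 build):

PAGE_SIZE = 256 * 1024           # hypothetical; grokdump asks the heap
PAGE_ALIGNMENT_MASK = PAGE_SIZE - 1

address = 0x2A3481F0
page_address = address & ~PAGE_ALIGNMENT_MASK
print(hex(page_address))         # 0x2a340000, start of the containing page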
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py3/pandas/plotting/_matplotlib/style.py
python
_derive_colors
( *, color: Color | Collection[Color] | None, colormap: str | Colormap | None, color_type: str, num_colors: int, )
Derive colors from either `colormap`, `color_type` or `color` inputs. Get a list of colors either from `colormap`, or from `color`, or from `color_type` (if both `colormap` and `color` are None). Parameters ---------- color : str or sequence, optional Color(s) to be used for deriving sequence of colors. Can either be a single color (single color string, or sequence of floats representing a single color), or a sequence of colors. colormap : :py:class:`matplotlib.colors.Colormap`, optional Matplotlib colormap. When provided, the resulting colors will be derived from the colormap. color_type : {"default", "random"}, optional Type of colors to derive. Used if provided `color` and `colormap` are None. Ignored if either `color` or `colormap` are not None. num_colors : int Number of colors to be extracted. Returns ------- list List of colors extracted. Warns ----- UserWarning If both `colormap` and `color` are provided. Parameter `color` will override.
Derive colors from either `colormap`, `color_type` or `color` inputs.
[ "Derive", "colors", "from", "either", "colormap", "color_type", "or", "color", "inputs", "." ]
def _derive_colors( *, color: Color | Collection[Color] | None, colormap: str | Colormap | None, color_type: str, num_colors: int, ) -> list[Color]: """ Derive colors from either `colormap`, `color_type` or `color` inputs. Get a list of colors either from `colormap`, or from `color`, or from `color_type` (if both `colormap` and `color` are None). Parameters ---------- color : str or sequence, optional Color(s) to be used for deriving sequence of colors. Can either be a single color (single color string, or sequence of floats representing a single color), or a sequence of colors. colormap : :py:class:`matplotlib.colors.Colormap`, optional Matplotlib colormap. When provided, the resulting colors will be derived from the colormap. color_type : {"default", "random"}, optional Type of colors to derive. Used if provided `color` and `colormap` are None. Ignored if either `color` or `colormap` are not None. num_colors : int Number of colors to be extracted. Returns ------- list List of colors extracted. Warns ----- UserWarning If both `colormap` and `color` are provided. Parameter `color` will override. """ if color is None and colormap is not None: return _get_colors_from_colormap(colormap, num_colors=num_colors) elif color is not None: if colormap is not None: warnings.warn( "'color' and 'colormap' cannot be used simultaneously. Using 'color'" ) return _get_colors_from_color(color) else: return _get_colors_from_color_type(color_type, num_colors=num_colors)
[ "def", "_derive_colors", "(", "*", ",", "color", ":", "Color", "|", "Collection", "[", "Color", "]", "|", "None", ",", "colormap", ":", "str", "|", "Colormap", "|", "None", ",", "color_type", ":", "str", ",", "num_colors", ":", "int", ",", ")", "->", "list", "[", "Color", "]", ":", "if", "color", "is", "None", "and", "colormap", "is", "not", "None", ":", "return", "_get_colors_from_colormap", "(", "colormap", ",", "num_colors", "=", "num_colors", ")", "elif", "color", "is", "not", "None", ":", "if", "colormap", "is", "not", "None", ":", "warnings", ".", "warn", "(", "\"'color' and 'colormap' cannot be used simultaneously. Using 'color'\"", ")", "return", "_get_colors_from_color", "(", "color", ")", "else", ":", "return", "_get_colors_from_color_type", "(", "color_type", ",", "num_colors", "=", "num_colors", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/plotting/_matplotlib/style.py#L80-L128
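What "colors from a colormap" amounts to is sampling the map at evenly spaced points. A sketch with matplotlib (the private pandas helpers are not called directly; colormap name and count are arbitrary):

import numpy as np
import matplotlib.pyplot as plt

num_colors = 4
cmap = plt.get_cmap("viridis")
colors = [cmap(x) for x in np.linspace(0, 1, num_colors)]
print(colors)  # four evenly spaced RGBA tuples from the colormap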
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_controls.py
python
ListItem.GetBackgroundColour
(*args, **kwargs)
return _controls_.ListItem_GetBackgroundColour(*args, **kwargs)
GetBackgroundColour(self) -> Colour
GetBackgroundColour(self) -> Colour
[ "GetBackgroundColour", "(", "self", ")", "-", ">", "Colour" ]
def GetBackgroundColour(*args, **kwargs): """GetBackgroundColour(self) -> Colour""" return _controls_.ListItem_GetBackgroundColour(*args, **kwargs)
[ "def", "GetBackgroundColour", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "ListItem_GetBackgroundColour", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_controls.py#L4260-L4262
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/geographic_msgs/msg/_RoutePath.py
python
RoutePath.__init__
(self, *args, **kwds)
Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommended use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: header,network,segments,props :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields.
Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommended use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments.
[ "Constructor", ".", "Any", "message", "fields", "that", "are", "implicitly", "/", "explicitly", "set", "to", "None", "will", "be", "assigned", "a", "default", "value", ".", "The", "recommend", "use", "is", "keyword", "arguments", "as", "this", "is", "more", "robust", "to", "future", "message", "changes", ".", "You", "cannot", "mix", "in", "-", "order", "arguments", "and", "keyword", "arguments", "." ]
def __init__(self, *args, **kwds): """ Constructor. Any message fields that are implicitly/explicitly set to None will be assigned a default value. The recommended use is keyword arguments as this is more robust to future message changes. You cannot mix in-order arguments and keyword arguments. The available fields are: header,network,segments,props :param args: complete set of field values, in .msg order :param kwds: use keyword arguments corresponding to message field names to set specific fields. """ if args or kwds: super(RoutePath, self).__init__(*args, **kwds) #message fields cannot be None, assign default values for those that are if self.header is None: self.header = std_msgs.msg.Header() if self.network is None: self.network = uuid_msgs.msg.UniqueID() if self.segments is None: self.segments = [] if self.props is None: self.props = [] else: self.header = std_msgs.msg.Header() self.network = uuid_msgs.msg.UniqueID() self.segments = [] self.props = []
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "if", "args", "or", "kwds", ":", "super", "(", "RoutePath", ",", "self", ")", ".", "__init__", "(", "*", "args", ",", "*", "*", "kwds", ")", "#message fields cannot be None, assign default values for those that are", "if", "self", ".", "header", "is", "None", ":", "self", ".", "header", "=", "std_msgs", ".", "msg", ".", "Header", "(", ")", "if", "self", ".", "network", "is", "None", ":", "self", ".", "network", "=", "uuid_msgs", ".", "msg", ".", "UniqueID", "(", ")", "if", "self", ".", "segments", "is", "None", ":", "self", ".", "segments", "=", "[", "]", "if", "self", ".", "props", "is", "None", ":", "self", ".", "props", "=", "[", "]", "else", ":", "self", ".", "header", "=", "std_msgs", ".", "msg", ".", "Header", "(", ")", "self", ".", "network", "=", "uuid_msgs", ".", "msg", ".", "UniqueID", "(", ")", "self", ".", "segments", "=", "[", "]", "self", ".", "props", "=", "[", "]" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/geographic_msgs/msg/_RoutePath.py#L69-L98
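The constructor follows the standard genpy shape: accept positional or keyword fields, then give any field left as None its default. A standalone imitation (the Path class is invented; dicts stand in for the real Header/UniqueID messages):

class Path:
    FIELDS = ("header", "network", "segments", "props")

    def __init__(self, *args, **kwds):
        # positional args fill fields in declared order; kwds fill by name
        # (genpy itself forbids mixing the two styles)
        for name, value in zip(self.FIELDS, args):
            setattr(self, name, value)
        for name, value in kwds.items():
            setattr(self, name, value)
        # any field still unset or None gets its default value
        for name in self.FIELDS:
            if getattr(self, name, None) is None:
                setattr(self, name, [] if name in ("segments", "props") else {})

p = Path(segments=[1, 2])
print(p.segments, p.props)  # [1, 2] []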